blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6094e58198120626f12f5aa95fe32e016cc64ba5 | 20ed6e74d227e097a924e050bc82682381739fc7 | /src/tx/functional/list.py | 45c2627a517eeef89d294e0f38dfc335f6b719e2 | [
"MIT"
] | permissive | RENCI/tx-functional | c341f38293a889e125824822c47b0d1f1f3f87fb | 45427ab06b7d029940e250a5f189997a8111d3f0 | refs/heads/master | 2022-12-11T11:53:34.830442 | 2020-09-05T17:56:40 | 2020-09-05T17:56:40 | 264,998,389 | 1 | 1 | null | 2020-05-18T17:14:53 | 2020-05-18T16:32:09 | Python | UTF-8 | Python | false | false | 717 | py | from .traversable import Traversable
from typing import Generic, TypeVar, Callable, Any, List
from .functor import Functor
from .applicative import Applicative
from .utils import foldl, foldr, Arrow
# Generic type parameters shared by the list helpers below.
S = TypeVar("S")
T = TypeVar("T")
def rec(ma: List[S], b: T, f: Callable[[S, T], T]) -> T:
    """Right-fold ``ma`` with ``f`` starting from the seed value ``b``.

    Thin wrapper over ``foldr`` exposing a list-recursion argument order.
    """
    return foldr(f, b, ma)
def _map(f: Arrow[S, T], ma: List[S]) -> List[T]:
    """Apply ``f`` to each element of ``ma``, returning a new list."""
    return [f(element) for element in ma]
def append(l: List[T], a: T) -> List[T]:
    """Return a copy of ``l`` with ``a`` appended; ``l`` is not mutated."""
    return [*l, a]
def sequenceA(m: Applicative, ma: List[Any]) -> Any:
    """Sequence a list of applicative actions into one action yielding a list.

    Folds from the left, lifting ``append`` into ``m`` so results keep their
    original order; the accumulator starts from ``m.pure([])``.
    """
    return foldl(m.liftA2(append), m.pure([]), ma)
# Functor instance for the built-in list type.
list_functor = Functor(_map)
def list_traversable(m: Applicative) -> Traversable:
    """Build the list ``Traversable`` instance for the applicative ``m``."""
    return Traversable(_map, lambda ma: sequenceA(m, ma))
| [
"[email protected]"
] | |
2a15671d6f800b0ed904eda1e0736d12f02e3e02 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/39/usersdata/120/13433/submittedfiles/dec2bin.py | 9a40134fda961c27492e0c85f0f6f823e4b3bc9e | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | # -*- coding: utf-8 -*-
from __future__ import division
d=input('digite um numero na base decimal:')
cont=0
k=d
while k>0:
k=k//2
cont+=1
n=cont
i=0
soma=0
r=d
while i<=(n-1):
r=d%2
d=d//d
b=r*(10**i)
i+=1
soma=soma+b
print soma | [
"[email protected]"
] | |
1b8ca6d4285b1bf2ba6e8ef64cd2e7b25a83d932 | 2af6a5c2d33e2046a1d25ae9dd66d349d3833940 | /res_bw/scripts/common/lib/idlelib/idle_test/test_pathbrowser.py | c74b4f69f1a9d7f33751f283cef5ff88fc8318bc | [] | no_license | webiumsk/WOT-0.9.12-CT | e6c8b5bb106fad71b5c3056ada59fb1aebc5f2b2 | 2506e34bd6634ad500b6501f4ed4f04af3f43fa0 | refs/heads/master | 2021-01-10T01:38:38.080814 | 2015-11-11T00:08:04 | 2015-11-11T00:08:04 | 45,803,240 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 652 | py | # 2015.11.10 21:36:26 Střední Evropa (běžný čas)
# Embedded file name: scripts/common/Lib/idlelib/idle_test/test_pathbrowser.py
import unittest
import idlelib.PathBrowser as PathBrowser
class PathBrowserTest(unittest.TestCase):
    """Smoke test for idlelib.PathBrowser tree items."""
    def test_DirBrowserTreeItem(self):
        # Building the sublist for an empty path should not raise.
        d = PathBrowser.DirBrowserTreeItem('')
        d.GetSubList()
if __name__ == '__main__':
    unittest.main(verbosity=2, exit=False)
# okay decompyling c:\Users\PC\wotsources\files\originals\res_bw\scripts\common\lib\idlelib\idle_test\test_pathbrowser.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2015.11.10 21:36:26 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
ff60581243148821c05ad3c7d119f8f54162d2c1 | fafb10ff1b5ec8dcd3c131649afa9baa80159770 | /password.py | b873790aaf3464f365923513fb67ba9dd5799129 | [
"MIT"
] | permissive | pombredanne/webkit-vuln-finder | 94f182fb7a40e8f35cce6ed08598a97435d3bad7 | 5da9e457b5bfd3715d1d97811cf21750dc9b1082 | refs/heads/master | 2021-04-28T18:07:43.793971 | 2017-10-21T02:24:35 | 2017-10-21T02:24:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63 | py | username = "thatOneGuy"
password = "correcthorsebatterystaple"
| [
"[email protected]"
] | |
44d907b9a568613000577d7c233cfdfa14a4213a | 032117bbf248a76abd25fcc2355bc8ade84fa76a | /django/theproject2/pro3/app3/views.py | be4a8ac1bbf617042c03024c19496ce33a98df2b | [] | no_license | shefaligoel136/python_summer_training | ba8f28f6af008584b4239c73d466e4e9d35b4b01 | 0b97fea050342fe4ed95b18c5f7ed885a6c8ca23 | refs/heads/master | 2022-11-13T07:22:32.855717 | 2020-07-06T08:33:19 | 2020-07-06T08:33:19 | 277,480,122 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | from django.shortcuts import render
def home(request):
    """Render the calculator page."""
    return render(request,'temphtml.html')
def solve1(request):
    """Apply the submitted operator ``op`` to ``num1`` and ``num2``.

    Renders ``temphtml.html`` with the result under the ``res`` key, or with
    the message ``'wrong expression'`` when ``op`` is not one of ``+ - * /``.
    A division by zero still propagates, matching the original behavior.
    """
    val1 = float(request.POST['num1'])
    val2 = float(request.POST['num2'])
    op = request.POST['op']
    # Dispatch table replaces the repeated if/elif chain with duplicated
    # render calls; behavior is unchanged.
    operations = {
        '+': lambda a, b: a + b,
        '-': lambda a, b: a - b,
        '*': lambda a, b: a * b,
        '/': lambda a, b: a / b,
    }
    if op in operations:
        ans = operations[op](val1, val2)
        return render(request, 'temphtml.html', {'res': ans})
    return render(request, 'temphtml.html', {'res': 'wrong expression'})
def solve2(request):
    """Evaluate the submitted expression string and render the result."""
    val = (request.POST['num'])
    # SECURITY: eval() on user-supplied input executes arbitrary Python code.
    # Replace with ast.literal_eval or a dedicated expression parser before
    # exposing this view to untrusted users.
    s = eval(val)
    return render(request,'temphtml.html',{'res1':s})
"[email protected]"
] | |
99aadc30ade96f7a2b39bb1935c8d592ccd03ed7 | 49a167d942f19fc084da2da68fc3881d44cacdd7 | /kubernetes_asyncio/test/test_policy_v1beta1_id_range.py | 6e5e62b8e765d805b6eb01144abad5213e8a04c2 | [
"Apache-2.0"
] | permissive | olitheolix/kubernetes_asyncio | fdb61323dc7fc1bade5e26e907de0fe6e0e42396 | 344426793e4e4b653bcd8e4a29c6fa4766e1fff7 | refs/heads/master | 2020-03-19T12:52:27.025399 | 2018-06-24T23:34:03 | 2018-06-24T23:34:03 | 136,546,270 | 1 | 0 | Apache-2.0 | 2018-06-24T23:52:47 | 2018-06-08T00:39:52 | Python | UTF-8 | Python | false | false | 1,030 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.10.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import kubernetes_asyncio.client
from kubernetes_asyncio.client.models.policy_v1beta1_id_range import PolicyV1beta1IDRange # noqa: E501
from kubernetes_asyncio.client.rest import ApiException
class TestPolicyV1beta1IDRange(unittest.TestCase):
    """PolicyV1beta1IDRange unit test stubs"""
    # Generated by swagger-codegen; setUp/tearDown are intentionally empty.
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def testPolicyV1beta1IDRange(self):
        """Test PolicyV1beta1IDRange"""
        # FIXME: construct object with mandatory attributes with example values
        # model = kubernetes_asyncio.client.models.policy_v1beta1_id_range.PolicyV1beta1IDRange() # noqa: E501
        pass
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
17e41741acf2c53e5af3b84136bdd4fb2cea28cd | ed6b358cfaf9bc61dab608b117c2cf0abcf90854 | /xichuangzhu/controllers/user.py | 5cf56bb8f0920a95c7c74bd335ae0e05c71f22c6 | [] | no_license | wallaceyuan/xichuangzhu | faa8fdec2a670661165d351ac3311126c8fc91e3 | ec45aa8b3f4b1e6b9b70537e270be89e97034c99 | refs/heads/master | 2021-01-20T21:34:45.949361 | 2014-05-23T07:29:50 | 2014-05-23T07:29:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,729 | py | # coding: utf-8
from __future__ import division
from flask import render_template, Blueprint, g
from ..models import User, CollectWork, CollectWorkImage, Work, WorkImage, WorkReview
from ..utils import check_is_me
from ..permissions import user_permission
bp = Blueprint('user', __name__)
@bp.route('/<user_abbr>')
def view(user_abbr):
    """User profile page: previews of reviews, topics and work images."""
    user = User.query.filter(User.abbr == user_abbr).first_or_404()
    query = user.work_reviews
    # Unpublished reviews are visible only to their owner.
    if not check_is_me(user.id):
        query = query.filter(WorkReview.is_publish == True)
    work_reviews = query.limit(3)
    work_reviews_num = query.count()
    topics = user.topics.limit(3)
    work_images = user.work_images.limit(16)
    return render_template('user/user.html', user=user, work_reviews=work_reviews,
                           work_reviews_num=work_reviews_num, topics=topics,
                           work_images=work_images)
@bp.route('/<user_abbr>/work_reviews', defaults={'page': 1})
@bp.route('/<user_abbr>/work_reviews/page/<int:page>')
def work_reviews(user_abbr, page):
    """Paginated list of the user's work reviews (10 per page)."""
    user = User.query.filter(User.abbr == user_abbr).first_or_404()
    work_reviews = user.work_reviews
    # Unpublished reviews are listed only for their owner.
    if not check_is_me(user.id):
        work_reviews = work_reviews.filter(WorkReview.is_publish == True)
    paginator = work_reviews.paginate(page, 10)
    return render_template('user/work_reviews.html', user=user, paginator=paginator)
@bp.route('/<user_abbr>/topics', defaults={'page': 1})
@bp.route('/<user_abbr>/topics/page/<int:page>')
def topics(user_abbr, page):
    """Paginated list of topics posted by the user (10 per page)."""
    user = User.query.filter(User.abbr == user_abbr).first_or_404()
    paginator = user.topics.paginate(page, 10)
    return render_template('user/topics.html', user=user, paginator=paginator)
@bp.route('/<user_abbr>/work_images', defaults={'page': 1})
@bp.route('/<user_abbr>/work_images/page/<int:page>')
def work_images(user_abbr, page):
    """Paginated grid of work images uploaded by the user (16 per page)."""
    user = User.query.filter(User.abbr == user_abbr).first_or_404()
    paginator = user.work_images.paginate(page, 16)
    return render_template('user/work_images.html', user=user, paginator=paginator)
@bp.route('/collects')
@user_permission
def collects():
    """Current user's collections page: latest works and work images."""
    collect_works = Work.query.join(CollectWork).filter(CollectWork.user_id == g.user.id).order_by(
        CollectWork.create_time.desc()).limit(6)
    collect_work_images = WorkImage.query.join(CollectWorkImage).filter(
        CollectWorkImage.user_id == g.user.id).order_by(
        CollectWorkImage.create_time.desc()).limit(9)
    return render_template('user/collects.html', user=g.user, collect_works=collect_works,
                           collect_work_images=collect_work_images)
@bp.route('/collect_works', defaults={'page': 1})
@bp.route('/collect_works/page/<int:page>')
@user_permission
def collect_works(page):
    """Paginated list of literary works collected by the current user."""
    paginator = Work.query.join(CollectWork).filter(
        CollectWork.user_id == g.user.id).order_by(
        CollectWork.create_time.desc()).paginate(page, 10)
    return render_template('user/collect_works.html', paginator=paginator)
@bp.route('/collect_work_images', defaults={'page': 1})
@bp.route('/collect_work_images/page/<int:page>')
@user_permission
def collect_work_images(page):
    """Paginated grid of work images collected by the current user."""
    paginator = WorkImage.query.join(CollectWorkImage).filter(
        CollectWorkImage.user_id == g.user.id).order_by(
        CollectWorkImage.create_time.desc()).paginate(page, 12)
    return render_template('user/collect_work_images.html', paginator=paginator)
"[email protected]"
] | |
31a4fa2f4e3d61e550041ea39e8c9d96f5eb0e47 | 860c16b6f4eb612f2f62a4ff073ad4b930eaa38a | /planscore/districts.py | 3968cb6d5de2eba3e2111de19d64cc011017ffb9 | [] | no_license | johndpope/PlanScore | d1f0c93e8e11e8ed7486a0932653c23b17c5eaaa | a0c62a4f75d577ee21c297199ce974cc8ec8167a | refs/heads/master | 2021-01-25T04:15:17.529051 | 2017-06-03T03:06:56 | 2017-06-03T03:06:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,508 | py | import collections, json, io, gzip, statistics, time, base64, posixpath, pickle
from osgeo import ogr
import boto3, botocore.exceptions
from . import prepare_state, score, data
# Raise Python exceptions from GDAL/OGR errors instead of printing them.
ogr.UseExceptions()
# Name of this Lambda function, used when the handler re-invokes itself.
FUNCTION_NAME = 'PlanScore-RunDistrict'
class Partial:
    ''' Partially-calculated district sums, used by consume_tiles().

        Carries everything needed to resume scoring one district: the
        accumulated totals, the precincts and tiles still to process, the
        district geometry, and the originating upload.
    '''
    def __init__(self, index, totals, precincts, tiles, geometry, upload):
        self.index = index          # district index within the upload
        self.totals = totals        # field name -> accumulated weighted total
        self.precincts = precincts  # precinct GeoJSON features not yet scored
        self.tiles = tiles          # tile "z/x/y" strings not yet loaded
        self.geometry = geometry    # district geometry (ogr.Geometry)
        self.upload = upload        # owning data.Upload
    
    def to_dict(self):
        # Compact summary (precincts reduced to a count) used for logging.
        return dict(index=self.index, totals=self.totals,
            precincts=len(self.precincts), tiles=self.tiles,
            upload=self.upload.to_dict())
    
    def to_event(self):
        # Full round-trippable form for re-invoking the Lambda: geometry
        # travels as WKT and precincts are scrunched (see below).
        return dict(index=self.index, totals=self.totals, tiles=self.tiles,
            geometry=self.geometry.ExportToWkt(), upload=self.upload.to_dict(),
            precincts=Partial.scrunch(self.precincts))
    
    @staticmethod
    def from_event(event):
        ''' Rebuild a Partial from a Lambda event dictionary.

            A first-time event carries no totals/precincts/tiles; in that
            case start fresh and derive the tile list from the geometry.
        '''
        totals = event.get('totals')
        precincts = event.get('precincts')
        tiles = event.get('tiles')
        geometry = ogr.CreateGeometryFromWkt(event['geometry'])
        index = event['index']
        upload = data.Upload.from_dict(event['upload'])
        
        if totals is None or precincts is None or tiles is None:
            totals, precincts, tiles = collections.defaultdict(int), [], get_geometry_tile_zxys(geometry)
        
        return Partial(index, totals, Partial.unscrunch(precincts), tiles, geometry, upload)
    
    @staticmethod
    def scrunch(thing):
        ''' Scrunch a thing into a compact (?) textual representation.
        '''
        return base64.a85encode(gzip.compress(pickle.dumps(thing))).decode('ascii')
    
    @staticmethod
    def unscrunch(thing):
        ''' Accept a scrunched representation of a thing and return the thing.
        
            Lists and dictionaries are simply returned instead of unscrunched.
        '''
        # NOTE(review): pickle.loads on event payloads — only safe because
        # these events originate from this same system; confirm events
        # cannot be supplied by untrusted callers.
        if type(thing) in (tuple, list, dict):
            return thing
        
        return pickle.loads(gzip.decompress(base64.a85decode(thing)))
def lambda_handler(event, context):
    ''' Score one district, re-invoking this Lambda when time runs short.

        Consumes tiles and precincts while timing each step; when the
        remaining execution time (minus a 30-second margin) falls below
        mean + 3*stdev of observed step times, the partial state is
        serialized and handed to a fresh invocation of this same function.
        Once every district of the upload has posted results, the final
        scoring Lambda is triggered.
    '''
    s3 = boto3.client('s3')
    partial = Partial.from_event(event)
    storage = data.Storage.from_event(event, s3)
    
    start_time, times = time.time(), []

    print('Starting with', len(partial.precincts),
        'precincts and', len(partial.tiles), 'tiles remaining')

    for (index, _) in enumerate(consume_tiles(storage, partial)):
        times.append(time.time() - start_time)
        start_time = time.time()
        
        # Estimate how long the next step could take from recent history.
        stdev = statistics.stdev(times) if len(times) > 1 else times[0]
        cutoff_msec = 1000 * (statistics.mean(times) + 3 * stdev)
        remain_msec = context.get_remaining_time_in_millis() - 30000 # 30 seconds for Lambda
        
        if remain_msec > cutoff_msec:
            # There's time to do more
            continue

        print('Iteration:', json.dumps(partial.to_dict()))

        print('Stopping with', remain_msec, 'msec,', len(partial.precincts),
            'precincts, and', len(partial.tiles), 'tiles remaining after',
            index + 1, 'iterations.')

        # Hand the remaining work to a fresh invocation of this Lambda.
        event = partial.to_event()
        event.update(storage.to_event())

        payload = json.dumps(event).encode('utf8')
        print('Sending payload of', len(payload), 'bytes...')

        lam = boto3.client('lambda')
        lam.invoke(FunctionName=FUNCTION_NAME, InvocationType='Event',
            Payload=payload)

        return

    final = post_score_results(storage, partial)
    
    if not final:
        # Other districts are still running; the last one to finish will
        # trigger the final scoring step.
        return
    
    print('All done, invoking', score.FUNCTION_NAME)

    event = partial.upload.to_dict()
    event.update(storage.to_event())

    lam = boto3.client('lambda')
    lam.invoke(FunctionName=score.FUNCTION_NAME, InvocationType='Event',
        Payload=json.dumps(event).encode('utf8'))
def post_score_results(storage, partial):
    ''' Upload this district's totals to S3.

        Returns True only when every district of the upload now has a
        results object, i.e. when this district was the last one to finish.
    '''
    key = partial.upload.district_key(partial.index)
    body = json.dumps(dict(totals=partial.totals)).encode('utf8')
    
    print('Uploading', len(body), 'bytes to', key)
    
    storage.s3.put_object(Bucket=storage.bucket, Key=key, Body=body,
        ContentType='text/json', ACL='private')
    
    # Look for the other expected districts.
    prefix = posixpath.dirname(key)
    # NOTE(review): list_objects returns at most 1000 keys per call;
    # confirm plans never exceed that many district objects.
    listed_objects = storage.s3.list_objects(Bucket=storage.bucket, Prefix=prefix)
    existing_keys = [obj.get('Key') for obj in listed_objects.get('Contents', [])]
    
    for index in range(len(partial.upload.districts)):
        if partial.upload.district_key(index) not in existing_keys:
            return False

    # All of them were found
    return True
def consume_tiles(storage, partial):
    ''' Generate a stream of steps, updating totals from precincts and tiles.

        Inputs are modified directly, and lists should be empty at completion.
        Yields once after draining the precinct list and once per completed
        tile, so the caller can checkpoint between steps.
    '''
    # Start by draining the precincts list, which should be empty anyway.
    while partial.precincts:
        precinct = partial.precincts.pop(0)
        score_precinct(partial, precinct)
    
    # Yield once with an emptied precincts list.
    yield
    
    # Iterate over each tile, loading precincts and scoring them.
    while partial.tiles:
        tile_zxy = partial.tiles.pop(0)
        for precinct in load_tile_precincts(storage, tile_zxy):
            score_precinct(partial, precinct)
        
        # Yield after each complete tile is processed.
        yield
def score_precinct(partial, precinct):
    ''' Add this precinct's share of every score field into partial.totals.

        The share is (overlap area / precinct area) multiplied by the
        precinct's own coverage fraction from prepare_state.
    '''
    precinct_geom = ogr.CreateGeometryFromJson(json.dumps(precinct['geometry']))
    
    try:
        overlap_geom = precinct_geom.Intersection(partial.geometry)
    except RuntimeError as e:
        if 'TopologyException' in str(e) and not precinct_geom.IsValid():
            # Sometimes, a precinct geometry can be invalid
            # so inflate it by a tiny amount to smooth out problems
            precinct_geom = precinct_geom.Buffer(0.0000001)
            overlap_geom = precinct_geom.Intersection(partial.geometry)
        else:
            raise
    if precinct_geom.Area() == 0:
        # If we're about to divide by zero, don't bother.
        return

    overlap_area = overlap_geom.Area() / precinct_geom.Area()
    precinct_fraction = overlap_area * precinct['properties'][prepare_state.FRACTION_FIELD]
    
    for name in score.FIELD_NAMES:
        # Missing/None property values count as zero.
        precinct_value = precinct_fraction * (precinct['properties'][name] or 0)
        partial.totals[name] += precinct_value
def load_tile_precincts(storage, tile_zxy):
    ''' Get GeoJSON features for a specific tile.

        Returns an empty list when the tile object does not exist; the
        object body may be gzip-compressed.
    '''
    try:
        object = storage.s3.get_object(Bucket=storage.bucket,
            Key='{}/{}.geojson'.format(storage.prefix, tile_zxy))
    except botocore.exceptions.ClientError as error:
        if error.response['Error']['Code'] == 'NoSuchKey':
            return []
        raise

    if object.get('ContentEncoding') == 'gzip':
        object['Body'] = io.BytesIO(gzip.decompress(object['Body'].read()))
    
    geojson = json.load(object['Body'])
    return geojson['features']
def iterate_precincts(storage, precincts, tiles):
    ''' Generate a stream of precincts, getting new ones from tiles as needed.
        Input lists are modified directly, and should be empty at completion.
    '''
    while True:
        # Drain whatever precincts are currently queued.
        while precincts:
            yield precincts.pop(0)
        # Out of precincts: refill from the next tile, or finish.
        if not tiles:
            break
        precincts.extend(load_tile_precincts(storage, tiles.pop(0)))
def get_geometry_tile_zxys(district_geom):
    ''' Return a list of expected tile Z/X/Y strings.

        The geometry is reprojected to EPSG:4326 when it carries a spatial
        reference, then every tile in its bounding box that actually
        intersects the geometry is kept.
    '''
    if district_geom.GetSpatialReference():
        district_geom.TransformTo(prepare_state.EPSG4326)
    
    xxyy_extent = district_geom.GetEnvelope()
    iter = prepare_state.iter_extent_tiles(xxyy_extent, prepare_state.TILE_ZOOM)
    tiles = []

    for (coord, tile_wkt) in iter:
        tile_zxy = '{zoom}/{column}/{row}'.format(**coord.__dict__)
        tile_geom = ogr.CreateGeometryFromWkt(tile_wkt)

        if tile_geom.Intersects(district_geom):
            tiles.append(tile_zxy)
    
    return tiles
| [
"[email protected]"
] | |
2175a66e56fec5a6b38b8d8c9b58684e11ae83a5 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02721/s005838488.py | 67b4f1efce6fc3f21eede2e8af273a37d4a09818 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 344 | py | n , k , c = map(int,input().split())
s = input()
L = []
R = []
i = 0
j = n-1
while i<n and len(L)<k :
if s[i] == "o" :
L.append(i)
i += c
i += 1
while j>-1 and len(R)<k :
if s[j] == "o" :
R.append(j)
j -= c
j -= 1
R.reverse()
for x in range(k):
if R[x] == L[x]:
print(R[x]+1) | [
"[email protected]"
] | |
6528f2aef6ccb83cf7c93281d60781f7bd740da3 | 4912cbd47c19c58d142e6833911d70f5ea037357 | /question_bank/length-of-last-word/length-of-last-word.py | 802c71d7fdb456bf98226e268e3f524641dbadf5 | [
"Apache-2.0"
] | permissive | yatengLG/leetcode-python | a09a17cd9e60cafd9ff8ca9c068f5b70719c436f | 5d48aecb578c86d69835368fad3d9cc21961c226 | refs/heads/master | 2023-07-13T16:10:01.920716 | 2021-09-06T02:51:46 | 2021-09-06T02:51:46 | 286,969,109 | 13 | 6 | null | 2021-02-16T10:19:44 | 2020-08-12T09:13:02 | Python | UTF-8 | Python | false | false | 470 | py | # -*- coding: utf-8 -*-
# @Author : LG
"""
执行用时:32 ms, 在所有 Python3 提交中击败了96.39% 的用户
内存消耗:13.3 MB, 在所有 Python3 提交中击败了73.72% 的用户
解题思路:
见代码注释
"""
class Solution:
    def lengthOfLastWord(self, s: str) -> int:
        """Length of the final space-separated word in ``s`` (0 if none)."""
        trimmed = s.rstrip(' ')          # ignore trailing spaces only
        last_space = trimmed.rfind(' ')  # -1 when a single word remains
        return len(trimmed) - last_space - 1
"[email protected]"
] | |
3e99bde13b9275c37392065bcce7d9a4fb67e948 | 3de2a746243ad1cb000994a06a0f9699db9a901f | /agc016a.py | 049fcd1810826d416cc69758d1fa09b721e56213 | [] | no_license | takumi152/atcoder | 71d726ffdf2542d8abac0d9817afaff911db7c6c | ebac94f1227974aa2e6bf372e18605518de46441 | refs/heads/master | 2022-10-30T12:14:41.742596 | 2022-09-29T19:49:32 | 2022-09-29T19:49:32 | 181,502,518 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 937 | py |
def main():
buf = input()
s = buf
characters = []
for i in s:
if not i in characters:
characters.append(i)
min_iter_count = 999
for i in characters:
iter_count = 0
t = s
while True:
flag = False
new_t = ""
for j in range(len(t) - 1):
if t[j] == i:
new_t += i
elif t[j+1] == i:
new_t += i
flag = True
else:
new_t += t[j]
flag = True
if t[-1] != i:
flag = True
t = new_t
if flag:
iter_count += 1
else:
break
if iter_count < min_iter_count:
min_iter_count = iter_count
print(min_iter_count)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
171430aa6e72848779736e903cf664b836f0d045 | 9ab9d9a3883471763edbceea59a0e83170581b5f | /eggs/Parsley-1.1-py2.7.egg/terml/test/test_terml.py | efb9991a475a1bc684728f0d8bc302bd17366807 | [
"CC-BY-2.5",
"AFL-2.1",
"AFL-3.0",
"CC-BY-3.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | asmmhossain/phyG | 24dc211dad5b3e89c87ff384e841f2e98bbd52db | 023f505b705ab953f502cbc55e90612047867583 | refs/heads/master | 2022-11-21T12:43:46.172725 | 2014-02-14T12:33:08 | 2014-02-14T12:33:08 | 13,800,552 | 0 | 1 | NOASSERTION | 2020-07-25T21:05:41 | 2013-10-23T11:04:25 | Python | UTF-8 | Python | false | false | 6,108 | py | from twisted.trial import unittest
from ometa.runtime import ParseError
from terml.nodes import Tag, Term, coerceToTerm, TermMaker
from terml.parser import TermLParser, character, parseTerm
class TermMakerTests(unittest.TestCase):
    """Tests for the attribute-based TermMaker construction API."""
    def test_make(self):
        # Terms built via attribute access equal their parsed counterparts.
        m = TermMaker()
        t1 = m.Foo(1, 'a', m.Baz())
        self.assertEqual(t1, parseTerm('Foo(1, "a", Baz)'))
class ParserTest(unittest.TestCase):
    """
    Test TermL parser rules.
    """
    def getParser(self, rule):
        # Return a helper that applies a single named grammar rule to src.
        def parse(src):
            p = TermLParser(src)
            result, error = p.apply(rule)
            return result
        return parse
    def test_literal(self):
        """
        Literals are parsed to literal terms.
        """
        parse = self.getParser("literal")
        self.assertEqual(parse('"foo bar"'),
                         Term(Tag('.String.'), "foo bar", None))
        self.assertEqual(parse("'x'"),
                         Term(Tag('.char.'), 'x', None))
        self.assertEqual(parse("0xDECAFC0FFEEBAD"),
                         Term(Tag('.int.'), 0xDECAFC0FFEEBAD, None))
        self.assertEqual(parse("0755"),
                         Term(Tag('.int.'), 0755, None))
        self.assertEqual(parse("3.14159E17"),
                         Term(Tag('.float64.'), 3.14159E17, None))
        self.assertEqual(parse("1e9"),
                         Term(Tag('.float64.'), 1e9, None))
        self.assertEqual(parse("0"), Term(Tag(".int."), 0, None))
        self.assertEqual(parse("7"), Term(Tag(".int."), 7, None))
        self.assertEqual(parse("-1"), Term(Tag(".int."), -1, None))
        self.assertEqual(parse("-3.14"),
                         Term(Tag('.float64.'), -3.14, None))
        self.assertEqual(parse("3_000"),
                         Term(Tag('.int.'), 3000, None))
        self.assertEqual(parse("0.91"),
                         Term(Tag('.float64.'), 0.91, None))
        self.assertEqual(parse("3e-2"),
                         Term(Tag('.float64.'), 3e-2, None))
        self.assertEqual(parse("'\\n'"),
                         Term(Tag('.char.'), character("\n"), None))
        self.assertEqual(parse('"foo\\nbar"'),
                         Term(Tag('.String.'), "foo\nbar", None))
        self.assertEqual(parse("'\\u0061'"),
                         Term(Tag('.char.'), character("a"), None))
        self.assertEqual(parse('"z\141p"'),
                         Term(Tag('.String.'), "zap", None))
        self.assertEqual(parse('"x\41"'),
                         Term(Tag('.String.'), "x!", None))
        self.assertEqual(parse('"foo\\\nbar"'),
                         Term(Tag('.String.'), "foobar", None))
    def test_simpleTag(self):
        """
        Tags are parsed properly.
        """
        parse = self.getParser("tag")
        self.assertEqual(parse("foo"), Tag("foo"))
        self.assertEqual(parse('::"foo"'), Tag('::"foo"'))
        self.assertEqual(parse("::foo"), Tag('::foo'))
        self.assertEqual(parse("foo::baz"), Tag('foo::baz'))
        self.assertEqual(parse('foo::"baz"'), Tag('foo::"baz"'))
        self.assertEqual(parse("biz::baz::foo"), Tag('biz::baz::foo'))
        self.assertEqual(parse("foo_yay"), Tag('foo_yay'))
        self.assertEqual(parse("foo$baz32"), Tag('foo$baz32'))
        self.assertEqual(parse("foo-baz.19"), Tag('foo-baz.19'))
    def test_simpleTerm(self):
        """
        Kernel syntax for terms is parsed properly.
        """
        parse = self.getParser("baseTerm")
        self.assertEqual(parse("x"), Term(Tag("x"), None, None))
        self.assertEqual(parse("x()"), Term(Tag("x"), None, []))
        self.assertEqual(parse("x(1)"), Term(Tag("x"), None,
                                             (Term(Tag(".int."), 1, None),)))
        self.assertEqual(parse("x(1, 2)"), Term(Tag("x"), None,
                                                (Term(Tag(".int."), 1,
                                                      None),
                                                 Term(Tag(".int."), 2,
                                                      None))))
        self.assertEqual(parse("1"), Term(Tag(".int."), 1, None))
        self.assertEqual(parse('"1"'), Term(Tag(".String."), "1", None))
        # Literals may not take argument lists.
        self.assertRaises(ValueError, parse, "'x'(x)")
        self.assertRaises(ValueError, parse, '3.14(1)')
        self.assertRaises(ValueError, parse, '"foo"(x)')
        self.assertRaises(ValueError, parse, "1(2)")
    def test_fullTerm(self):
        """
        Shortcut syntax for terms is handled.
        """
        self.assertEqual(parseTerm("[x, y, 1]"), parseTerm(".tuple.(x, y, 1)"))
        self.assertEqual(parseTerm("{x, y, 1}"), parseTerm(".bag.(x, y, 1)"))
        self.assertEqual(parseTerm("f {x, y, 1}"), parseTerm("f(.bag.(x, y, 1))"))
        self.assertEqual(parseTerm("a: b"), parseTerm(".attr.(a, b)"))
        self.assertEqual(parseTerm('"a": b'), parseTerm('.attr.("a", b)'))
        self.assertEqual(parseTerm('a: [b]'), parseTerm('.attr.(a, .tuple.(b))'))
    def test_multiline(self):
        """
        Terms spread across multiple lines are parsed correctly.
        """
        single = parseTerm('foo(baz({x: "y", boz: 42}))')
        multi = parseTerm(
        """foo(
               baz({
                    x: "y",
                    boz: 42}
        ))""")
        self.assertEqual(multi, single)
    def test_leftovers(self):
        # Trailing unparsed input is an error at the offending position.
        e = self.assertRaises(ParseError, parseTerm, "foo(x) and stuff")
        self.assertEqual(e.position, 7)
    def test_unparse(self):
        # repr() of a parsed term round-trips to the source text.
        def assertRoundtrip(txt):
            self.assertEqual('term(%r)' % (txt,), repr(parseTerm(txt)))
        cases = ["1", "3.25", "f", "f(1)", "f(1, 2)", "f(a, b)",
                 "{a, b}", "[a, b]", "f{1, 2}", '''{"name": "Robert", attrs: {'c': 3}}''']
        for case in cases:
            assertRoundtrip(case)
    def test_coerce(self):
        # Python containers coerce into the equivalent parsed term.
        self.assertEqual(
            coerceToTerm({3: 4, "a": character('x'), (2, 3): [4, 5]}),
            parseTerm('{"a": \'x\', 3: 4, [2, 3]: [4, 5]}'))
| [
"[email protected]"
] | |
90fb3d0f41e8ef893dcba8eb07565e63eab33256 | 66c3ff83c3e3e63bf8642742356f6c1817a30eca | /.vim/tmp/neocomplete/include_cache/=+home=+abel=+proyectos=+django=+ventas=+ventas=+settings.py | 8daa573333992dd219d46614e2d6841793e10cab | [] | no_license | pacifi/vim | 0a708e8bc741b4510a8da37da0d0e1eabb05ec83 | 22e706704357b961acb584e74689c7080e86a800 | refs/heads/master | 2021-05-20T17:18:10.481921 | 2020-08-06T12:38:58 | 2020-08-06T12:38:58 | 30,074,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,305 | py | [{'word': 'ALLOWED_HOSTS', 'menu': '[I]', 'kind': 'v', 'abbr': 'ALLOWED_HOSTS = []'}, {'word': 'BASE_DIR', 'menu': '[I]', 'kind': 'v', 'abbr': 'BASE_DIR = os.path.dirname(os.path.dirname(__file__))'}, {'word': 'DATABASES', 'menu': '[I]', 'kind': 'v', 'abbr': 'DATABASES = {'}, {'word': 'DEBUG', 'menu': '[I]', 'kind': 'v', 'abbr': 'DEBUG = True'}, {'word': 'INSTALLED_APPS', 'menu': '[I]', 'kind': 'v', 'abbr': 'INSTALLED_APPS = ('}, {'word': 'LANGUAGE_CODE', 'menu': '[I]', 'kind': 'v', 'abbr': 'LANGUAGE_CODE = ''en-us'''}, {'word': 'MIDDLEWARE_CLASSES', 'menu': '[I]', 'kind': 'v', 'abbr': 'MIDDLEWARE_CLASSES = ('}, {'word': 'ROOT_URLCONF', 'menu': '[I]', 'kind': 'v', 'abbr': 'ROOT_URLCONF = ''ventas.urls'''}, {'word': 'STATIC_URL', 'menu': '[I]', 'kind': 'v', 'abbr': 'STATIC_URL = ''/static/'''}, {'word': 'TEMPLATE_DEBUG', 'menu': '[I]', 'kind': 'v', 'abbr': 'TEMPLATE_DEBUG = True'}, {'word': 'TIME_ZONE', 'menu': '[I]', 'kind': 'v', 'abbr': 'TIME_ZONE = ''UTC'''}, {'word': 'USE_I18N', 'menu': '[I]', 'kind': 'v', 'abbr': 'USE_I18N'}, {'word': 'USE_L10N', 'menu': '[I]', 'kind': 'v', 'abbr': 'USE_L10N'}, {'word': 'USE_TZ', 'menu': '[I]', 'kind': 'v', 'abbr': 'USE_TZ = True'}, {'word': 'WSGI_APPLICATION', 'menu': '[I]', 'kind': 'v', 'abbr': 'WSGI_APPLICATION = ''ventas.wsgi.application'''}]
| [
"[email protected]"
] | |
e843c39e8e4989e30428e9ca261411b48af05bc5 | c0450361aa707635f5bf70eff21c1235d7e60cfa | /Lessons by HoudyHo/lesson (32).py | c992dd1dcb5591096afb4678ee5bf2a1ecc56285 | [] | no_license | zarkaltair/Learn-python | f48810b86e9832f4c364c345d1fa8624f9ced683 | dd6114b5bd6cc30eff328002521041dd2be2c3c5 | refs/heads/master | 2020-04-10T05:48:51.052751 | 2019-01-23T18:48:34 | 2019-01-23T18:48:34 | 160,837,639 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 911 | py | # Class
# Lesson script: defining classes, instance attributes, methods, and class
# attributes.  Classes are deliberately redefined as the lesson progresses.
class Cat:
    def __init__(self, color, legs):
        self.color = color
        self.legs = legs
felix = Cat('ginger', 4)
print(felix.color)
rover = Cat('dog-colored', 4)
stumpy = Cat('brown', 3)
class Student:
    def __init__(self, name):
        self.name = name
test = Student('Bob')
print(test.name)
# Methods beyond __init__.
class Dog:
    def __init__(self, name, color):
        self.name = name
        self.color = color
    def bark(self):
        print('Woof!')
fido = Dog('Fido', 'brown')
print(fido.name)
fido.bark()
# Class attributes are shared by all instances.
class Dog:
    legs = 4
    def __init__(self, name, color):
        self.name = name
        self.color = color
fido = Dog('fido', 'brown')
print(fido.legs)
print(Dog.legs)
class Student:
    def __init__(self, name):
        self.name = name
    def sayHi(self):
        print('Hi from ' + self.name)
s1 = Student('Amy')
s1.sayHi()
class Rectangle:
    def __init__(self, width, height):
        self.width = width
        self.height = height
rect = Rectangle(7, 8)
# NOTE(review): Rectangle defines no ``color`` attribute, so this line
# raises AttributeError — presumably demonstrating the error on purpose,
# but confirm against the lesson text.
print(rect.color)
| [
"[email protected]"
] | |
3e7b548f6b5cdbd48c47d9c85724e93cbb569120 | 2b25aae9266437b657e748f3d6fea4db9e9d7f15 | /graphics/line/4/richard-zhan/main.py | 543aa01f31e1d50c82c08b4e4ca0d48c0b406554 | [] | no_license | Zilby/Stuy-Stuff | b1c3bc23abf40092a8a7a80e406e7c412bd22ae0 | 5c5e375304952f62667d3b34b36f0056c1a8e753 | refs/heads/master | 2020-05-18T03:03:48.210196 | 2018-11-15T04:50:03 | 2018-11-15T04:50:03 | 24,191,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 871 | py | from display import *
from draw import *
screen = new_screen()
color = [ 0, 255, 0 ]
matrix = []
#octant I
# add_edge(matrix, 0, 0, 0, XRES - 1, YRES - 75, 0 )
# add_edge(matrix, 0, 0, 0, XRES - 75, YRES - 1, 0 )
# add_edge(matrix, 0, YRES - 1, 0, XRES - 1, 75, 0 )
# add_edge(matrix, 0, YRES - 1, 0, XRES - 75, 0, 0 )
# add_edge(matrix,0,400,0,250,300,0)
# add_edge(matrix,0,300,0,250,200,0)
# add_edge(matrix,0,150,0,450,325,0)
add_edge(matrix,0,250,0,250,0,0)
add_edge(matrix,250,0,0,499,250,0)
add_edge(matrix,499,250,0,250,499,0)
add_edge(matrix,250,499,0,0,250,0)
draw_lines( matrix, screen, color )
matrix=[]
add_edge(matrix,125,125,0,375,125,0)
add_edge(matrix,375,125,0,375,375,0)
add_edge(matrix,375,375,0,125,375,0)
add_edge(matrix,125,375,0,125,125,0)
# add_edge(matrix,0,250,0,250,0,0)
color = [255,0,0]
draw_lines(matrix,screen,color)
display(screen)
| [
"[email protected]"
] | |
08388f40f96262e48a825ed8578c70f7e147a701 | 66fe4dbcb81ceb688fa557c9a05a92779bd4e263 | /config/config.py | 97c447221012cfb133c0e71153480f5577f69a13 | [] | no_license | AdamEECS/sc | 5d3e98d697dd891dfdbae910d0167a0ce1082f19 | 387930acb7af4c04b39415e923639cad458fda09 | refs/heads/master | 2021-01-01T06:28:18.465633 | 2018-08-16T07:56:35 | 2018-08-16T07:56:35 | 97,430,842 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,265 | py | from pymongo import *
import os
config_dict = dict(
USER_AVATAR_DIR='static/user_avatar/',
PRODUCT_PIC_DIR='static/product_pic/',
UPLOAD_FILE_DIR='static/files/',
PRODUCT_PIC_EXT='png',
CDN_URL='http://opguqe876.bkt.clouddn.com/',
CDN_USER_AVATAR_DIR='/user_avatar/',
CDN_PRODUCT_PIC_DIR='/product_pic/',
CDN_BUCKET='buy-suzumiya',
QINIU_CALLBACK_URL='https://buy.suzumiya.cc/callback/all',
PIC_UPLOAD_URL='https://up-z1.qbox.me/',
SEND_EMAIL_URL='https://api.mailgun.net/v3/mg.suzumiya.cc/messages',
SEND_EMAIL_FROM='Suzumiya <[email protected]>',
BASE_URL='http://localhost:8001',
MAX_CONTENT_LENGTH=2 * 1024 * 1024,
ALLOWED_UPLOAD_TYPE=['jpg', 'jpeg', 'gif', 'png', 'ico'],
PINGPP_PRIVATE_KEY_PATH=os.path.join(os.path.dirname(__file__), 'mtk_rsa.pem'),
ALIPAY_PRIVATE_KEY_PATH=os.path.join(os.path.dirname(__file__), 'mtk_rsa.pem'),
ALIPAY_PUBLIC_KEY_PATH=os.path.join(os.path.dirname(__file__), 'ali_pub.pem'),
ALIPAY_CALLBACK_URL="http://yc.miteke.com/callback/ali",
ALIPAY_RETURN_URL="http://yc.miteke.com/user/profile",
ALIPAY_APPID="2017092008837195",
)
# mongodb config
db_name = 'mongo_sc'
client = MongoClient("mongodb://localhost:27017")
db = client[db_name]
| [
"[email protected]"
] | |
beb5886b6bb03f8e0149d52f247c773ab8efa39e | 0789766b3f242835f3c4e03e573f4d2fa3ebbc5a | /my_nas/dataset/imagenet_downsample.py | c2d73f91310cc798966575e69aef00dd70867fed | [] | no_license | Anonymous-1112/anonymous | 05900a2a5feba3a48ad76847a22a8c3a3f35b2e1 | d86ec6b35b681c9220150c68bb5eb10af26f5629 | refs/heads/master | 2023-07-01T19:49:57.400134 | 2021-08-08T15:29:12 | 2021-08-08T15:36:56 | 393,964,141 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,872 | py | # -*- coding: utf-8 -*-
import os
import pickle
from PIL import Image
import numpy as np
from torchvision import transforms
from torchvision.datasets import vision
from my_nas.utils.torch_utils import Cutout
from my_nas.dataset.base import BaseDataset
class ImageNetDownsampleDataset(vision.VisionDataset):
train_list = [
"train_data_batch_1",
"train_data_batch_2",
"train_data_batch_3",
"train_data_batch_4",
"train_data_batch_5",
"train_data_batch_6",
"train_data_batch_7",
"train_data_batch_8",
"train_data_batch_9",
"train_data_batch_10"
]
test_list = [
"val_data"
]
def __init__(self, root, num_class=1000, size=16, train=True,
transform=None, target_transform=None):
super(ImageNetDownsampleDataset, self).__init__(root, transform=transform,
target_transform=target_transform)
self.train = train # training set or test set
file_list = self.train_list if self.train else self.test_list
self.num_class = num_class # the first `num_class` classes are kept
len_ = 3 * size * size
self.data = np.zeros((0, len_), dtype=np.uint8)
self.targets = []
for file_name in file_list:
file_path = os.path.join(self.root, file_name)
with open(file_path, "rb") as f:
entry = pickle.load(f)
if num_class < 1000:
mask = np.array(entry["labels"]) <= num_class
self.data = np.concatenate((self.data, entry["data"][mask]), axis=0)
self.targets.extend(list((np.array(entry["labels"]) - 1)[mask]))
else:
self.data = np.concatenate((self.data, entry["data"]), axis=0)
self.targets.extend(list(np.array(entry["labels"]) - 1))
self.data = self.data.reshape(-1, 3, size, size).transpose((0, 2, 3, 1)) # HWC for PIL
def __getitem__(self, index):
"""
Args:
index (int): Index
Returns:
tuple: (image, target) where target is index of the target class.
"""
img, target = self.data[index], self.targets[index]
# doing this so that it is consistent with all other datasets
# to return a PIL Image
img = Image.fromarray(img)
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self):
return len(self.data)
class ImageNetDownsample(BaseDataset):
NAME = "imagenet_downsample"
def __init__(self, num_class=120, size=16, relative_dir=None, cutout=None):
super(ImageNetDownsample, self).__init__(relative_dir=relative_dir)
self.cutout = cutout
self.num_class = num_class
self.size = size
# array([122.68245678, 116.65812896, 104.00708381])
imgnet_mean = [0.48110767, 0.45748286, 0.40787092]
imgnet_std = [0.229, 0.224, 0.225] # use imgnet
train_transform = transforms.Compose([
transforms.RandomCrop(16, padding=2), # follow NB201
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize(imgnet_mean, imgnet_std),
])
if self.cutout:
train_transform.transforms.append(Cutout(self.cutout))
test_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(imgnet_mean, imgnet_std),
])
self.datasets = {}
self.datasets["train"] = ImageNetDownsampleDataset(
root=self.data_dir, num_class=self.num_class, size=self.size,
train=True, transform=train_transform)
self.datasets["train_testTransform"] = ImageNetDownsampleDataset(
root=self.data_dir, num_class=self.num_class, size=self.size,
train=True, transform=test_transform)
self.datasets["test"] = ImageNetDownsampleDataset(
root=self.data_dir, num_class=self.num_class, size=self.size,
train=False, transform=test_transform)
def same_data_split_mapping(self):
return {"train_testTransform": "train"}
def splits(self):
return self.datasets
@classmethod
def data_type(cls):
return "image"
def __reduce__(self):
"""
Python 3
reduce for pickling (mainly for use with async search see trainer/async_trainer.py)
"""
return ImageNetDownsample, (self.cutout,)
def __getinitargs__(self):
"""
Python 2
getinitargs for pickling (mainly for use with async search see trainer/async_trainer.py)
"""
return (self.cutout,)
| [
"[email protected]"
] | |
54cdd7f5ce0fc2040583d0605b91c1bddb75daee | 68e0a967f52fd86e82f80dc4fd9198449f8f1030 | /doc/.src/book/scripts.py | 09eacba33e843d93599017499cf00bfdf3b8d05a | [] | no_license | htphuc/fdm-book | 4ac32a30506a83fd1ae35c2fe1934d194ea11686 | 07b15f987374b3e91d21ab14c06cfc0a79634936 | refs/heads/master | 2021-01-17T20:59:48.188917 | 2016-04-25T21:06:12 | 2016-04-25T21:06:12 | 57,400,233 | 1 | 0 | null | 2016-04-29T17:01:53 | 2016-04-29T17:01:52 | null | UTF-8 | Python | false | false | 5,340 | py | import sys, re, os, shutil, glob
chapters = "vib wave diffu trunc staggered softeng2 formulas advec".split()
chaptersdir = 'chapters'
ignored_files = '*.o *.so *.a *.pyc *.bak *.swp *~ .*~ *.old tmp* temp* .#* \\#* *.log *.dvi *.aux *.blg *.idx *.nav *.out *.toc *.snm *.vrb *.cproject *.project .DS_Store Trash'.split()
def chapter_visitor(action=None, chapters=chapters):
"""Visit dirs in chapters and call/perform action."""
if isinstance(action, str):
action = re.split('r\s*;\s*', action)
if isinstance(action, (tuple,list)):
# Wrap Unix commands and run
def action_function():
for command in action:
print command
failure = os.system(command)
if failure:
print 'failure in execution...'; sys.exit(1)
elif callable(action):
action_function = action
prefix = os.path.join(os.pardir, chaptersdir)
thisdir = os.getcwd()
for chapter in chapters:
destination = os.path.join(prefix, chapter)
if os.path.isdir(destination):
print 'visiting directory', destination
os.chdir(destination)
action_function()
os.chdir(thisdir)
else:
print '\n*** error: directory %s does not exist!' % destination
sys.exit(1)
def clean():
"""
Remove all files that can be regenerated.
Method: run common ../clean.sh in all chapter dirs +
doconce clean in this book dir.
"""
chapter_visitor('bash -x ../clean.sh')
os.system('doconce clean')
# Remove reduant files
redundant = glob.glob('newcommands*.tex')
for filename in redundant:
os.remove(filename)
def compile_chapters():
"""
Compile all chapters as stand-alone PDF documents.
Method: run make.sh in all chapter dirs.
"""
chapter_visitor('rm -rf tmp*; bash -x make.sh')
def make_links(chapters=chapters):
"""Make links to all src-* and fig-* dirs for all chapters."""
prefix = os.path.join(os.pardir, chaptersdir)
for chapter in chapters:
destination = os.path.join(prefix, chapter)
subdirs = [tp + '-' + chapter for tp in 'fig', 'src', 'mov', 'exer']
for subdir in subdirs:
if not os.path.islink(subdir):
dest_subdir = os.path.join(destination, subdir)
if os.path.isdir(dest_subdir):
os.symlink(dest_subdir, subdir)
print 'created local link %s to %s' % (subdir, destination)
# Sometimes manual additions are needed here, e.g.,
#os.symlink(os.path.join(prefix, 'tech', 'fig2'), 'fig2')
def spellcheck():
"""Visit each individual chapter and spellcheck all *.do.txt in it."""
chapter_visitor('rm -rf tmp*; doconce spellcheck -d .dict4spell.txt *.do.txt')
def pack_src(root='src', tarfile='book-examples.tar.gz', chapters=chapters):
"""
Publish programs, libraries, data, etc. from the book.
Method: make new directory tree root, copy all src-name dirs
from all chapters to name.
This root tree can be synced to an external repo or packed
as a tar or zip file.
"""
shutil.rmtree(root)
os.mkdir(root)
os.chdir(root)
prefix = os.path.join(os.pardir, os.pardir, chaptersdir)
thisdir = os.getcwd()
for chapter in chapters:
src = 'src-' + chapter
# Clean up redundant files that we will not publish
destination = os.path.join(prefix, src)
if os.path.isdir(destination):
os.chdir(destination)
for file_spec in ignored_files:
for filename in glob.glob(file_spec):
os.remove(filename)
print 'removed', 'src-%s/%s' % (chapter, filename)
os.chdir(thisdir)
# Copy files
shutil.copytree(destination, chapter)
print '\ndirectory tree with source code files for the book:', root
os.chdir(os.pardir)
os.system('tar czf %s %s' % (tarfile, root))
print 'tarfile:', tarfile
def externaldocuments():
# Find all candidate documents in ../chapters/*
prefix = os.path.join(os.pardir, chaptersdir)
#dirs = [name for name in os.listdir(prefix)
# if os.path.isdir(os.path.join(prefix, name))]
dirs = chapters[:]
docs = []
for nickname in dirs:
mainfiles = glob.glob(os.path.join(prefix, nickname, 'main_*.do.txt'))
for mainfile in mainfiles:
docs.append((nickname, mainfile[:-7])) # drop .do.txt
mainfiles = [mainfile for nickname, mainfile in docs]
# Need to visit all dirs, remove that dir from the list and subst
for mainfile in mainfiles:
other_mainfiles = mainfiles[:] # copy
other_mainfiles.remove(mainfile)
# Strip off ../chapters to ../
other_mainfiles = ['../' + mainfile[12:] for mainfile in mainfiles]
f = open(mainfile + '.do.txt', 'r')
text = f.read()
f.close()
text = re.sub('^# Externaldocuments:.*', '# Externaldocuments: ' +
', '.join(other_mainfiles), text, flags=re.MULTILINE)
print 'subst in', mainfile
f = open(mainfile + '.do.txt', 'w')
f.write(text)
f.close()
print 'updated # Externaldocuments in', mainfile, 'with\n ', ', '.join(other_mainfiles)
| [
"[email protected]"
] | |
9569e88a4594523c588bf67478cf3e69e5fa07d3 | eae3d77ac72c168cee7701462f1fc45d7d4dcd91 | /Tree/5176_이진탐색.py | 1d5de72b6fe0d9ddc3f43d237019f001829c7471 | [] | no_license | ByeongjunCho/Algorithm-TIL | ed2f018d50bd2483bd1175ff9bf7e91913c14766 | ad79125a1498915fe97c1d57ee6860b06c410958 | refs/heads/master | 2022-07-19T15:12:23.689319 | 2020-05-18T08:37:09 | 2020-05-18T08:37:09 | 256,399,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | T = int(input())
for tc in range(1, T+1):
N = int(input())
V = [0] * (N+1)
L = [0] * (N+1)
R = [0] * (N+1)
# 이진트리 구현
i = 1
while (i << 1) < N+1:
L[i] = i << 1
if (i << 1) + 1 < N+1:
R[i] = (i << 1) + 1
i += 1
| [
"[email protected]"
] | |
b92ace36f8eaa5fa5bd1a781ed1656742c2db3c5 | a2c90d183ac66f39401cd8ece5207c492c811158 | /Solving_Problem/daily_222/1111/17140.py | 524cee2fdd156023f681b4bf34cde15944c9a1c3 | [] | no_license | kwoneyng/TIL | 0498cfc4dbebbb1f2c193cb7c9459aab7ebad02a | c6fbaa609b2e805f298b17b1f9504fd12cb63e8a | refs/heads/master | 2020-06-17T11:53:38.685202 | 2020-03-18T01:29:36 | 2020-03-18T01:29:36 | 195,916,103 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,924 | py | from collections import deque
from heapq import heappop, heappush
def rcal():
global y
ls = []
mx = 0
for i in range(x):
su = len(bd[i])
cnt = 0
for j in range(1,101):
if bd[i].count(j):
heappush(ls,[bd[i].count(j),j])
cnt += bd[i].count(j)
if cnt == su:
break
bd[i] = []
for _ in range(len(ls)):
many, su = heappop(ls)
bd[i].append(su)
bd[i].append(many)
mx = max(mx, len(bd[i]),y)
for i in range(x):
for _ in range(mx-len(bd[i])):
bd[i].append(0)
y = mx
def ccal():
global x
new = [[] for i in range(y)]
ls = []
re_bd = []
mx = 0
for i in range(y):
cnt = 0
bls =[]
for j in range(x):
bls.append(bd[j][i])
su = len(bls)-bls.count(0)
for k in range(1,101):
if bls.count(k):
heappush(ls,[bls.count(k),k])
cnt += bls.count(k)
if cnt == su:
break
for _ in range(len(ls)):
many, su = heappop(ls)
new[i].append(su)
new[i].append(many)
mx = max(mx, len(new[i]),x)
for i in range(y):
for _ in range(mx-len(new[i])):
new[i].append(0)
x = mx
for i in range(x):
ls = []
for j in range(y):
ls.append(new[j][i])
re_bd.append(ls)
return re_bd
def debug():
for i in bd:
print(i)
print('-------------------------')
r,c,k = map(int,input().split())
r -= 1
c -= 1
x,y = 3,3
bd = [list(map(int,input().split())) for i in range(x)]
for i in range(101):
if r < x and c < y:
if bd[r][c] == k:
print(i)
break
if x >= y:
rcal()
else:
bd = ccal()
# debug()
else:
print(-1) | [
"[email protected]"
] | |
efa5c09e00baf175a267323493146e4a079511df | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_5/sctjas002/question4.py | e4b18527ed1d0802f17d9b728e65f1ab2eec2bbd | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | import math
(fx)=input('Enter a function f(x):\n')
for y in range (10,-11,-1):
for x in range (-10,11):
if y==round((eval((fx)))):
print('o',end='')
elif y==0 and x==0:
print('+',end='')
elif y==0:
print('-',end='')
elif x==0:
print('|' ,end='')
else:
print(' ',end='')
print()
| [
"[email protected]"
] | |
db99a7fae497a54bcf8582832888bcb9835fca74 | 30d1902232eb9ddb84fdf5404a3a1dfd6232406a | /wxpython/test/sxn.spec | 9fb6f075ebf9e03cbe029bbde4b5335d0aaab5e1 | [] | no_license | sxnys/mypython | c3a768b054077ed97ff1e2fac31cb93f0765deb3 | de48cd883ad2de3320cb0c6b46b451ebb2311ac7 | refs/heads/master | 2022-11-07T15:11:48.936412 | 2019-04-14T12:04:30 | 2019-04-14T12:04:30 | 119,686,106 | 0 | 1 | null | 2022-10-31T05:13:00 | 2018-01-31T12:46:06 | Python | UTF-8 | Python | false | false | 835 | spec | # -*- mode: python -*-
block_cipher = None
a = Analysis(['sxn.py'],
pathex=['F:\\Python\\wxpython\\test'],
binaries=[],
datas=[],
hiddenimports=[],
hookspath=[],
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
exclude_binaries=True,
name='sxn',
debug=False,
strip=False,
upx=True,
console=True )
coll = COLLECT(exe,
a.binaries,
a.zipfiles,
a.datas,
strip=False,
upx=True,
name='sxn')
| [
"[email protected]"
] | |
80d512046627f21ec6e5b8db3615ee5f70869009 | f3d40fcd992b38132ff9634d2b76988a99cefb3b | /pycoinnet/util/BlockChainStore.py | 86653b8c71a73bfe27c1a3f2bfbd1b35acc4ec52 | [
"MIT"
] | permissive | richardkiss/pycoinnet | b9b999dbf0401722e4550c5926197881e5b13102 | 57a7f439f0b4c9102cd25f95c0b7e4db00fe2f5b | refs/heads/master | 2022-04-27T19:15:39.098602 | 2021-12-25T23:26:24 | 2021-12-25T23:26:24 | 16,194,216 | 117 | 56 | MIT | 2021-12-25T23:26:24 | 2014-01-24T03:43:42 | Python | UTF-8 | Python | false | false | 2,040 | py | import logging
import os
class FakeHeader:
def __init__(self, h, previous_block_hash):
self.h = h
self.previous_block_hash = previous_block_hash
self.difficulty = 1
def hash(self):
return self.h
def __repr__(self):
return "%s (parent %s)" % (self.h, self.previous_block_hash)
def __eq__(self, other):
return self.h == other.h and self.previous_block_hash == other.previous_block_hash
def __hash__(self):
return self.h.__hash__()
class BlockChainStore:
BLOCK_HASHES_PATH = "locked_block_hashes.bin"
def __init__(self, dir_path, parent_to_0=b'\0' * 32):
self.dir_path = dir_path
self.parent_to_0 = parent_to_0
def block_tuple_iterator(self):
try:
with open(os.path.join(self.dir_path, self.BLOCK_HASHES_PATH), "rb") as f:
prev_hash = self.parent_to_0
while 1:
d = f.read(16384)
if len(d) == 0:
return
while len(d) >= 32:
the_hash = d[:32]
yield (the_hash, prev_hash, 1)
prev_hash = the_hash
d = d[32:]
except Exception:
pass
def headers(self):
for the_hash, prev_hash, weight in self.block_tuple_iterator():
yield FakeHeader(the_hash, prev_hash)
def did_lock_to_index(self, block_tuple_list, start_index):
with open(os.path.join(self.dir_path, self.BLOCK_HASHES_PATH), "a+b") as f:
pass
with open(os.path.join(self.dir_path, self.BLOCK_HASHES_PATH), "r+b") as f:
f.seek(start_index*32)
count = 0
# ## TODO: make sure the one we're writing is in the right place
for the_hash, parent_hash, weight in block_tuple_list:
f.write(the_hash)
count += 1
logging.debug("wrote %d items to block chain store at %s", count, self.dir_path)
| [
"[email protected]"
] | |
308b6b16a55851f143ffb7afe1ce0b0fa3f85bf3 | e254c72d3fd11306c8625c5d8ad8ac394eabc6c6 | /06.scrapy/AppleDailySearchMongo/AppleDailySearch/settings.py | 68673e439c56176af474067092b73ea14f949c88 | [] | no_license | Edward83528/crawlerToMachinLearningAndBot | 87c7ea92779b949ad5015612a4e70275becab480 | 82818137b517f4c5a856535f83a8cb8b211da8aa | refs/heads/master | 2022-11-06T19:41:20.473933 | 2020-07-04T14:01:07 | 2020-07-04T14:01:07 | 268,072,162 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,426 | py | # -*- coding: utf-8 -*-
# Scrapy settings for AppleDailySearch project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'AppleDailySearch'
SPIDER_MODULES = ['AppleDailySearch.spiders']
NEWSPIDER_MODULE = 'AppleDailySearch.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'AppleDailySearch (+http://www.yourdomain.com)'
USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'
# Obey robots.txt rules
ROBOTSTXT_OBEY = False
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'AppleDailySearch.middlewares.MyCustomSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'AppleDailySearch.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'AppleDailySearch.pipelines.SomePipeline': 300,
#}
ITEM_PIPELINES = {
'AppleDailySearch.pipelines.JsonWithEncodingPipeline': 300,
}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"[email protected]"
] | |
4b05388caf49263247f5a9216da4d2033fdccc11 | c7f353cc14439fc47d351bd29258c9453cf16f32 | /h2o-py/tests/testdir_munging/pyunit_ischaracter_isnumeric.py | aa42b41611f89e9681ba6234c5bba7e04441ba66 | [
"Apache-2.0"
] | permissive | tamseo/h2o-3 | a131f40a0cd7f0c52d359b06b300f87d627cfd83 | cc59fa0d97325796c5a57085661cea7b34fa81e9 | refs/heads/master | 2020-12-11T09:27:54.589687 | 2015-10-19T21:56:12 | 2015-10-19T21:56:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 639 | py | import sys
sys.path.insert(1, "../../")
import h2o, tests
import random
def pyunit_ischaracter_isnumeric():
iris = h2o.import_file(tests.locate("smalldata/iris/iris.csv"))
assert iris[0].isnumeric(), "Expected the first column of iris to be numeric"
assert not iris[0].ischaracter(), "Expected the first column of iris to be numeric"
assert not iris[4].isnumeric(), "Expected the last column of iris to be character"
iris[4] = iris[4].ascharacter()
assert iris[4].isstring(), "Expected the last column of iris to be a string"
if __name__ == "__main__":
tests.run_test(sys.argv, pyunit_ischaracter_isnumeric)
| [
"[email protected]"
] | |
8dfe9a9df3bccbd5d817c8705b15fc06fd4569ce | ae06af824e864fab8d33f695ddb612e7867ab92f | /dashboard/dashboard/pinpoint/models/quest/read_value.py | be0fda52522ba5143cfe3a50720310b2db79bc77 | [
"BSD-3-Clause"
] | permissive | takingmynetback/catapult | 49402759c34dd07e424b47f4c9ec824dd1744526 | f718fb12b8cfd16b07509674747abf56cf330ac8 | refs/heads/master | 2020-03-10T18:46:57.367789 | 2018-04-13T14:20:21 | 2018-04-13T15:06:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,183 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
from dashboard.common import histogram_helpers
from dashboard.pinpoint.models.quest import execution
from dashboard.pinpoint.models.quest import quest
from dashboard.services import isolate
from tracing.value import histogram_set
from tracing.value.diagnostics import diagnostic_ref
from tracing.value.diagnostics import reserved_infos
class ReadValueError(Exception):
pass
class ReadHistogramsJsonValue(quest.Quest):
def __init__(self, hist_name, tir_label=None, story=None, statistic=None):
self._hist_name = hist_name
self._tir_label = tir_label
self._story = story
self._statistic = statistic
def __eq__(self, other):
return (isinstance(other, type(self)) and
self._hist_name == other._hist_name and
self._tir_label == other._tir_label and
self._story == other._story and
self._statistic == other._statistic)
def __str__(self):
return 'Values'
def Start(self, change, isolate_hash):
del change
return _ReadHistogramsJsonValueExecution(self._hist_name, self._tir_label,
self._story, self._statistic,
isolate_hash)
@classmethod
def FromDict(cls, arguments):
chart = arguments.get('chart')
tir_label = arguments.get('tir_label')
trace = arguments.get('trace')
statistic = arguments.get('statistic')
return cls(chart, tir_label, trace, statistic)
class _ReadHistogramsJsonValueExecution(execution.Execution):
def __init__(self, hist_name, tir_label, story, statistic, isolate_hash):
super(_ReadHistogramsJsonValueExecution, self).__init__()
self._hist_name = hist_name
self._tir_label = tir_label
self._story = story
self._statistic = statistic
self._isolate_hash = isolate_hash
self._trace_urls = []
def _AsDict(self):
if not self._trace_urls:
return {}
return {'traces': self._trace_urls}
def _Poll(self):
# TODO(simonhatch): Switch this to use the new perf-output flag instead
# of the chartjson one. They're functionally equivalent, just new name.
histogram_dicts = _RetrieveOutputJson(
self._isolate_hash, 'chartjson-output.json')
histograms = histogram_set.HistogramSet()
histograms.ImportDicts(histogram_dicts)
histograms.ResolveRelatedHistograms()
matching_histograms = histograms.GetHistogramsNamed(self._hist_name)
# Get and cache any trace URLs.
unique_trace_urls = set()
for hist in histograms:
trace_urls = hist.diagnostics.get(reserved_infos.TRACE_URLS.name)
# TODO(simonhatch): Remove this sometime after May 2018. We had a
# brief period where the histograms generated by tests had invalid
# trace_urls diagnostics. If the diagnostic we get back is just a ref,
# then skip.
# https://github.com/catapult-project/catapult/issues/4243
if trace_urls and not isinstance(
trace_urls, diagnostic_ref.DiagnosticRef):
unique_trace_urls.update(trace_urls)
sorted_urls = sorted(unique_trace_urls)
self._trace_urls = [
{'name': t.split('/')[-1], 'url': t} for t in sorted_urls]
# Filter the histograms by tir_label and story. Getting either the
# tir_label or the story from a histogram involves pulling out and
# examining various diagnostics associated with the histogram.
tir_label = self._tir_label or ''
matching_histograms = [
h for h in matching_histograms
if tir_label == histogram_helpers.GetTIRLabelFromHistogram(h)]
# If no story is supplied, we're looking for a summary metric so just match
# on name and tir_label. This is equivalent to the chartjson condition that
# if no story is specified, look for "summary".
if self._story:
matching_histograms = [
h for h in matching_histograms
if self._story == _GetStoryFromHistogram(h)]
# Have to pull out either the raw sample values, or the statistic
result_values = []
for h in matching_histograms:
result_values.extend(self._GetValuesOrStatistic(h))
if not result_values and self._hist_name:
name = 'histogram: %s' % self._hist_name
if tir_label:
name += ' tir_label: %s' % tir_label
if self._story:
name += ' story: %s' % self._story
raise ReadValueError('Could not find values matching: %s' % name)
self._Complete(result_values=tuple(result_values))
def _GetValuesOrStatistic(self, hist):
if not self._statistic:
return hist.sample_values
if not hist.sample_values:
return []
# TODO(simonhatch): Use Histogram.getStatisticScalar when it's ported from
# js.
if self._statistic == 'avg':
return [hist.running.mean]
elif self._statistic == 'min':
return [hist.running.min]
elif self._statistic == 'max':
return [hist.running.max]
elif self._statistic == 'sum':
return [hist.running.sum]
elif self._statistic == 'std':
return [hist.running.stddev]
elif self._statistic == 'count':
return [hist.running.count]
raise ReadValueError('Unknown statistic type: %s' % self._statistic)
def _ResultValuesFromHistogram(buckets):
total_count = sum(bucket['count'] for bucket in buckets)
result_values = []
for bucket in buckets:
# TODO: Assumes the bucket is evenly distributed.
bucket_mean = (bucket['low'] + bucket.get('high', bucket['low'])) / 2
if total_count > 10000:
bucket_count = 10000 * bucket['count'] / total_count
else:
bucket_count = bucket['count']
result_values += [bucket_mean] * bucket_count
return tuple(result_values)
class ReadGraphJsonValue(quest.Quest):
def __init__(self, chart, trace):
self._chart = chart
self._trace = trace
def __eq__(self, other):
return (isinstance(other, type(self)) and
self._chart == other._chart and
self._trace == other._trace)
def __str__(self):
return 'Values'
def Start(self, change, isolate_hash):
del change
return _ReadGraphJsonValueExecution(self._chart, self._trace, isolate_hash)
@classmethod
def FromDict(cls, arguments):
chart = arguments.get('chart')
trace = arguments.get('trace')
if not (chart or trace):
return None
if chart and not trace:
raise TypeError('"chart" specified but no "trace" given.')
if trace and not chart:
raise TypeError('"trace" specified but no "chart" given.')
return cls(chart, trace)
class _ReadGraphJsonValueExecution(execution.Execution):
def __init__(self, chart, trace, isolate_hash):
super(_ReadGraphJsonValueExecution, self).__init__()
self._chart = chart
self._trace = trace
self._isolate_hash = isolate_hash
def _AsDict(self):
return {}
def _Poll(self):
graphjson = _RetrieveOutputJson(self._isolate_hash, 'chartjson-output.json')
if self._chart not in graphjson:
raise ReadValueError('The chart "%s" is not in the results.' %
self._chart)
if self._trace not in graphjson[self._chart]['traces']:
raise ReadValueError('The trace "%s" is not in the results.' %
self._trace)
result_value = float(graphjson[self._chart]['traces'][self._trace][0])
self._Complete(result_values=(result_value,))
def _RetrieveOutputJson(isolate_hash, filename):
# TODO: Plumb isolate_server through the parameters. crbug.com/822008
server = 'https://isolateserver.appspot.com'
output_files = json.loads(isolate.Retrieve(server, isolate_hash))['files']
if filename not in output_files:
raise ReadValueError("The test didn't produce %s." % filename)
output_json_isolate_hash = output_files[filename]['h']
return json.loads(isolate.Retrieve(server, output_json_isolate_hash))
def _GetStoryFromHistogram(hist):
stories = hist.diagnostics.get(reserved_infos.STORIES.name)
if stories and len(stories) == 1:
return list(stories)[0]
return None
| [
"[email protected]"
] | |
00ee6e8b2941d6e3cd3d1713cf36490b5754624e | 28cab1ef484a5796fc9b0897043e918f9a28e650 | /account/urls/user.py | caca4ac46ecc06a22eb78a92276ad522c397c750 | [] | no_license | bxxfighting/dalangshen | 12cb58d2078804327dbf7a01be0fc2a0d27f4495 | e174147b8778c188941d5fd0f5e33de65afc8b00 | refs/heads/main | 2023-01-15T08:07:57.429342 | 2020-11-16T03:49:34 | 2020-11-16T03:49:34 | 313,184,879 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 547 | py | from django.urls import path
from account.apis import user as user_api
urlpatterns = [
path('user/login/', user_api.LoginApi.as_view()),
path('user/logout/', user_api.LogoutApi.as_view()),
path('user/', user_api.UserApi.as_view()),
path('user/current/', user_api.CurrentUserApi.as_view()),
path('user/list/', user_api.ListUserApi.as_view()),
path('user/create/', user_api.CreateUserApi.as_view()),
path('user/update/', user_api.UpdateUserApi.as_view()),
path('user/delete/', user_api.DeleteUserApi.as_view()),
]
| [
"[email protected]"
] | |
8b4246df4e9e8bb970c0809d972016ef7188b9f1 | b8d7c4e3476aae5c3bba7ffa28311f84fda5af9e | /main/apps/cart/views.py | 0c406e7f828ea18dc57dda560f00ccd7024a78e5 | [] | no_license | zhongzhiqiang/hotel-api | 1744b8ecb63c4626f7a90f6f04f073aab052b312 | 25703713d0e8ab2314e07e983b98506a3551e762 | refs/heads/master | 2020-03-26T08:53:06.776003 | 2019-01-20T09:23:39 | 2019-01-20T09:23:39 | 144,724,134 | 0 | 0 | null | 2018-10-12T13:29:20 | 2018-08-14T13:28:51 | Python | UTF-8 | Python | false | false | 2,081 | py | # coding:utf-8
# Time : 2018/10/15 下午10:47
# Author : Zhongzq
# Site :
# File : views.py
# Software: PyCharm
from __future__ import unicode_literals
from rest_framework import mixins, viewsets, status
from rest_framework.response import Response
from rest_framework.decorators import list_route
from main.apps.cart import serializers
from main.models import Cart
class CartViews(mixins.CreateModelMixin,
mixins.ListModelMixin,
viewsets.GenericViewSet):
"""
create:
在自己的购物车新增一件商品
如果购物车有一样的商品会合并.如果传递数字小于等于0 则会删除
list:
返回当前用户的购物车
update:
更新购物车商品某个商品
empty_cart:
清空购物车。什么都不用传递。直接post
"""
queryset = Cart.objects.all()
serializer_class = serializers.CartSerializers
def get_queryset(self):
return self.queryset.filter(consumer=self.request.user.consumer)
def perform_create(self, serializer):
serializer.save(consumer=self.request.user.consumer)
def create(self, request, *args, **kwargs):
post_data = request.data
if post_data.get("nums") <= 0:
cart = self.queryset.filter(goods__id=post_data.get("goods")).first()
if cart:
cart.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
else:
return Response(status=status.HTTP_400_BAD_REQUEST, data={"non_field_errors": "传递错误"})
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
self.perform_create(serializer)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
@list_route(methods=["POST"])
def empty_cart(self, request, *args, **kwargs):
self.get_queryset().delete()
return Response(status=status.HTTP_204_NO_CONTENT)
| [
"[email protected]"
] | |
de499b1d1ee6eebeb74c84cbf98ec9a1e9bfa0ad | 84bd24e7aba23c7775f52d51c754f14601e28b61 | /cars/migrations/0015_auto_20201222_0911.py | 53cd0970e12348b27aef4a29fb5a55ef15ccf7ed | [] | no_license | hamzaumar8/sandvet | c0ad473e8f2f97d1c5bf5104e034e731ac0a0add | 7f02d24f1b50cd4f64beff618b6d9c508b7a42d4 | refs/heads/master | 2023-02-18T01:28:25.252360 | 2021-01-18T19:26:39 | 2021-01-18T19:26:39 | 310,844,181 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | # Generated by Django 3.0.6 on 2020-12-22 01:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('cars', '0014_auto_20201222_0911'),
]
operations = [
migrations.AlterField(
model_name='schoolimage',
name='school',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='schoolimage', to='cars.School'),
),
]
| [
"[email protected]"
] | |
1cd7f7fe2262f547b545bce5583d232fd3056bcb | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/prepositions/_beside.py | 412a30036868081724ff1297f7950ab0b9365210 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py |
#calss header
class _BESIDE():
def __init__(self,):
self.name = "BESIDE"
self.definitions = [u'at the side of, next to: ', u'compared to another person or thing: ', u'to be in no way connected to the subject that is being discussed: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'prepositions'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
a807a1a843bf88cf36512e099d3aaca3261e2f3e | de9b8b7192a0a81e9249823bb2b86f0b7e452863 | /.history/main_20171106232335.py | 96c596fc3fcc185c546af76e74da5916cad83166 | [
"MIT"
] | permissive | reecebenson/uwe-dadsa-tennis-a | f5eaeb1b96d4e61f29279514e68eeea8ad6533db | d0763f819b300fcd0ce27041f5bc4ef0519c00bf | refs/heads/master | 2023-07-08T16:13:23.963348 | 2017-11-30T12:07:01 | 2017-11-30T12:07:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,056 | py | # DADSA - Assignment 1
# Reece Benson
import random
from classes import Menu as Menu
from classes import Handler as Handler
class App():
# Define the variables we will be using
debug = True
handler = None
# Define all of the properties we will need to use
def __init__(self):
# Load our handler
self.handler = Handler.Handler(self)
self.handler.load()
# Generate rounds
self.generate_rounds()
# Hold the program
self.exit()
# Generate our rounds from our player list from scratch
def generate_rounds(self):
# Write our new data to memory
for seasonId in self.handler.get_seasons():
season = self.handler.get_season(seasonId)
players = season.players()
# Our Round Data should be completely empty
round_data = { }
# Generate our rounds
for gender in players:
# Generate 'x' amount of rounds
for r in range(season.settings()['round_count']):
# Default Round Cap
round_cap = 3
# Create our gendered rounds
if(not gender in round_data):
# Do we have a Round Cap overrider for this gender?
if(gender + "_cap" in season.settings()):
roundCap = season.settings()[gender + "_cap"]
# Update our round data
round_data.update({ "round_"+str(r): { gender: [ { "_roundCap": round_cap } ] } })
# Create our match data from players
rand_players = random.sample(players[gender], len(players[gender]))
for i in range(int(len(rand_players) / 2 )):
# Grab our versus players
p_one = rand_players[i * 2]
p_two = rand_players[(i * 2) + 1]
# Generate some scores
p_one_score = random.randint(0, round_cap - 1)
p_two_score = random.randint(0, round_cap - 1)
# Make a random player the winner
who = random.randint(0, 1)
if(who == 0): p_one_score = round_cap
else: p_two_score = round_cap
# Append our random data as a Match
#round_data[gender].append({ p_one.name(): p_one_score, p_two.name(): p_two_score })
round_data[gender]["round_"+str(r)].append(Match.Match("round_"+str(r), p_one, p_two, p_one_score, p_two_score))
# Set our Round Data to our season
season.set_rounds_raw(round_data)
# End of generate_rounds()
# A method which exits the program after the user has pressed the Return key
def exit(self):
input(">>> Press <Return> to terminate the program")
exit()
App() | [
"[email protected]"
] | |
9bac4c8027b6b8102d2288a4ae7b4d617d5fded3 | 0fccee4c738449f5e0a8f52ea5acabf51db0e910 | /genfragments/EightTeV/LongLivedChi0ToMuQQ_MSquark_1500_MChi_494_TuneZ2Star_8TeV_pythia6_cff.py | 969c146bfb5a013812585e4739862cc042491409 | [] | no_license | cms-sw/genproductions | f308ffaf3586c19b29853db40e6d662e937940ff | dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4 | refs/heads/master | 2023-08-30T17:26:02.581596 | 2023-08-29T14:53:43 | 2023-08-29T14:53:43 | 11,424,867 | 69 | 987 | null | 2023-09-14T12:41:28 | 2013-07-15T14:18:33 | Python | UTF-8 | Python | false | false | 2,634 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.PythiaUEZ2starSettings_cfi import *
source = cms.Source("EmptySource")
generator = cms.EDFilter("Pythia6GeneratorFilter",
pythiaHepMCVerbosity = cms.untracked.bool(False),
maxEventsToPrint = cms.untracked.int32(0),
pythiaPylistVerbosity = cms.untracked.int32(0),
filterEfficiency = cms.untracked.double(1.0),
crossSection = cms.untracked.double(0.0001388),
comEnergy = cms.double(8000.0),
UseExternalGenerators = cms.untracked.bool(False),
PythiaParameters = cms.PSet(
pythiaUESettingsBlock,
pythiaParameters = cms.vstring(
'MSTJ(22)=1 ! Decay all unstable particles',
'MSTP(95)=0 ! Disable colour reconnection, since it can put colour strings between widely separated partons',
'MSEL=0',
'MSUB(271)=1 ! Squark pair production',
'MSUB(272)=1',
'MSUB(273)=1',
'MSUB(274)=1',
'MSUB(275)=1',
'MSUB(276)=1',
'MSUB(277)=1',
'MSUB(278)=1',
'MSUB(279)=1',
'MSUB(280)=1',
'IMSS(1)=1 ! General MSSM simultaion',
'RMSS(2)=5000. ! M2 mass',
'RMSS(3)=5000. ! M3 mass',
'RMSS(4)=800. ! mu parameter',
'RMSS(5)=2. ! tan Beta',
'RMSS(6)=5000. ! Left slepton mass',
'RMSS(7)=5000. ! Right slepton mass',
'RMSS(10)=5000. ! Left squark mass for third generation',
'RMSS(11)=5000. ! Right sbottom mass',
'RMSS(12)=5000. ! Right stop mass',
'RMSS(13)=5000. ! Left stau mass',
'RMSS(14)=5000. ! Right stau mass',
'IMSS(52)=3 ! Turn on Lepton number violating LQD decay channels with all couplings set to zero',
'RVLAMP(2,1,1)=0.00001 ! Set lambda Prime(2,1,1)',
'MDME(2241,1)=0 ! Turn off LQD decays to neutrinos',
'MDME(2242,1)=0 ! Turn off LQD decays to neutrinos',
'RMSS(1)=500 ! M1 mass',
'RMSS(8)=1500 ! Left squark mass',
'RMSS(9)=1500 ! Right squark mass'
),
parameterSets = cms.vstring('pythiaUESettings',
'pythiaParameters')
)
)
ProductionFilterSequence = cms.Sequence(generator)
| [
"[email protected]"
] | |
e64c699df93ca5619fa36bd10f267b0786259b19 | b6b28e1588050597366907223bfcb71464d76734 | /lr/minibatch_sgd/data_process/read_data.py | 8f8d526e7b6cb9a6d1cef6757b7a8cac94cc8fb5 | [
"MIT"
] | permissive | DiracSea/project-sxl | ea8af63643a2547493c32c83dc297180c072bd01 | f458bec818d55f80a5eda461316a22d843fef753 | refs/heads/master | 2020-03-10T04:08:42.466142 | 2018-05-20T05:03:13 | 2018-05-20T05:03:13 | 129,184,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,868 | py | import numpy as np
import random
from .db import *
from .tool.combine import first_stack
from .tool.sperate import split_num
#all array
def read_rand_data(batchsize,table,db):#yield batch
rand = conn_rand(db,table,'112.74.45.185',3306,'root','opal123456!@#')###
counter = 0
size = 0
train_num, valid_num = split_num(batchsize)
for row in rand.export():
if size%batchsize == 0:
T = [];V = [];flag = 1;flag1 = 1
if row:
row = np.array(row)
counter += 1
size += 1
if size%batchsize != 0:
if counter%batchsize < train_num:
T,flag = first_stack(T,row,flag)
else:
V,flag1 = first_stack(V,row,flag1)
else:
yield T,V
else:
yield T,V
def del_label(table,db):
block = conn_block(db,table,'112.74.45.185',3306,'root','opal123456!@#')###
for b in block.export():
a = np.array(b)
yield a[:,1:]
def read_single_block(blank,table,db):
for block in del_label(table,db):
if block!= np.array([]):
batchsize = len(block)
train_num, valid_num = split_num(batchsize)
T = [];V = [];flag = 1;flag1 = 1
np.random.shuffle(block)
counter = 0
for row in block:
counter += 1
if counter%batchsize < train_num:
T,flag = first_stack(T,row,flag)
else:
V,flag1 = first_stack(V,row,flag1)
yield T,V#batch
def read_all_block(table,db):
for block in del_label(table,db):
if block!= np.array([]):
seed = int(random.random()*10)
if(seed < 7):
yield block,"train"
else:
yield block,"valid"
| [
"[email protected]"
] | |
61b7547ed5510ee1d2ee0d78be17f4572f61d01e | 1d717c797e93b451f7da7c810a0fb4075b1050d5 | /src/preprocessors/naive_preprocessor.py | 246a888db14a6870701bf64b6726d191337ee985 | [] | no_license | jessie0624/nlp-task | 32338b08051a3ea192db2bf74c9c969bdff1f6ad | aaeeed86341356d9fd061664f6f7bccf2ac353d0 | refs/heads/master | 2023-01-24T12:06:13.323646 | 2020-12-10T08:38:23 | 2020-12-10T08:38:23 | 292,151,135 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,565 | py | '''
@description: 最简单的预处理
'''
import pandas as pd
from tqdm import tqdm
from src.base import BasePreprocessor, units
from src.preprocessors import apply_on_df_columns
from src.tools.build_unit import build_vocab_unit, chain_transform
tqdm.pandas()
class NaivePreprocessor(BasePreprocessor):
"""Define Naive preprocessors"""
def fit(self, data: pd.DataFrame, columns: list, verbose: int=1):
func = chain_transform(self._default_units())
# 应用所有的是转换
data = apply_on_df_columns(data, columns, func, verbose=verbose)
vocab_unit = build_vocab_unit(data, columns=columns, verbose=verbose)
self._context['vocab_unit'] = vocab_unit
return self
def transform(self, data: pd.DataFrame, columns: list,
verbose: int=1) -> pd.DataFrame:
"""
Apply transformation on data, create truncated length, representation.
"""
units_ = self._default_units()
units_.append(self._context['vocab_unit'])
units_.append(
units.TruncatedLength(text_length=30, truncate_mode='post')
)
func = chain_transform(units_)
data = apply_on_df_columns(data, columns, func, verbose=verbose)
for col in columns:
data[col+'_len'] = data[col].apply(len)
empty_id = data[data[col+'_len'] == 0].index.tolist()
data.drop(index=empty_id, axis=0, inplace=True)
data.dropna(axis=0, inplace=True)
data.reset_index(drop=True, inplace=True)
return data | [
"[email protected]"
] | |
0f88316bf11c35e936d8f86e044b31b12973dbe9 | 43f0c93802ef62c03388006cdae18c62de4d3295 | /setup.py | 524d362968902bc8a4e648bf8419ebe2c4b0c37a | [
"MIT"
] | permissive | pombredanne/qtstyles | e05f67f4d0f58284ae5b5c50909f23090f5bf278 | de962879e36be305572b0c5fb5c4ddcfeda5afe0 | refs/heads/master | 2020-04-27T00:58:55.044676 | 2018-10-20T05:19:33 | 2018-10-20T05:19:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | from setuptools import setup, find_packages
setup(name="qtstyles",
version="0.0.2",
install_requires=[
"QtPy>=1.4.1"
],
description="A collection of Qt style sheets and helpful classes for applying them.",
long_description=open("README.md").read(),
# https://setuptools.readthedocs.io/en/latest/setuptools.html#including-data-files
package_data={"qtstyles": ["style_sheets/*.qss"]}, # include style sheets
author="Simon Garisch",
author_email="[email protected]",
url="https://github.com/simongarisch/qtstyles",
packages=find_packages()
)
| [
"[email protected]"
] | |
cb448cc57982cd1d11cc353decfa6f00bac6d2d2 | 35e6605da2d105158d4ce3aa8230f650ba965651 | /v7/meta_template/meta_template.py | b1eb9b573778b911af44b802b56501e7968fc974 | [
"MIT",
"BSD-3-Clause",
"Apache-2.0",
"BSD-2-Clause",
"LGPL-2.0-or-later",
"LicenseRef-scancode-free-unknown",
"GPL-1.0-or-later"
] | permissive | getnikola/plugins | 8a24d00d9ca17ef075c49925d9945b059eeed849 | 9de663884ba5f15153d37e527ade6f55e42661a3 | refs/heads/master | 2023-08-29T23:38:25.184763 | 2023-08-06T12:58:33 | 2023-08-06T12:58:33 | 13,049,233 | 62 | 104 | MIT | 2023-08-06T12:55:44 | 2013-09-23T22:50:59 | Python | UTF-8 | Python | false | false | 2,442 | py | # -*- coding: utf-8 -*-
# Copyright © 2016 Manuel Kaufmann
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
from docutils import nodes
from docutils.parsers.rst import Directive, directives
from nikola.plugin_categories import RestExtension
class Plugin(RestExtension):
name = "meta_template"
def set_site(self, site):
self.site = site
MetaTemplate.site = site
return super(Plugin, self).set_site(site)
class MetaTemplate(Directive):
""" Restructured text extension for inserting custom templates."""
option_spec = {
'title': directives.unchanged,
'href': directives.unchanged,
'url': directives.unchanged,
'target': directives.unchanged,
'src': directives.unchanged,
'style': directives.unchanged,
}
has_content = True
required_arguments = 1
optional_arguments = 0
def __init__(self, *args, **kwargs):
super(MetaTemplate, self).__init__(*args, **kwargs)
def run(self):
template_name = self.arguments[0] + '.tmpl'
self.options.update({
'content': self.content,
})
output = self.site.template_system.render_template(
template_name,
None,
self.options,
)
return [nodes.raw('', output, format='html')]
directives.register_directive('template', MetaTemplate)
| [
"[email protected]"
] | |
dff2c4c6b24ea68093845fe8c8cc96b6c0b00eb6 | 4f7962d02254ab6e5cf692648c933394ff41c79d | /component_sdk/python/tests/google/bigquery/test__query.py | 06d91a42747f7c24d3454014f3d87a395c35ebae | [
"Apache-2.0"
] | permissive | yebrahim/pipelines | 5414131f5ab176aa7607114e3a0d23db73f5c8c8 | 77df6c2438f4cf6b81c97ecf4dac9fdbac0e3132 | refs/heads/master | 2020-04-08T13:23:50.628537 | 2019-03-01T18:35:47 | 2019-03-01T18:35:47 | 159,389,183 | 1 | 0 | Apache-2.0 | 2018-11-27T19:37:57 | 2018-11-27T19:37:56 | null | UTF-8 | Python | false | false | 2,545 | py | # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
from google.cloud import bigquery
from google.api_core import exceptions
from kfp_component.google.bigquery import query
CREATE_JOB_MODULE = 'kfp_component.google.bigquery._query'
@mock.patch(CREATE_JOB_MODULE + '.display.display')
@mock.patch(CREATE_JOB_MODULE + '.gcp_common.dump_file')
@mock.patch(CREATE_JOB_MODULE + '.KfpExecutionContext')
@mock.patch(CREATE_JOB_MODULE + '.bigquery.Client')
class TestQuery(unittest.TestCase):
def test_create_job_succeed(self, mock_client,
mock_kfp_context, mock_dump_json, mock_display):
mock_kfp_context().__enter__().context_id.return_value = 'ctx1'
mock_client().get_job.side_effect = exceptions.NotFound('not found')
mock_response = {
'configuration': {
'query': {
'query': 'SELECT * FROM table_1'
}
}
}
mock_client().query.return_value.to_api_repr.return_value = mock_response
mock_dataset = bigquery.DatasetReference('project-1', 'dataset-1')
mock_client().dataset.return_value = mock_dataset
result = query('SELECT * FROM table_1', 'project-1', 'dataset-1',
output_gcs_path='gs://output/path')
self.assertEqual(mock_response, result)
expected_job_config = bigquery.QueryJobConfig()
expected_job_config.create_disposition = bigquery.job.CreateDisposition.CREATE_IF_NEEDED
expected_job_config.write_disposition = bigquery.job.WriteDisposition.WRITE_TRUNCATE
expected_job_config.destination = mock_dataset.table('table_ctx1')
mock_client().query.assert_called_with('SELECT * FROM table_1',mock.ANY,
job_id = 'query_ctx1')
actual_job_config = mock_client().query.call_args_list[0][0][1]
self.assertDictEqual(
expected_job_config.to_api_repr(),
actual_job_config.to_api_repr()
)
mock_client().extract_table.assert_called_with(
mock_dataset.table('table_ctx1'),
'gs://output/path')
| [
"[email protected]"
] | |
7ca9ac0a216728a647c1da58e0b311e1690ce6e1 | 922a4f63f71e8833ecb240387d675ddfddf13845 | /PythonProgrammingAssignmentsII/Q20.py | 042c6100010d3493d297cb3fed73f20a55511bfb | [] | no_license | asmitbhantana/Insight-Workshop | 0ed9e6de49dc15f0447166227f404f108ffaad2e | 54f9ce92fe47a01b08440d20aa850dfc97fa0423 | refs/heads/master | 2022-11-19T19:14:56.557014 | 2020-07-24T07:32:12 | 2020-07-24T07:32:12 | 275,709,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 952 | py | """
20. Write a Python class to find the three elements that sum to zero
from a list of n real numbers.
Input array : [-25, -10, -7, -3, 2, 4, 8, 10]
Output : [[-10, 2, 8], [-7, -3, 10]]
"""
if __name__ == '__main__':
usr_list = [-25, -10, -7, -3, 2, 4, 8, 10]
required_result = 0
usr_list.sort()
required_result_num_list = []
for i in range(len(usr_list)):
if usr_list[i] >= required_result:
break
for j in range(i + 1, len(usr_list)):
if usr_list[i]+usr_list[j] >= required_result:
break
for k in range(j + 1, len(usr_list)):
c_sum = usr_list[i] + usr_list[j] + usr_list[k]
if c_sum > required_result:
break
elif c_sum == required_result:
required_result_num_list.append([usr_list[i], usr_list[j], usr_list[k]])
break
print(required_result_num_list) | [
"[email protected]"
] | |
99cd86f3d8ff4704dcb4b37bf6424a04ccda5c61 | f07a42f652f46106dee4749277d41c302e2b7406 | /Data Set/bug-fixing-5/4c0889e8fcee6c8be9fef33887480747c227725d-<cmd_build>-bug.py | 435761d91c7ec686f1c2a16c517b74393ddf97ed | [] | no_license | wsgan001/PyFPattern | e0fe06341cc5d51b3ad0fe29b84098d140ed54d1 | cc347e32745f99c0cd95e79a18ddacc4574d7faa | refs/heads/main | 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,173 | py | def cmd_build(self):
result = dict(changed=False, actions=[])
if (not self.check_mode):
for service in self.project.get_services(self.services, include_deps=False):
if service.can_be_built():
self.log(('Building image for service %s' % service.name))
old_image_id = ''
try:
image = service.image()
if (image and image.get('Id')):
old_image_id = image['Id']
except NoSuchImageError:
pass
except Exception as exc:
self.client.fail(('Error: service image lookup failed - %s' % str(exc)))
try:
new_image_id = service.build(pull=True, no_cache=self.nocache)
except Exception as exc:
self.client.fail(('Error: build failed with %s' % str(exc)))
if (new_image_id not in old_image_id):
result['changed'] = True
result['actions'].append(dict(service=service.name, built_image=dict(name=service.image_name, id=new_image_id)))
return result | [
"[email protected]"
] | |
b38b46fd26f5a49bcaa3e1b5de0b4f3f25a2e70a | d272b041f84bbd18fd65a48b42e0158ef6cceb20 | /catch/datasets/gyrovirus_gyv7-sf.py | 5cf9a7265da81fb03c45e238b7ce53b151c9e6c3 | [
"MIT"
] | permissive | jahanshah/catch | bbffeadd4113251cc2b2ec9893e3d014608896ce | 2fedca15f921116f580de8b2ae7ac9972932e59e | refs/heads/master | 2023-02-19T13:30:13.677960 | 2021-01-26T03:41:10 | 2021-01-26T03:41:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | """Dataset with 'Gyrovirus GyV7-SF' sequences.
A dataset with 1 'Gyrovirus GyV7-SF' genomes.
THIS PYTHON FILE WAS GENERATED BY A COMPUTER PROGRAM! DO NOT EDIT!
"""
import sys
from catch.datasets import GenomesDatasetSingleChrom
ds = GenomesDatasetSingleChrom(__name__, __file__, __spec__)
ds.add_fasta_path("data/gyrovirus_gyv7-sf.fasta.gz", relative=True)
sys.modules[__name__] = ds
| [
"[email protected]"
] | |
72c1be6bcfb8580304d1dc0d10de7f18699c9b28 | ec19603130dddeb4b8298ee020965030d66edc81 | /src/networkService/servicos/informacao/informacaoQt.py | 19fd21829654effa14a3be1fdce01111d7712a16 | [] | no_license | tassio/NetworkService | 9a5f08c0e3b92cbe34fc99c36e80f57fcbd258f0 | f800d48d8af94bf8d927fd440eab7a1c40296066 | refs/heads/master | 2016-09-09T23:33:14.584056 | 2012-12-13T15:06:24 | 2012-12-13T15:06:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,885 | py | #-*- coding: utf-8 -*-
from PyQt4.QtNetwork import QNetworkCacheMetaData, QHostAddress
from PyQt4.QtGui import QColor, QBrush, QCursor, QFont, QIcon, QImage, QKeySequence, QListWidgetItem, QMatrix, \
QPainterPath, QPen, QPicture, QPixmap, QPolygon, QPolygonF, QQuaternion, QRegion, QSizePolicy, QStandardItem, \
QTableWidgetItem, QTextLength, QTextFormat, QTransform, QTreeWidgetItem, QVector2D, QVector3D, QVector4D
from PyQt4.QtCore import QUuid, QUrl, QSize, QSizeF, QRegExp, QRectF, QRect, QPoint, QPointF, QLocale, QLine, \
QLineF, QDateTime, QTime, QDate, QByteArray, QBitArray
from networkService.servicos.informacao.informacao import InformacaoAbstrata
from networkService.servicos.informacao.dataManipulador import DataManipulador
from networkService.servicos.informacao.registroInformacao import RegistroInformacao
@RegistroInformacao.addInformacaoHandler(
QColor, QNetworkCacheMetaData, QBrush, QHostAddress, QCursor,
QFont, QIcon, QImage, QKeySequence, QListWidgetItem, QMatrix,
QPainterPath, QPen, QPicture, QPixmap,
QPolygonF, QPolygon, QQuaternion, QRegion, QSizePolicy,
QStandardItem, QTableWidgetItem, QTextLength, QTextFormat,
QTransform, QTreeWidgetItem, QVector2D, QVector3D, QVector4D,
QUuid, QUrl, QSizeF, QSize, QRegExp, QRectF, QRect, QPointF,
QPoint, QLocale, QLineF, QLine, QDateTime, QTime, QDate,
QByteArray, QBitArray
)
class QInformacao(InformacaoAbstrata):
"""Classe que guarda qualquer classe do Qt que possa ser serializada e tenha um construtor sem parametros"""
def __lshift__(self, data):
nomeClasse = DataManipulador(data).getNextInstance()
self.valor = eval(nomeClasse)()
data >> self.valor
def __rshift__(self, data):
DataManipulador(data).addInstance(self.valor.__class__.__name__)
data << self.valor
| [
"[email protected]"
] | |
129863d00cccb8a19b5adbe2d12eaf8deed86c74 | bebe65ae5ea5d15eca9a388ddf86ca5b352762a6 | /bin/bubbles | 5b12642f3e732cd6140f54e107df357d82c1eebb | [
"MIT",
"LicenseRef-scancode-saas-mit"
] | permissive | biswapanda/bubbles | f65aa11b129cf272be1205ef1fd8f885b215216d | 6c6bd7b378e53bc0edcbbb35c2211922e1cb2100 | refs/heads/master | 2021-01-17T19:16:58.100977 | 2013-12-08T21:31:21 | 2013-12-08T21:31:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,839 | #! /usr/bin/env python3
#
"""
Bubbles command line tool
For more information run: bubbles --help
Paths:
* /etc/bubbles:~/.bubbles:.bubbles
Config:
* config.ini
Author: Stefan Urbanek <[email protected]>
"""
import argparse
import json
import sys
import configparser
import os.path
import argparse
import re
from bubbles import *
class ToolError(Exception):
"""Just exception"""
pass
CONFIG_PATHS = ['/etc/bubbles', \
'~/.bubbles', \
'./bubbles']
def load_config(args):
paths = CONFIG_PATHS + (args.config if args.config else [])
config = configparser.SafeConfigParser()
for path in paths:
config_file = os.path.join(path, "config.ini")
if os.path.exists(config_file):
config.read(config_file)
if config.has_section("extensions"):
extensions = config.options("extensions")
for extension in extensions:
mod_name = config.get("extensions", extension)
import_extension(extension, mod_name)
def import_extension(extension_name, module_name=None):
"""Imports a bubbles tool extension from module `module_name`.
Note: extension name is not used yet module_name is specified. Might be used in the future to allow
different modules replace extensions with same name.
"""
# FIXME: this is from brewery tool
module = __import__(module_name or extension_name)
def create_context(args):
if args.empty:
context = OperationContext()
else:
context = default_context
# Dummy request for an operation - forces automatic loading
context.operation_list("distinct")
modules = args.module or []
for name in modules:
module = __import__(name)
context.add_operations_from(mod)
return context
def opcatalogue(context, args):
"""Print all operations in the context."""
keys = list(context.operations.keys())
keys.sort()
reps = set(args.representation)
selection = []
# Select only operations with signatures matching reps
for opname in keys:
ops = context.operations[opname]
for op in ops:
if not reps or reps and (reps & set(op.signature.signature)):
selection.append(opname)
for opname in selection:
print("%s" % opname)
if args.signatures:
ops = context.operations[opname]
for op in ops:
if not reps or reps and (reps & set(op.signature.signature)):
sig = ", ".join(op.signature.signature)
print(" (%s)" % sig)
def run_pipe(args):
# Collect operations
pattern = re.compile(r"^(\w+)=(.*)")
templates = []
attribs = {}
current = None
# Cllect nodes and attributes
#
# node name pattern: node_name
# attribute pattern: attribute=value
#
for arg in args.node:
match = pattern.match(arg)
if match:
(attribute, value) = match.groups()
attribs[attribute] = value
else:
if current:
templates.append( (current, attribs) )
attribs = {}
current = arg
if current:
templates.append( (current, attribs) )
pipe = Pipeline()
nodes = []
for template, attribs in templates:
try:
node = brewery.nodes.create_node(template)
except KeyError:
sys.stderr.write("ERROR: unknown node %s\n" % template)
exit(1)
node.configure(attribs)
stream.add(node)
nodes.append(node)
if last_node:
stream.connect(last_node, node)
last_node = node
# If first node is not source node, then we add CSV node with standard
# input
if not isinstance(nodes[0], brewery.nodes.SourceNode):
node = brewery.nodes.create_node("csv_source")
node.resource = sys.stdin
stream.add(node)
stream.connect(node, nodes[0])
if not isinstance(nodes[-1], brewery.nodes.TargetNode):
node = brewery.nodes.create_node("csv_target")
node.resource = sys.stdout
stream.add(node)
stream.connect(nodes[-1], node)
stream.run()
################################################################################
# Main code
main_parser = argparse.ArgumentParser(description='Bubbles command lite tool')
main_parser.add_argument('--config',
action='append',
help='bubbles configuration file')
main_parser.add_argument('-m', '--module',
action='append',
help='list of python modules to be loaded and inspected '
'for potential operations')
main_parser.add_argument('--empty',
action='store_true',
help='start with empty context (requires -m)')
subparsers = main_parser.add_subparsers(title='commands', help='additional help')
################################################################################
# Command: operation catalogue
op_parser = subparsers.add_parser('op')
op_subparsers = op_parser.add_subparsers(title='operation commands')
subparser = op_subparsers.add_parser("list", help = "list available operations")
subparser.add_argument('-r', '--representation', action='append',
help="show operations having specified rep in signature")
subparser.add_argument('--signatures', action='store_true',
help="show also operation signatures")
subparser.set_defaults(func=opcatalogue)
################################################################################
# Command: pipe
subparser = subparsers.add_parser('pipe',
help="create a simple Brewery node pipe",
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent('''\
There should be at least one operation specified.
The arguments are either operations or operation
arguments. Attributes follow node name and have format:
attribute=value
If there is no source node, then CSV source on standard
input is assumed. If there is no target node, then CSV
target on standard output is assumed.
''')
)
subparser.add_argument('op', nargs="+", help='list of operations')
subparser.set_defaults(func=run_pipe)
#
args = main_parser.parse_args(sys.argv[1:])
context = create_context(args)
load_config(args)
if "func" in args:
try:
args.func(context, args)
except ToolError as e:
sys.stderr.write("Error: %s" % str(e))
exit(1)
else:
main_parser.print_help()
| [
"[email protected]"
] | ||
bc370ab02f4412d3115dff750ed79fd6ada8e58e | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/I_w_M_to_W_focus_Zok_div/ch096/wiColorJ/Add2Loss/Sob_k25_s001_EroM_Mae_s001/pyr_Tcrop255_p20_j15/pyr_4s/L8/step10_a.py | 6e74bc48fa90b826fdfd8e45ba9c7b2b91a824a4 | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270,666 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### 把 kong_model2 加入 sys.path
import os
code_exe_path = os.path.realpath(__file__) ### 目前執行 step10_b.py 的 path
code_exe_path_element = code_exe_path.split("\\") ### 把 path 切分 等等 要找出 kong_model 在第幾層
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2") ### 找出 kong_model2 在第幾層
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1]) ### 定位出 kong_model2 的 dir
import sys ### add the kong_model2 package dir (and this experiment's code dir) to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" code_dir:", code_dir)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
### How many path components lie between the kong_model2 root and this .py file.
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer ### the -1 converts the path length into the index of the last element
# print(" kong_to_py_layer:", kong_to_py_layer)
### Build template_dir from the path components between the kong root and this file.
### NOTE(review): kong_to_py_layer == 1 leaves template_dir undefined (NameError at "exp_dir = template_dir" below) — confirm that layout cannot occur.
if (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:] ### [7:] originally stripped the "step1x_" prefix; kept meaningful names later, so changed to 0
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] ### [5:] originally stripped the "mask_" prefix (added because python module names cannot start with a digit); the automatic ordering turned out acceptable, so changed to 0
elif(kong_to_py_layer > 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print(" template_dir:", template_dir) ### example: template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
exp_dir = template_dir ### parent folder under which every result_dir of this experiment file is created
#############################################################################################################################################################################################################
### Project imports that rely on the sys.path additions above.
from step06_a_datas_obj import *
from step09_4side_L8 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
### Undo the sys.path additions and drop the freshly-imported step09 modules from the module cache.
### NOTE(review): presumably so sibling experiment scripts can import their own step09_* variant afterwards — verify against the runner that executes these files.
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_moduless = [module for module in sys.modules if "step09" in module]
for rm_module in rm_moduless: del sys.modules[rm_module]
#############################################################################################################################################################################################################
'''
exp_dir 是 決定 result_dir 的 "上一層"資料夾 名字喔! exp_dir要巢狀也沒問題~
比如:exp_dir = "6_mask_unet/自己命的名字",那 result_dir 就都在:
    6_mask_unet/自己命的名字/result_a
    6_mask_unet/自己命的名字/result_b
    6_mask_unet/自己命的名字/...
'''
use_db_obj = type8_blender_kong_doc3d_in_I_gt_W_ch_norm_v2
### z, y, x order matches the output order of step07_b_0b_Multi_UNet.
use_loss_obj = [mae_s001_sobel_k25_s001_EroseM_loss_info_builder.set_loss_target("UNet_Wz").copy(), mae_s001_sobel_k25_s001_EroseM_loss_info_builder.set_loss_target("UNet_Wy").copy(), mae_s001_sobel_k25_s001_EroseM_loss_info_builder.set_loss_target("UNet_Wx").copy()]
#############################################################
def _build_exp(pyramid_model, result_name=""):
    """Build the standard Exp_builder every experiment in this file shares.

    pyramid_model: one of the ch032_limit_pyramid_* builders (star-imported
                   from step09_4side_L8); its kong_model.model_describe is
                   used as the describe_end.
    result_name:   explicit result name; the default empty string keeps the
                   builder's automatic naming.

    All training arguments (epochs, iteration frequencies, in/gt ranges) are
    identical across experiments, so they live here in exactly one place.
    """
    return (Exp_builder()
            .set_basic("train", use_db_obj, pyramid_model, use_loss_obj,
                       exp_dir=exp_dir, code_exe_path=code_exe_path,
                       describe_end=pyramid_model.kong_model.model_describe)
            .set_train_args(epochs=1)
            .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900)
            .set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1))
            .set_result_name(result_name=result_name))
#############################################################
### Blank Exp_builder so result_analyze can draw an empty chart.
empty = _build_exp(ch032_limit_pyramid_1side_1__2side_1__3side_1_4side_1, result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
#############################################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
ch032_limit_1side_1__2side_1__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_1__2side_1__3side_1_4side_1)
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
ch032_limit_1side_2__2side_1__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_2__2side_1__3side_1_4side_1)
ch032_limit_1side_2__2side_2__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_2__2side_2__3side_1_4side_1)
ch032_limit_1side_2__2side_2__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_2__2side_2__3side_2_4side_1)
ch032_limit_1side_2__2side_2__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_2__2side_2__3side_2_4side_2)
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
ch032_limit_1side_3__2side_1__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_3__2side_1__3side_1_4side_1)
ch032_limit_1side_3__2side_2__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_3__2side_2__3side_1_4side_1)
ch032_limit_1side_3__2side_2__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_3__2side_2__3side_2_4side_1)
ch032_limit_1side_3__2side_2__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_3__2side_2__3side_2_4side_2)
ch032_limit_1side_3__2side_3__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_3__2side_3__3side_1_4side_1)
ch032_limit_1side_3__2side_3__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_3__2side_3__3side_2_4side_1)
ch032_limit_1side_3__2side_3__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_3__2side_3__3side_2_4side_2)
ch032_limit_1side_3__2side_3__3side_3_4side_1 = _build_exp(ch032_limit_pyramid_1side_3__2side_3__3side_3_4side_1)
ch032_limit_1side_3__2side_3__3side_3_4side_2 = _build_exp(ch032_limit_pyramid_1side_3__2side_3__3side_3_4side_2)
ch032_limit_1side_3__2side_3__3side_3_4side_3 = _build_exp(ch032_limit_pyramid_1side_3__2side_3__3side_3_4side_3)
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
ch032_limit_1side_4__2side_1__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_1__3side_1_4side_1)
ch032_limit_1side_4__2side_2__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_2__3side_1_4side_1)
ch032_limit_1side_4__2side_2__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_2__3side_2_4side_1)
ch032_limit_1side_4__2side_2__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_4__2side_2__3side_2_4side_2)
ch032_limit_1side_4__2side_3__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_3__3side_1_4side_1)
ch032_limit_1side_4__2side_3__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_3__3side_2_4side_1)
ch032_limit_1side_4__2side_3__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_4__2side_3__3side_2_4side_2)
ch032_limit_1side_4__2side_3__3side_3_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_3__3side_3_4side_1)
ch032_limit_1side_4__2side_3__3side_3_4side_2 = _build_exp(ch032_limit_pyramid_1side_4__2side_3__3side_3_4side_2)
ch032_limit_1side_4__2side_3__3side_3_4side_3 = _build_exp(ch032_limit_pyramid_1side_4__2side_3__3side_3_4side_3)
ch032_limit_1side_4__2side_4__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_1_4side_1)
ch032_limit_1side_4__2side_4__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_2_4side_1)
ch032_limit_1side_4__2side_4__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_2_4side_2)
ch032_limit_1side_4__2side_4__3side_3_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_3_4side_1)
ch032_limit_1side_4__2side_4__3side_3_4side_2 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_3_4side_2)
ch032_limit_1side_4__2side_4__3side_3_4side_3 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_3_4side_3)
ch032_limit_1side_4__2side_4__3side_4_4side_1 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_4_4side_1)
ch032_limit_1side_4__2side_4__3side_4_4side_2 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_4_4side_2)
ch032_limit_1side_4__2side_4__3side_4_4side_3 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_4_4side_3)
ch032_limit_1side_4__2side_4__3side_4_4side_4 = _build_exp(ch032_limit_pyramid_1side_4__2side_4__3side_4_4side_4)
# 1 3 6 10 "15" 21 28 36 45 55
# side5 OK 35
ch032_limit_1side_5__2side_1__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_1__3side_1_4side_1)
ch032_limit_1side_5__2side_2__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_2__3side_1_4side_1)
ch032_limit_1side_5__2side_2__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_2__3side_2_4side_1)
ch032_limit_1side_5__2side_2__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_5__2side_2__3side_2_4side_2)
ch032_limit_1side_5__2side_3__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_3__3side_1_4side_1)
ch032_limit_1side_5__2side_3__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_3__3side_2_4side_1)
ch032_limit_1side_5__2side_3__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_5__2side_3__3side_2_4side_2)
ch032_limit_1side_5__2side_3__3side_3_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_3__3side_3_4side_1)
ch032_limit_1side_5__2side_3__3side_3_4side_2 = _build_exp(ch032_limit_pyramid_1side_5__2side_3__3side_3_4side_2)
ch032_limit_1side_5__2side_3__3side_3_4side_3 = _build_exp(ch032_limit_pyramid_1side_5__2side_3__3side_3_4side_3)
ch032_limit_1side_5__2side_4__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_1_4side_1)
ch032_limit_1side_5__2side_4__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_2_4side_1)
ch032_limit_1side_5__2side_4__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_2_4side_2)
ch032_limit_1side_5__2side_4__3side_3_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_3_4side_1)
ch032_limit_1side_5__2side_4__3side_3_4side_2 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_3_4side_2)
ch032_limit_1side_5__2side_4__3side_3_4side_3 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_3_4side_3)
ch032_limit_1side_5__2side_4__3side_4_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_4_4side_1)
ch032_limit_1side_5__2side_4__3side_4_4side_2 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_4_4side_2)
ch032_limit_1side_5__2side_4__3side_4_4side_3 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_4_4side_3)
ch032_limit_1side_5__2side_4__3side_4_4side_4 = _build_exp(ch032_limit_pyramid_1side_5__2side_4__3side_4_4side_4)
ch032_limit_1side_5__2side_5__3side_1_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_5__3side_1_4side_1)
ch032_limit_1side_5__2side_5__3side_2_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_5__3side_2_4side_1)
ch032_limit_1side_5__2side_5__3side_2_4side_2 = _build_exp(ch032_limit_pyramid_1side_5__2side_5__3side_2_4side_2)
ch032_limit_1side_5__2side_5__3side_3_4side_1 = _build_exp(ch032_limit_pyramid_1side_5__2side_5__3side_3_4side_1)
ch032_limit_1side_5__2side_5__3side_3_4side_2 = _build_exp(ch032_limit_pyramid_1side_5__2side_5__3side_3_4side_2)
ch032_limit_1side_5__2side_5__3side_3_4side_3 = _build_exp(ch032_limit_pyramid_1side_5__2side_5__3side_3_4side_3)
ch032_limit_1side_5__2side_5__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_5__2side_5__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_5__2side_5__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_5__2side_5__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_5__2side_5__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_5__2side_5__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_5__2side_5__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_5__2side_5__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_5__2side_5__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_5__2side_5__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_5__2side_5__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_5__2side_5__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_5__2side_5__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_5__2side_5__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_5__2side_5__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_5__2side_5__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_5__2side_5__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_5__2side_5__3side_5_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# Per-2side combination counts are the triangular numbers T(k) = k*(k+1)/2:
# 1 3 6 10 15 "21" 28 36 45 55  (the quoted 21 = T(6), the largest 2side in this group).
# side6 OK: cumulative total 1+3+6+10+15+21 = 56 experiments for the 1side_6 group.
# --- ch032 "limit" pyramid experiments, 1side_6 group (continues past this chunk) ---
# Same enumeration and shared settings as the groups above: one Exp_builder per
# (2side, 3side, 4side) combination with 1 <= 4side <= 3side <= 2side <= 6,
# each paired with the identically-named ch032_limit_pyramid_* model object.
ch032_limit_1side_6__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_3__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_3__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_3__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_3__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_3__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_3__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_3__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_3__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_3__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_3__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_3__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_3__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_4__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_4__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_5__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_5__3side_5_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_5_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_6_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_6_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_6_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_6_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_6_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_6__2side_6__3side_6_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_6__2side_6__3side_6_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# Bookkeeping note: triangular numbers 1 3 6 10 15 21 "28" 36 45 55 — 28 is the
# count reached at side 7, and 1+3+6+10+15+21+28 = 84 is presumably the total
# number of 1side_7 combinations recorded below ("side7 OK 84").
# 1side_7 experiment combinations for 2side 1..6: for each 2side value the
# 3side index runs 1..2side and the 4side index runs 1..3side (the triangular
# enumeration), giving 1+3+6+10+15+21 = 56 bindings.  Each loop body binds the
# same module-level name to the same Exp_builder chain as one hand-written
# line would; only the combination tag varies.
for _s2 in range(1, 7):
    for _s3 in range(1, _s2 + 1):
        for _s4 in range(1, _s3 + 1):
            _tag = "1side_7__2side_%d__3side_%d_4side_%d" % (_s2, _s3, _s4)
            # Look up the matching pyramid model object defined earlier in this module.
            _pyr = globals()["ch032_limit_pyramid_" + _tag]
            globals()["ch032_limit_" + _tag] = (
                Exp_builder()
                .set_basic("train", use_db_obj, _pyr, use_loss_obj,
                           exp_dir=exp_dir, code_exe_path=code_exe_path,
                           describe_end=_pyr.kong_model.model_describe)
                .set_train_args(epochs=1)
                .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2,
                                     it_down_step="half", it_down_fq=900)
                .set_train_in_gt_use_range(use_in_range=Range(0, 1),
                                           use_gt_range=Range(0, 1))
                .set_result_name(result_name=""))
ch032_limit_1side_7__2side_7__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_5_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_6_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_6_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_6_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_6_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_6_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_6_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_6_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_7_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_7_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_7_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_7_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_7_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_7_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_7__2side_7__3side_7_4side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_7, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_7__2side_7__3side_7_4side_7.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 15 21 28 "36" 45 55
# side8 OK 120
ch032_limit_1side_8__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_3__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_3__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_3__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_3__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_3__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_3__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_3__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_3__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_3__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_3__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_3__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_3__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_4__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_4__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_5__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_5__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_5__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_5__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_5__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_5__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_5__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_5__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_5__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_5__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_5__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_5__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_5__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_5__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_5__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_5__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_5__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_5__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# ---------------------------------------------------------------------------
# ch032_limit 1side_8 experiment builders.
#
# The original source spelled out 69 near-identical assignments; each one only
# differed in the (2side, 3side, 4side) index triple embedded in the variable
# name and in the pyramid-model object passed in.  They are generated here in
# a loop instead.  The triples obey the pyramid constraint 4side <= 3side <=
# 2side, and this chunk covers exactly the lexicographic range
# (2side, 3side, 4side) from (5, 4, 1) through (8, 5, 1) inclusive — earlier
# and later triples are defined elsewhere in this file, so they are skipped.
# Module-level names (e.g. ch032_limit_1side_8__2side_5__3side_4_4side_1) are
# created via globals(), which is exactly what a top-level assignment does, so
# downstream code that references these names by identifier still works.
# ---------------------------------------------------------------------------
def _build_ch032_limit_1side_8_exp(model_obj):
    """Return the standard Exp_builder used by every ch032_limit 1side_8 entry.

    model_obj: a ch032_limit_pyramid_1side_8__... model object; its
        kong_model.model_describe is used as the describe_end text, matching
        the hand-written originals.
    """
    return (Exp_builder()
            .set_basic("train", use_db_obj, model_obj, use_loss_obj,
                       exp_dir=exp_dir, code_exe_path=code_exe_path,
                       describe_end=model_obj.kong_model.model_describe)
            .set_train_args(epochs=1)
            .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2,
                                 it_down_step="half", it_down_fq=900)
            .set_train_in_gt_use_range(use_in_range=Range(0, 1),
                                       use_gt_range=Range(0, 1))
            .set_result_name(result_name=""))

for _s2 in range(5, 8 + 1):
    for _s3 in range(1, _s2 + 1):          # pyramid constraint: 3side <= 2side
        for _s4 in range(1, _s3 + 1):      # pyramid constraint: 4side <= 3side
            # Only the triples that this chunk originally defined.
            if not ((5, 4, 1) <= (_s2, _s3, _s4) <= (8, 5, 1)):
                continue
            _suffix = "1side_8__2side_{}__3side_{}_4side_{}".format(_s2, _s3, _s4)
            globals()["ch032_limit_" + _suffix] = _build_ch032_limit_1side_8_exp(
                globals()["ch032_limit_pyramid_" + _suffix])
ch032_limit_1side_8__2side_8__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_5_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_5_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_5_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_5_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_5_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_5_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_5_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_5_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_6_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_6_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_6_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_6_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_6_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_6_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_6_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_7_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_7_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_7_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_7_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_7_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_7_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_7_4side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_7, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_7_4side_7.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_8_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_8_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_8_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_8_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_8_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_8_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_8_4side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_7, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_7.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_8__2side_8__3side_8_4side_8 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_8, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_8__2side_8__3side_8_4side_8.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 15 21 28 36 "45" 55
# side9 OK 165
ch032_limit_1side_9__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_1__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_2__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_2__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_2__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_2__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_2__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_2__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_3__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_3__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_3__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_3__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_3__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_3__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_3__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_3__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_3__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_3__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_3__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_3__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_4__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_4__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_5__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_5__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_5__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_5__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_5__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_5__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_5__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_5__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_5__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_5__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_5__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_5__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_5__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_5__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_5__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_5__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_5__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_5__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_5__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_5__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_5__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_5__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_5__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_5__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# Build the `ch032_limit_1side_9__2side_{s2}__3side_{s3}_4side_{s4}` Exp_builder
# table for this chunk of the pyramid sweep.  The original generated code spelled
# out one assignment per combination; every line was identical except for the
# (s2, s3, s4) triple, so the table is rebuilt here data-driven.  The triples
# follow the regular enumeration s2 in 5..8, s3 in 1..s2, s4 in 1..s3, and this
# chunk covers the contiguous slice from (5, 4, 3) through (8, 5, 3) inclusive
# (69 combinations, matching the original 69 assignments).
#
# NOTE(review): Exp_builder / use_db_obj / use_loss_obj / Range and the
# ch032_limit_pyramid_* model objects are defined elsewhere in this module —
# presumably earlier generated sections; verify against the full file.
_SIDE_COMBOS = [
    (s2, s3, s4)
    for s2 in range(5, 9)        # 2side_5 .. 2side_8
    for s3 in range(1, s2 + 1)   # 3side_1 .. 3side_{s2}
    for s4 in range(1, s3 + 1)   # 4side_1 .. 4side_{s3}
]
# Slice boundaries of this chunk within the full enumeration.
_first = _SIDE_COMBOS.index((5, 4, 3))
_last = _SIDE_COMBOS.index((8, 5, 3))
for _s2, _s3, _s4 in _SIDE_COMBOS[_first:_last + 1]:
    _suffix = f"1side_9__2side_{_s2}__3side_{_s3}_4side_{_s4}"
    # The pyramid model object this experiment trains on.
    _model = globals()[f"ch032_limit_pyramid_{_suffix}"]
    # Assign to the exact same module-level name the generated code used, so
    # downstream code that references these variables by name keeps working.
    globals()[f"ch032_limit_{_suffix}"] = (
        Exp_builder()
        .set_basic("train", use_db_obj, _model, use_loss_obj,
                   exp_dir=exp_dir, code_exe_path=code_exe_path,
                   describe_end=_model.kong_model.model_describe)
        .set_train_args(epochs=1)
        .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2,
                             it_down_step="half", it_down_fq=900)
        .set_train_in_gt_use_range(use_in_range=Range(0, 1),
                                   use_gt_range=Range(0, 1))
        .set_result_name(result_name="")
    )
# Clean up loop temporaries so they don't linger as module-level names.
del _SIDE_COMBOS, _first, _last, _s2, _s3, _s4, _suffix, _model
ch032_limit_1side_9__2side_8__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_5_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_5_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_5_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_5_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_6_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_6_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_6_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_6_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_6_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_6_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_6_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_7_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_7_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_7_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_7_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_7_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_7_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_7_4side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_7, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_7_4side_7.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_8_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_8_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_8_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_8_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_8_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_8_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_8_4side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_7, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_7.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_8__3side_8_4side_8 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_8, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_8__3side_8_4side_8.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# --- Experiment grid: 1side_9 / 2side_9 combinations (3side_1..8, 4side up to 3side index) ---
# Same presumably-generated pattern as the 2side_8 group above: one Exp_builder per
# ch032_limit_pyramid_* model, 1 training epoch, see/save every 900/1800 iterations,
# in/gt ranges normalized to Range(0, 1).
ch032_limit_1side_9__2side_9__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_1_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 3side_2: 4side_1..2
ch032_limit_1side_9__2side_9__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_2_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_2_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_2_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_2_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 3side_3: 4side_1..3
ch032_limit_1side_9__2side_9__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_3_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_3_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_3_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_3_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_3_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_3_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 3side_4: 4side_1..4
ch032_limit_1side_9__2side_9__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_4_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_4_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_4_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_4_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_4_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_4_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_4_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_4_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 3side_5: 4side_1..5
ch032_limit_1side_9__2side_9__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_5_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 3side_6: 4side_1..6
ch032_limit_1side_9__2side_9__3side_6_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_6_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_6_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_6_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_6_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_6_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_6_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 3side_7: 4side_1..7
ch032_limit_1side_9__2side_9__3side_7_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_7_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_7_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_7_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_7_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_7_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_7_4side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_7, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_7_4side_7.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 3side_8: 4side_1 (4side_2..8 continue below)
ch032_limit_1side_9__2side_9__3side_8_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# NOTE(review): removed a stray, out-of-sequence duplicate of the
# ch032_limit_1side_9__2side_8__3side_8_4side_2 assignment that appeared here, in the
# middle of the 2side_9 group. It was byte-identical to the earlier definition in the
# 2side_8 group above, so it only re-ran the builder and rebound the same name to a
# redundant second Exp_builder instance; the name remains defined by the original line.
# --- Experiment grid continuation: 1side_9 / 2side_9 / 3side_8 (4side_2..8), then 3side_9 start ---
# Same generated pattern and training settings as the groups above.
ch032_limit_1side_9__2side_9__3side_8_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_8_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_8_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_8_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_8_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_8_4side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_7, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_7.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_8_4side_8 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_8, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_8_4side_8.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 3side_9 group starts here (continues past this chunk)
ch032_limit_1side_9__2side_9__3side_9_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_9_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_9_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_9_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_9_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_9_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_9_4side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_7, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_7.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_9_4side_8 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_8, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_8.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_limit_1side_9__2side_9__3side_9_4side_9 = Exp_builder().set_basic("train", use_db_obj, ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_9, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_limit_pyramid_1side_9__2side_9__3side_9_4side_9.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
#############################################################
if(__name__ == "__main__"):
print("build exps cost time:", time.time() - start_time)
if len(sys.argv) < 2:
############################################################################################################
### 直接按 F5 或打 python step10_b1_exp_obj_load_and_train_and_test.py,後面沒有接東西喔!才不會跑到下面給 step10_b_subprocss.py 用的程式碼~~~
ch032_limit_1side_1__2side_1__3side_1_4side_1.build().run()
# print('no argument')
sys.exit()
### 以下是給 step10_b_subprocess.py 用的,相當於cmd打 python step10_b1_exp_obj_load_and_train_and_test.py 某個exp.build().run()
eval(sys.argv[1])
| [
"[email protected]"
] | |
5a1ad840edc1d0ca68d0087d4ec58a1799c74647 | 6843258fe430c67ffa01e909d1650df390369d93 | /errata_tool/__init__.py | d0efa742971710c04b6bc0d10dfd7c1f99727682 | [
"MIT"
] | permissive | ralphbean/errata-tool | 86df0c5a5bdd65d62e01653f003ac2ecf3e2f092 | d7f999a34ef2780e5218b071a4cd99b35b8702de | refs/heads/master | 2021-05-15T05:34:04.761851 | 2017-11-18T03:59:38 | 2017-12-14T21:19:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | from .exception import ErrataException
from .connector import ErrataConnector
from .erratum import Erratum
__all__ = ['ErrataException', 'ErrataConnector', 'Erratum']
__version__ = '1.9.0'
| [
"[email protected]"
] | |
735cbb7dfb9b821b50fe2b7be81fe9770ca3d8d1 | ef16d4d796588cbf0d5cb0f84727812e7866f92e | /myvenv/bin/symilar | e547e7e0d4da4224aa8441aff9d3b4c6c945a143 | [] | no_license | shortnd/django_blog | 5b24f4c40cd79181a946de6f7edecc9490279839 | aaa8c92e3281924b2e2ece54338899c0879ee7b2 | refs/heads/master | 2020-05-03T17:19:54.363860 | 2019-03-31T21:04:23 | 2019-03-31T21:04:23 | 178,742,082 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | #!/Users/kortr/code/python/djangogirls/myvenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pylint import run_symilar
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(run_symilar())
| [
"[email protected]"
] | ||
066a1c5df73dd3d29347781664c88fc58a2ca994 | 019c78e21f861b6a56800877082a9c155dd8fb5f | /niveau-02/chapitre-4-fonctions/02-deux-codes-secrets-obligatoire.py | f63d0032b5d7f7e7b581e0a0a96d1af4008a9cd2 | [] | no_license | donnerc/oci-prog-exos | 0c0bd50a93896826251e343baa9836e6d09fc9fd | de5da49fb8a3df56ef12c4f9ea284d476e999434 | refs/heads/master | 2021-01-23T13:16:59.304820 | 2015-01-13T15:57:22 | 2015-01-13T15:57:22 | 13,173,560 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py |
##################################
# fichier 02-deux-codes-secrets-obligatoire.py
# nom de l'exercice : Deux codes secrets
# url : http://www.france-ioi.org/algo/task.php?idChapter=509&idTask=0&sTab=task&iOrder=3
# type : obligatoire
#
# Chapitre : chapitre-4-fonctions
#
# Compétence développée :
#
# auteur :
##################################
# chargement des modules
# mettre votre code ici
| [
"[email protected]"
] | |
a538af6a464fa56591c72692d25ab74aa2ef4463 | 7857b4f02001c3e0ac0317fa501a4bacc8ea335b | /logic_tutorial.py | 20c0ea7cc1dc5ceb8e4405692f0ade77a4859382 | [
"MIT"
] | permissive | twtrubiks/leetcode-python | 65a2035fe2c9e4a65b09f5d65df7b24be385d6fc | e46b32f6de4c0711ef44b7f2a482dc59657aa5e5 | refs/heads/master | 2022-01-12T01:34:50.628413 | 2022-01-10T06:30:21 | 2022-01-10T06:30:21 | 55,111,802 | 25 | 18 | null | null | null | null | UTF-8 | Python | false | false | 759 | py |
def trunk_1(arr_1, size_1):
result_1 = []
while arr:
pop_data = [arr_1.pop(0) for _ in range(size_1)]
result_1.append(pop_data)
return result_1
def trunk_2(arr_2, size_2):
arrs = []
while len(arr_2) > size_2:
pice = arr_2[:size_2]
arrs.append(pice)
arr_2 = arr_2[size:]
arrs.append(arr_2)
return arrs
def trunk_3(arr, size):
result = []
count = 0
while count < len(arr):
result.append(arr[count:count+size])
count += size
return result
if __name__ == "__main__":
'''
arr = [1, 2, 3, 4, 5, 6]
size = 2
result = [[1, 2], [3, 4], [5, 6]]
'''
arr = [1, 2, 3, 4, 5, 6]
size = 2
result = trunk_1(arr, size)
print(result)
| [
"[email protected]"
] | |
8a3de5ed32f6755687dc77858aa6898715c57094 | 4cca59f941adce8a2d71c00c0be5c06857f88dcc | /snisi_epidemiology/migrations/0002_auto_20141008_1439.py | 4dd9a51d66cf927b3667f0698d7d55c83299de1f | [
"MIT"
] | permissive | brahimmade/snisi | 7e4ce8e35150f601dd7b800bc422edec2d13063d | b4d0292b3314023ec9c984b776eaa63a0a0a266f | refs/heads/master | 2023-05-07T19:04:04.895987 | 2017-12-29T18:58:22 | 2017-12-29T18:58:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,137 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('snisi_epidemiology', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='aggepidemiologyr',
name='acute_flaccid_paralysis_case',
field=models.IntegerField(verbose_name='Suspected AFP cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='acute_flaccid_paralysis_death',
field=models.IntegerField(verbose_name='Suspected AFP death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='acute_measles_diarrhea_case',
field=models.IntegerField(verbose_name='Suspected Acute Measles Diarrhea cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='acute_measles_diarrhea_death',
field=models.IntegerField(verbose_name='Suspected Acute Measles Diarrhea death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='cholera_case',
field=models.IntegerField(verbose_name='Suspected Cholera cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='cholera_death',
field=models.IntegerField(verbose_name='Suspected Cholera death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='ebola_case',
field=models.IntegerField(verbose_name='Suspected Ebola cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='ebola_death',
field=models.IntegerField(verbose_name='Suspected Ebola death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='influenza_a_h1n1_case',
field=models.IntegerField(verbose_name='Suspected Influenza A H1N1 cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='influenza_a_h1n1_death',
field=models.IntegerField(verbose_name='Suspected Influenza A H1N1 death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='measles_case',
field=models.IntegerField(verbose_name='Suspected Measles cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='measles_death',
field=models.IntegerField(verbose_name='Suspected Measles death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='meningitis_case',
field=models.IntegerField(verbose_name='Suspected Meningitis cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='meningitis_death',
field=models.IntegerField(verbose_name='Suspected Meningitis death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='neonatal_tetanus_case',
field=models.IntegerField(verbose_name='Suspected NNT cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='neonatal_tetanus_death',
field=models.IntegerField(verbose_name='Suspected NNT death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='other_notifiable_disease_case',
field=models.IntegerField(verbose_name='Suspected Other Notifiable Diseases cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='other_notifiable_disease_death',
field=models.IntegerField(verbose_name='Suspected Other Notifiable Diseases death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='rabies_case',
field=models.IntegerField(verbose_name='Suspected Rabies cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='rabies_death',
field=models.IntegerField(verbose_name='Suspected Rabies death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='red_diarrhea_case',
field=models.IntegerField(verbose_name='Suspected Red Diarrhea cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='red_diarrhea_death',
field=models.IntegerField(verbose_name='Suspected Red Diarrhea death'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='yellow_fever_case',
field=models.IntegerField(verbose_name='Suspected Yellow Fever cases'),
),
migrations.AlterField(
model_name='aggepidemiologyr',
name='yellow_fever_death',
field=models.IntegerField(verbose_name='Suspected Yellow Fever death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='acute_flaccid_paralysis_case',
field=models.IntegerField(verbose_name='Suspected AFP cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='acute_flaccid_paralysis_death',
field=models.IntegerField(verbose_name='Suspected AFP death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='acute_measles_diarrhea_case',
field=models.IntegerField(verbose_name='Suspected Acute Measles Diarrhea cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='acute_measles_diarrhea_death',
field=models.IntegerField(verbose_name='Suspected Acute Measles Diarrhea death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='cholera_case',
field=models.IntegerField(verbose_name='Suspected Cholera cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='cholera_death',
field=models.IntegerField(verbose_name='Suspected Cholera death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='ebola_case',
field=models.IntegerField(verbose_name='Suspected Ebola cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='ebola_death',
field=models.IntegerField(verbose_name='Suspected Ebola death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='influenza_a_h1n1_case',
field=models.IntegerField(verbose_name='Suspected Influenza A H1N1 cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='influenza_a_h1n1_death',
field=models.IntegerField(verbose_name='Suspected Influenza A H1N1 death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='measles_case',
field=models.IntegerField(verbose_name='Suspected Measles cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='measles_death',
field=models.IntegerField(verbose_name='Suspected Measles death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='meningitis_case',
field=models.IntegerField(verbose_name='Suspected Meningitis cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='meningitis_death',
field=models.IntegerField(verbose_name='Suspected Meningitis death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='neonatal_tetanus_case',
field=models.IntegerField(verbose_name='Suspected NNT cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='neonatal_tetanus_death',
field=models.IntegerField(verbose_name='Suspected NNT death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='other_notifiable_disease_case',
field=models.IntegerField(verbose_name='Suspected Other Notifiable Diseases cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='other_notifiable_disease_death',
field=models.IntegerField(verbose_name='Suspected Other Notifiable Diseases death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='rabies_case',
field=models.IntegerField(verbose_name='Suspected Rabies cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='rabies_death',
field=models.IntegerField(verbose_name='Suspected Rabies death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='red_diarrhea_case',
field=models.IntegerField(verbose_name='Suspected Red Diarrhea cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='red_diarrhea_death',
field=models.IntegerField(verbose_name='Suspected Red Diarrhea death'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='yellow_fever_case',
field=models.IntegerField(verbose_name='Suspected Yellow Fever cases'),
),
migrations.AlterField(
model_name='epidemiologyr',
name='yellow_fever_death',
field=models.IntegerField(verbose_name='Suspected Yellow Fever death'),
),
]
| [
"[email protected]"
] | |
af30001153143516bb60447c5a6baee10e8ce452 | 8d3713030d02e34eb37b149d0bc2a8fd25fec7f7 | /problem111.py | d028894946ae341863fed4e2ca6ad0ba8893cf7f | [] | no_license | GlenHaber/euler | cd3a34550a0c6189a17fbc26991393ee6a4ab8d6 | cb3259f375c1f21af7daf79ab19532518765bbc8 | refs/heads/master | 2021-01-19T13:06:52.579227 | 2017-06-09T21:07:33 | 2017-06-09T21:07:33 | 100,825,188 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,010 | py | """
Primes with runs
Considering 4-digit primes containing repeated digits it is clear that they cannot all be the same: 1111 is divisible by
11, 2222 is divisible by 22, and so on. But there are nine 4-digit primes containing three ones:
1117, 1151, 1171, 1181, 1511, 1811, 2111, 4111, 8111
We shall say that M(n, d) represents the maximum number of repeated digits for an n-digit prime where d is the repeated
digit, N(n, d) represents the number of such primes, and S(n, d) represents the sum of these primes.
So M(4, 1) = 3 is the maximum number of repeated digits for a 4-digit prime where one is the repeated digit, there are
N(4, 1) = 9 such primes, and the sum of these primes is S(4, 1) = 22275. It turns out that for d = 0, it is only
possible to have M(4, 0) = 2 repeated digits, but there are N(4, 0) = 13 such cases.
In the same way we obtain the following results for 4-digit primes.
d M(4, d) N(4, d) S(4, d)
0 2 13 67061
1 3 9 22275
2 3 1 2221
3 3 12 46214
4 3 2 8888
5 3 1 5557
6 3 1 6661
7 3 9 57863
8 3 1 8887
9 3 7 48073
For d = 0 to 9, the sum of all S(4, d) is 273700.
Find the sum of all S(10, d).
"""
from common import is_prime, miller_rabin_test
# Brute force the case in the example to make sure I get it
def n_digit_primes(n):
for i in range(10 ** (n - 1), 10 ** n):
if is_prime(i):
yield i
def M(n, d):
return max(str(num).count(str(d)) for num in n_digit_primes(n))
def N(n, d):
nums = list(n_digit_primes(n))
M = max(str(num).count(str(d)) for num in nums)
return len([n for n in nums if str(n).count(str(d)) == M])
def S(n, d, nums=None):
if nums is None:
nums = list(n_digit_primes(n))
M = max(str(num).count(str(d)) for num in nums)
return sum([n for n in nums if str(n).count(str(d)) == M])
assert sum(S(4, d) for d in range(10)) == 273700
number = [0] * 10
# Shamelessly taken from mathblog.dk
def recurse(basedigit, startpos, level, fill=False):
global number
if level <= 0:
if number[0] == 0:
return 0
n = sum(10 ** i * x for i, x in enumerate(number[::-1]))
return n if miller_rabin_test(n) else 0
res = 0
if fill:
for pos in range(len(number)):
number[pos] = basedigit
for pos in range(startpos, len(number)):
for val in range(10):
number[pos] = val
res += recurse(basedigit, pos + 1, level - 1)
number[pos] = basedigit
return res
total = 0
for d in range(10):
for i in range(1, len(number)):
res = recurse(d, 0, i, True)
if res:
total += res
break
print('Answer:', total)
# primes = list(n_digit_primes(10))
# print(len(primes), 'primes generated')
# print('Answer:', sum(S(10, d, primes) for d in range(10)))
| [
"[email protected]"
] | |
cbb1fc6301940401b020a676152a2dd636acf9ef | 5dd47abf7061201d9378e73e51f08fbb314ba2fd | /envdsys/envcontacts/migrations/0074_auto_20210227_1830.py | 129e1c19b1364390a394fc2a8aa7265a96484ea8 | [
"Unlicense"
] | permissive | NOAA-PMEL/envDataSystem | 4d264ae5209015e4faee648f37608d68a4461d0a | 4db4a3569d2329658799a3eef06ce36dd5c0597d | refs/heads/master | 2023-02-23T22:33:14.334737 | 2021-07-22T01:09:16 | 2021-07-22T01:09:16 | 191,809,007 | 1 | 0 | Unlicense | 2023-02-08T00:45:54 | 2019-06-13T17:50:03 | Python | UTF-8 | Python | false | false | 1,175 | py | # Generated by Django 3.1.7 on 2021-02-27 18:30
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('envcontacts', '0073_auto_20210227_1819'),
]
operations = [
migrations.AlterField(
model_name='person',
name='email1_type',
field=models.CharField(choices=[('W', 'Work'), ('H', 'Home'), ('O', 'Other')], default='W', max_length=1),
),
migrations.AlterField(
model_name='person',
name='email2_type',
field=models.CharField(choices=[('W', 'Work'), ('H', 'Home'), ('O', 'Other')], default='W', max_length=1),
),
migrations.AlterField(
model_name='person',
name='phone1_type',
field=models.CharField(choices=[('M', 'Mobile'), ('W', 'Work'), ('H', 'Home'), ('O', 'Other')], default='M', max_length=1),
),
migrations.AlterField(
model_name='person',
name='phone2_type',
field=models.CharField(choices=[('M', 'Mobile'), ('W', 'Work'), ('H', 'Home'), ('O', 'Other')], default='M', max_length=1),
),
]
| [
"[email protected]"
] | |
f03af2a7915d3835033777ee323af7c7ddf60627 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03049/s862641058.py | d26e8c01dc0350e1428c4fbcbbbc4791d8acc382 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | n,*s=open(0).read().split()
a=0;b=0;ba=0;ans=0
for s_i in s:
b+=(s_i[0]=='B')*(not s_i[-1]=='A')
a+=(not s_i[0]=='B')*(s_i[-1]=='A')
ba+=(s_i[0]=='B')*(s_i[-1]=='A')
ans+=s_i.count('AB')
print(ans+ba+min(a,b)-(ba>0)*(a+b==0)) | [
"[email protected]"
] | |
ccadf51ea06ea13387ab2c4085caaed98e426aaf | 525f5ba86e1476d5f0dc396e225d544beb43cd3b | /nomenklatura/query/builder.py | 65e44f59be0c769551f83140ad62fed9b1e909cb | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | he0x/nomenklatura | bb47cd9103b03893832b4bda4bd69cba80473976 | b2e0a989de7aa4a08a63e22982c4904b255dc04a | refs/heads/master | 2021-01-15T08:18:29.257815 | 2015-03-28T20:15:52 | 2015-03-28T20:15:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,177 | py | from sqlalchemy.orm import aliased
from nomenklatura.core import db, url_for
from nomenklatura.schema import attributes
from nomenklatura.model.statement import Statement
from nomenklatura.model.context import Context
# from nomenklatura.model.type import Type
class QueryBuilder(object):
def __init__(self, dataset, parent, node):
self.dataset = dataset
self.parent = parent
self.node = node
self.results = {}
@property
def children(self):
if not hasattr(self, '_children'):
self._children = []
for child_node in self.node.children:
qb = QueryBuilder(self.dataset, self, child_node)
self._children.append(qb)
return self._children
def _add_statement(self, q):
""" Generate a linked statement that can be used in any
part of the query. """
stmt = aliased(Statement)
ctx = aliased(Context)
q = q.filter(stmt.context_id == ctx.id)
q = q.filter(stmt.dataset_id == self.dataset.id)
q = q.filter(ctx.active == True) # noqa
return stmt, q
def filter_value(self, q, stmt):
q = q.filter(stmt._value == self.node.value)
return q
def filter(self, q, stmt):
""" Apply filters to the given query recursively. """
if not self.node.filtered:
return q
filter_stmt, q = self._add_statement(q)
q = q.filter(stmt.subject == filter_stmt.subject)
if self.node.attribute:
q = q.filter(stmt._attribute == self.node.attribute.name)
if self.node.leaf:
return self.filter_value(q, filter_stmt)
for child in self.children:
q = child.filter(q, stmt)
return q
def filter_query(self, parents=None):
""" An inner query that is used to apply any filters, limits
and offset. """
q = db.session.query()
stmt, q = self._add_statement(q)
q = q.add_column(stmt.subject)
if parents is not None and self.node.attribute:
parent_stmt, q = self._add_statement(q)
q = q.filter(stmt.subject == parent_stmt._value)
q = q.filter(parent_stmt._attribute == self.node.attribute.name)
q = q.filter(parent_stmt.subject.in_(parents))
q = self.filter(q, stmt)
q = q.group_by(stmt.subject)
q = q.order_by(stmt.subject.asc())
if self.node.root:
q = q.limit(self.node.limit)
q = q.offset(self.node.offset)
return q
def nested(self):
""" A list of all sub-entities for which separate queries will
be conducted. """
for child in self.children:
if child.node.leaf or not child.node.attribute:
continue
if child.node.attribute.data_type == 'entity':
yield child
def project(self):
""" Figure out which attributes should be returned for the current
level of the query. """
attrs = set()
for child in self.children:
if child.node.blank and child.node.leaf:
attrs.update(child.node.attributes)
attrs = attrs if len(attrs) else attributes
skip_nested = [n.node.attribute for n in self.nested()]
return [a.name for a in attrs if a not in skip_nested]
def base_object(self, data):
""" Make sure to return all the existing filter fields
for query results. """
obj = {
'id': data.get('id'),
'api_url': url_for('entities.view', dataset=self.dataset.slug,
id=data.get('id')),
'parent_id': data.get('parent_id')
}
for child in self.children:
if child.node.leaf and child.node.filtered:
obj[child.node.name] = child.node.raw
return obj
return obj
def get_node(self, name):
""" Get the node for a given name. """
for child in self.children:
if child.node.name == name:
return child.node
return None if name == '*' else self.get_node('*')
def data_query(self, parents=None):
""" Generate a query for any statement which matches the criteria
specified through the filter query. """
filter_q = self.filter_query(parents=parents)
q = db.session.query()
stmt, q = self._add_statement(q)
filter_sq = filter_q.subquery()
q = q.filter(stmt.subject == filter_sq.c.subject)
q = q.filter(stmt._attribute.in_(self.project()))
q = q.add_column(stmt.subject.label('id'))
q = q.add_column(stmt._attribute.label('attribute'))
q = q.add_column(stmt._value.label('value'))
if parents is not None and self.node.attribute:
parent_stmt, q = self._add_statement(q)
q = q.filter(stmt.subject == parent_stmt._value)
q = q.filter(parent_stmt._attribute == self.node.attribute.name)
q = q.add_column(parent_stmt.subject.label('parent_id'))
q = q.order_by(filter_sq.c.subject.desc())
q = q.order_by(stmt.created_at.asc())
return q
def execute(self, parents=None):
""" Run the data query and construct entities from it's results. """
results = {}
for row in self.data_query(parents=parents):
data = row._asdict()
id = data.get('id')
if id not in results:
results[id] = self.base_object(data)
value = data.get('value')
attr = attributes[data.get('attribute')]
if attr.data_type not in ['type', 'entity']:
conv = attr.converter(self.dataset, attr)
value = conv.deserialize_safe(value)
node = self.get_node(data.get('attribute'))
if attr.many if node is None else node.many:
if attr.name not in results[id]:
results[id][attr.name] = []
results[id][attr.name].append(value)
else:
results[id][attr.name] = value
return results
def collect(self, parents=None):
""" Given re-constructed entities, conduct queries for child
entities and merge them into the current level's object graph. """
results = self.execute(parents=parents)
ids = results.keys()
for child in self.nested():
attr = child.node.attribute.name
for child_data in child.collect(parents=ids).values():
parent_id = child_data.pop('parent_id')
if child.node.many:
if attr not in results[parent_id]:
results[parent_id][attr] = []
results[parent_id][attr].append(child_data)
else:
results[parent_id][attr] = child_data
return results
def query(self):
results = []
for result in self.collect().values():
result.pop('parent_id')
if not self.node.many:
return result
results.append(result)
return results
| [
"[email protected]"
] | |
8358602e69b3372bacd7a45ddadd7849c1ccf792 | 650b3dd4cc74f32db78f7d99cef9907aec78a222 | /dialogs/Report/fPettyCashReport_data.py | d224b848def8260b8a4ae4863468aef52b0886ab | [] | no_license | mech4/PKTrx | 29b871ab587434e7c208175c248f48d9b6c80a17 | cf01bc5be8837d632974786d2419c58b94a0381d | refs/heads/master | 2020-03-29T19:55:07.331831 | 2012-09-18T20:22:52 | 2012-09-18T20:22:52 | 6,289,691 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 228 | py |
import sys
def CetakData(config,parameters,returns):
ret = returns.CreateValues(
['IsErr', 0],
['ErrMessage','']
)
try:
except:
ret.IsErr = 1
ret.ErrMessage = str(sys.exc_info()[1])
| [
"[email protected]"
] | |
e82dcd7e2c42de6224abe59e0b0800eb2ca85e3e | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-roma/huaweicloudsdkroma/v2/model/export_asset_response.py | 105871097c009b0aa7c5f4c4354c87308f05e410 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 3,344 | py | # coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ExportAssetResponse(SdkResponse):
    """Generated response model for the ROMA "export asset" API call.

    Attributes:
        openapi_types (dict): The key is attribute name
                            and the value is attribute type.
        attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """

    # Attribute names whose values must be masked in to_dict() output.
    sensitive_list = []

    openapi_types = {
        'job_id': 'str'
    }

    attribute_map = {
        'job_id': 'job_id'
    }

    def __init__(self, job_id=None):
        """ExportAssetResponse

        The model defined in huaweicloud sdk

        :param job_id: ID of the asset-export job; can be used to query the
            export job's progress.
        :type job_id: str
        """

        super(ExportAssetResponse, self).__init__()

        self._job_id = None
        self.discriminator = None

        if job_id is not None:
            self.job_id = job_id

    @property
    def job_id(self):
        """Gets the job_id of this ExportAssetResponse.

        ID of the asset-export job; can be used to query the export job's
        progress.

        :return: The job_id of this ExportAssetResponse.
        :rtype: str
        """
        return self._job_id

    @job_id.setter
    def job_id(self, job_id):
        """Sets the job_id of this ExportAssetResponse.

        ID of the asset-export job; can be used to query the export job's
        progress.

        :param job_id: The job_id of this ExportAssetResponse.
        :type job_id: str
        """
        self._job_id = job_id

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask values listed in sensitive_list instead of emitting them.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ExportAssetResponse):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
] | |
4ec02b40b4eaef9cd000b9f4fed6b0c691c3f47d | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_21454.py | 45e7d2122ab9b2a6b0f3906d21dcb18297aff031 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | # Updating collections from collections
db.Coll2.find().forEach(function(c2){
db.Coll1.update({isbn:c2.isbn},{$set: {category:c2.category}},{multi:true})
});
| [
"[email protected]"
] | |
151f8401dd23cc073bf7bb3fbb5cbf94fb035bc6 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_greyhounds.py | b30dc2d9386d20b4a85bd14ebe73422e2417bc96 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py |
from xai.brain.wordbase.nouns._greyhound import _GREYHOUND
# class header
class _GREYHOUNDS(_GREYHOUND):
    """Plural noun entry derived from the singular _GREYHOUND wordbase class."""

    def __init__(self):
        super().__init__()
        self.name = "GREYHOUNDS"
        self.specie = 'nouns'
        self.basic = "greyhound"
        self.jsondata = {}
| [
"[email protected]"
] | |
dfabfbb7eef76a04289682868648631a818c208c | 198ac210d70c865367fb73fb3c8f99b06cdd91d0 | /tests/integration/steam_simulator.py | 82382ec24aaef6653b83a506c43439f19fc0d0c8 | [
"BSD-3-Clause"
] | permissive | gutomaia/steamer-py | 0f2bc6a81bfab6986470b03b370ccf53941432ff | 7175fb1d79fe6ffc0c31b3e74f62805629e457b2 | refs/heads/master | 2021-01-10T19:30:50.429652 | 2013-04-29T13:38:30 | 2013-04-29T13:38:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 870 | py | import threading
from time import sleep
import web
import requests
# web.py routing table: /id/<user>/stats/<game> is handled by game_page.
# NOTE: '\w' inside a non-raw string reaches the regex engine unchanged here,
# but a raw string would be the safer convention.
urls = (
    '/id/(\w+)/stats/(\w+)', 'game_page'
)
class game_page(object):
    """web.py handler that serves a canned Steam stats XML fixture."""

    def GET(self, user, game):
        """Return the contents of fixtures/<user>-<game>.xml verbatim."""
        # `with` guarantees the fixture file is closed even if read() raises;
        # the original leaked the handle on error.
        with open('fixtures/%s-%s.xml' % (user, game)) as f:
            xml = f.read()
        web.header('Content-Length', len(xml))
        return xml
class SteamSimulator(threading.Thread):
    """Background thread running a fake Steam web server on port 8080."""

    def __init__(self):
        super(SteamSimulator, self).__init__()
        # Created lazily in run(); None lets stop() be called safely before
        # the server ever started (the original raised AttributeError).
        self.app = None
        self._stop = threading.Event()

    def run(self):
        """Start the web.py application (blocks the calling thread)."""
        self.app = web.application(urls, globals())
        web.config.default_port = 8080
        self.app.internalerror = web.debugerror
        self.app.run()

    def stop(self):
        """Stop the server (if started) and mark the simulator as stopped."""
        if self.app is not None:
            self.app.stop()
        self._stop.set()

    def stopped(self):
        # Event.is_set() replaces the deprecated camelCase isSet() alias.
        return self._stop.is_set()
if __name__ == "__main__":
    sim = SteamSimulator()
    # NOTE(review): run() executes the server synchronously in the main
    # thread; sim.start() would be needed to exercise the Thread machinery.
    sim.run()
"[email protected]"
] | |
6d72629e2166ef7142a7423e4d47ebcc5b93f571 | 869b8c7a526ebfbe6b55832ce9f081cd0218a4f5 | /onconet/models/spatial_transformers/factory.py | 0ad65b9657f472eb0d7eaebe27c8ff71d8b3ee59 | [
"MIT"
] | permissive | yala/Mirai | 54d1ab1496d35c05553cfe1c255e7c3012462ce4 | 12bace8fd6ce9c5bb129fd0d30a46a00a2f7b054 | refs/heads/master | 2023-04-29T11:12:28.853712 | 2023-02-24T21:28:20 | 2023-02-24T21:28:20 | 315,745,008 | 66 | 23 | MIT | 2022-02-07T20:49:05 | 2020-11-24T20:29:22 | Python | UTF-8 | Python | false | false | 688 | py | SPATIAL_TRANSFORMER_REGISTRY = {}
NO_SPATIAL_TRANSFORMER_ERR = 'Pool {} not in SPATIAL_TRANSFORMER! Available spatial transformers are {}'
def RegisterSpatialTransformer(st_name):
"""Registers a pool."""
def decorator(f):
SPATIAL_TRANSFORMER_REGISTRY[st_name] = f
return f
return decorator
def get_spatial_transformer(st_name):
"""Get pool from POOL_REGISTRY based on pool_name."""
if not st_name in SPATIAL_TRANSFORMER_REGISTRY:
raise Exception(NO_SPATIAL_TRANSFORMER_ERR.format(
pool_name, SPATIAL_TRANSFORMER_REGISTRY.keys()))
spatial_transformer = SPATIAL_TRANSFORMER_REGISTRY[st_name]
return spatial_transformer
| [
"[email protected]"
] | |
0331644aa9c6ce4d3b15eb5d286fa083f49458af | 4723d9818d8b52bcfa2315a59ceb4acf1731b761 | /pysgg/engine/inference.py | 7b71c8bd65ef28cc62751bb7f02222daf39f8a96 | [
"MIT",
"Python-2.0"
] | permissive | rafa-cxg/PySGG | fe8b34157438d73e7a91a846a3428f411a9b2535 | 5b758cd811e81cd47781fb4028011a012d91fcff | refs/heads/main | 2023-08-30T09:22:04.937170 | 2021-10-29T02:31:41 | 2021-10-29T02:31:41 | 425,873,560 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,090 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import logging
import os
import torch
from tqdm import tqdm
from pysgg.config import cfg
from pysgg.data.datasets.evaluation import evaluate
from .bbox_aug import im_detect_bbox_aug
from ..utils.comm import all_gather
from ..utils.comm import is_main_process, get_world_size
from ..utils.comm import synchronize
from ..utils.timer import Timer, get_time_str
def compute_on_dataset(model, data_loader, device, synchronize_gather=True, timer=None, logger=None):
    """Run the model over every batch of `data_loader` and collect predictions.

    :param model: detection/relation model; put into eval mode here
    :param data_loader: yields (images, targets, image_ids) batches
    :param device: device the model runs on
    :param synchronize_gather: gather the predictions during the training,
        rather than gathering all predictions after the training
    :param timer: optional Timer; tic/toc wraps only the forward pass
    :param logger: forwarded to the model call
    :return: dict mapping image_id -> prediction (on CPU). When
        synchronize_gather is True, only the main process accumulates the
        merged results; other ranks return a partial/empty dict.
    """
    model.eval()
    results_dict = {}
    cpu_device = torch.device("cpu")
    for _, batch in enumerate(tqdm(data_loader)):
        with torch.no_grad():
            images, targets, image_ids = batch
            targets = [target.to(device) for target in targets]
            if timer:
                timer.tic()
            if cfg.TEST.BBOX_AUG.ENABLED:
                output = im_detect_bbox_aug(model, images, device)
            else:
                # relation detection needs the targets
                output = model(images.to(device), targets, logger=logger)
            if timer:
                # cuda.synchronize makes the toc() measurement reflect the
                # actual GPU work, not just the async kernel launches.
                if not cfg.MODEL.DEVICE == 'cpu':
                    torch.cuda.synchronize()
                timer.toc()
            # Move results to CPU immediately to free GPU memory per batch.
            output = [o.to(cpu_device) for o in output]
        if synchronize_gather:
            # Per-batch all_gather: every rank must reach this point each
            # iteration, so all ranks must see the same number of batches.
            synchronize()
            multi_gpu_predictions = all_gather({img_id: result for img_id, result in zip(image_ids, output)})
            if is_main_process():
                for p in multi_gpu_predictions:
                    results_dict.update(p)
        else:
            results_dict.update(
                {img_id: result for img_id, result in zip(image_ids, output)}
            )
    return results_dict
def _accumulate_predictions_from_multiple_gpus(predictions_per_gpu, synchronize_gather=True):
    """Merge per-GPU prediction dicts into one list ordered by image id.

    When synchronize_gather is True the gathering already happened per batch
    in compute_on_dataset, so the main process just reorders its dict; when
    False, an all_gather is performed here instead. Non-main processes
    return None in both modes.
    """
    if not synchronize_gather:
        all_predictions = all_gather(predictions_per_gpu)
    if not is_main_process():
        return

    if synchronize_gather:
        predictions = predictions_per_gpu
    else:
        # merge the list of dicts
        predictions = {}
        for p in all_predictions:
            predictions.update(p)
    # convert a dict where the key is the index in a list
    image_ids = list(sorted(predictions.keys()))
    if len(image_ids) != image_ids[-1] + 1:
        # Gaps in the id range mean some images never produced predictions;
        # warn rather than fail so partial evaluations still run.
        logger = logging.getLogger("pysgg.inference")
        logger.warning(
            "WARNING! WARNING! WARNING! WARNING! WARNING! WARNING!"
            "Number of images that were gathered from multiple processes is not "
            "a contiguous set. Some images might be missing from the evaluation"
        )
        logger.info(f"len(image_ids) {len(image_ids)}, image_ids[-1] + 1 {image_ids[-1] + 1}")

    # convert to a list
    predictions = [predictions[i] for i in image_ids]
    return predictions
def inference(
        cfg,
        model,
        data_loader,
        dataset_name,
        iou_types=("bbox",),
        box_only=False,
        device="cuda",
        expected_results=(),
        expected_results_sigma_tol=4,
        output_folder=None,
        logger=None,
):
    """Evaluate `model` on a dataset, optionally reusing cached predictions.

    Runs compute_on_dataset (or loads eval_results.pytorch from
    output_folder when caching is allowed), gathers predictions across
    ranks, and hands them to the dataset-specific evaluate().

    :return: result of evaluate() on the main process; -1.0 on other ranks.
    """
    # Cache hit requires the flag, an output folder, and an existing file.
    load_prediction_from_cache = cfg.TEST.ALLOW_LOAD_FROM_CACHE and output_folder is not None and os.path.exists(
        os.path.join(output_folder, "eval_results.pytorch"))
    # convert to a torch.device for efficiency
    device = torch.device(device)
    num_devices = get_world_size()
    if logger is None:
        logger = logging.getLogger("pysgg.inference")
    dataset = data_loader.dataset
    logger.info("Start evaluation on {} dataset({} images).".format(dataset_name, len(dataset)))
    total_timer = Timer()
    inference_timer = Timer()
    total_timer.tic()
    if load_prediction_from_cache:
        logging.info("load_prediction_from_cache: " + os.path.join(output_folder, "eval_results.pytorch"))
        predictions = torch.load(os.path.join(output_folder, "eval_results.pytorch"),
                                 map_location=torch.device("cpu"))['predictions']
    else:
        predictions = compute_on_dataset(model, data_loader, device,
                                         synchronize_gather=cfg.TEST.RELATION.SYNC_GATHER,
                                         timer=inference_timer, logger=logger)
    # wait for all processes to complete before measuring the time
    synchronize()
    total_time = total_timer.toc()
    total_time_str = get_time_str(total_time)
    logger.info(
        "Total run time: {} ({} s / img per device, on {} devices)".format(
            total_time_str, total_time * num_devices / len(dataset), num_devices
        )
    )
    total_infer_time = get_time_str(inference_timer.total_time)
    logger.info(
        "Model inference time: {} ({} s / img per device, on {} devices)".format(
            total_infer_time,
            inference_timer.total_time * num_devices / len(dataset),
            num_devices,
        )
    )

    # Cached predictions were saved post-gather, so skip re-gathering them.
    if not load_prediction_from_cache:
        predictions = _accumulate_predictions_from_multiple_gpus(predictions,
                                                                 synchronize_gather=cfg.TEST.RELATION.SYNC_GATHER)

    if not is_main_process():
        return -1.0

    # if output_folder is not None and not load_prediction_from_cache:
    #     torch.save(predictions, os.path.join(output_folder, "predictions.pth"))

    extra_args = dict(
        box_only=box_only,
        iou_types=iou_types,
        expected_results=expected_results,
        expected_results_sigma_tol=expected_results_sigma_tol,
    )

    return evaluate(cfg=cfg,
                    dataset=dataset,
                    predictions=predictions,
                    output_folder=output_folder,
                    logger=logger,
                    **extra_args)
| [
"[email protected]"
] | |
c8db93ac8b84069eaa3db4066fd55c60f660c841 | 9249947c07f8addf64dd3d2a2f9f37d379f83921 | /libs/gluon/contrib/generics.py | abaa95f64160cec56f5ab445a32e92a16a8ff4fd | [
"MIT"
] | permissive | operepo/ope | eb71aa763d157416009d7c3052ace11852660e0a | 018c82af46845315795c67c36801e2a128f515d5 | refs/heads/master | 2023-08-08T15:05:28.592589 | 2023-07-25T00:22:24 | 2023-07-25T00:22:24 | 96,855,111 | 12 | 11 | MIT | 2023-03-03T15:10:34 | 2017-07-11T05:42:14 | Perl | UTF-8 | Python | false | false | 2,528 | py | # fix response
import os
from gluon import current, HTTP
from gluon.html import markmin_serializer, TAG, HTML, BODY, UL, XML, H1
from gluon.contrib.fpdf import FPDF, HTMLMixin
from gluon.sanitizer import sanitize
from gluon.contrib.markmin.markmin2latex import markmin2latex
from gluon.contrib.markmin.markmin2pdf import markmin2pdf
def wrapper(f):
def g(data):
try:
output = f(data)
return XML(ouput)
except (TypeError, ValueError), e:
raise HTTP(405, '%s serialization error' % e)
except ImportError, e:
raise HTTP(405, '%s not available' % e)
except Exception, e:
raise HTTP(405, '%s error' % e)
return g
def latex_from_html(html):
    """Convert an HTML string to LaTeX via the markmin intermediate format."""
    body_markmin = TAG(html).element('body').flatten(markmin_serializer)
    return XML(markmin2latex(body_markmin))
def pdflatex_from_html(html):
    """Render HTML to PDF with pdflatex (markmin -> latex -> pdf).

    Returns None when pdflatex is not installed. On conversion errors,
    raises HTTP 405 carrying an HTML report of errors and warnings.
    """
    if os.system('which pdflatex > /dev/null') != 0:
        return
    markmin = TAG(html).element('body').flatten(markmin_serializer)
    out, warnings, errors = markmin2pdf(markmin)
    if not errors:
        return XML(out)
    current.response.headers['Content-Type'] = 'text/html'
    raise HTTP(405, HTML(BODY(H1('errors'),
                              UL(*errors),
                              H1('warnings'),
                              UL(*warnings))).xml())
def pyfpdf_from_html(html):
    """Render HTML to PDF purely in Python using fpdf's HTMLMixin.

    Returns the PDF bytes wrapped in XML (dest='S' returns a string).
    """
    request = current.request

    def image_map(path):
        # Local static assets are read from disk; everything else is fetched
        # over HTTP(S), mirroring the scheme of the current request.
        if path.startswith('/%s/static/' % request.application):
            return os.path.join(request.folder, path.split('/', 2)[2])
        return 'http%s://%s%s' % (request.is_https and 's' or '', request.env.http_host, path)

    class MyFPDF(FPDF, HTMLMixin):
        pass

    pdf = MyFPDF()
    pdf.add_page()
    # pyfpdf needs some attributes to render the table correctly:
    html = sanitize(
        html, allowed_attributes={
            'a': ['href', 'title'],
            'img': ['src', 'alt'],
            'blockquote': ['type'],
            'td': ['align', 'bgcolor', 'colspan', 'height', 'width'],
            'tr': ['bgcolor', 'height', 'width'],
            'table': ['border', 'bgcolor', 'height', 'width'],
        }, escape=False)
    pdf.write_html(html, image_map=image_map)
    return XML(pdf.output(dest='S'))
def pdf_from_html(html):
    """Render HTML to PDF, preferring pdflatex when it is installed."""
    pdflatex_available = os.system('which pdflatex > /dev/null') == 0
    if pdflatex_available:
        return pdflatex_from_html(html)
    return pyfpdf_from_html(html)
| [
"[email protected]"
] | |
b3b956cf8f2482a45cd555f202e06a02b98b7d41 | 5f61724fc5cad3f82094a681c853cc9f0337f050 | /test/test_section.py | 41d67a7de2c3641cf36ab6ae71a3a5eccb98bd42 | [
"Apache-2.0"
] | permissive | barseghyanartur/odfdo | 2cecbbbb33f23d5ed0ba80cb9208a8e7857b93a0 | e628a9e9daa40319a777d216ec7ebca4057b3344 | refs/heads/master | 2022-11-17T15:43:15.662484 | 2020-06-27T00:41:38 | 2020-06-28T22:53:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,612 | py | #!/usr/bin/env python
# Copyright 2018 Jérôme Dumonteil
# Copyright (c) 2009-2010 Ars Aperta, Itaapy, Pierlis, Talend.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Authors (odfdo project): [email protected]
# The odfdo project is a derivative work of the lpod-python project:
# https://github.com/lpod/lpod-python
# Authors: Hervé Cauwelier <[email protected]>
from unittest import TestCase, main
from odfdo.document import Document
from odfdo.section import Section
class TestSection(TestCase):
    """Unit tests for odfdo Section creation and lookup."""

    def setUp(self):
        self.document = Document('samples/base_text.odt')
        self.body = self.document.body

    def test_create_simple_section(self):
        """The idea is to test only with the mandatory arguments (none
        in this case), not to test odf_create_element which is done in
        test_xmlpart.
        """
        section = Section()
        self.assertEqual(section.serialize(), '<text:section/>')

    def test_create_complex_section(self):
        """The idea is to test with all possible arguments. If some arguments
        are contradictory or trigger different behaviours, test all those
        combinations separately.
        """
        section = Section(style='Standard')
        self.assertEqual(section.serialize(),
                         '<text:section text:style-name="Standard"/>')

    def test_get_section_list(self):
        sections = self.body.get_sections()
        self.assertEqual(len(sections), 2)
        self.assertEqual(sections[1].name, "Section2")

    def test_get_section_list_style(self):
        sections = self.body.get_sections(style='Sect1')
        self.assertEqual(len(sections), 2)
        self.assertEqual(sections[0].name, "Section1")

    def test_get_section(self):
        section = self.body.get_section(position=1)
        self.assertEqual(section.name, "Section2")
main()
| [
"[email protected]"
] | |
df94816cf1d341645c00813001ccbbdc695412c4 | a363b1ad911b8c989e578b5a4a412c1dd615cc39 | /toontown/building/ToonInteriorColors.py | 56f103d58606c8a93f75fab6679a53c759bd7641 | [
"Apache-2.0"
] | permissive | OSToontown/Project-Altis-Alpha | 2999e944c44e0409cb19e277da61807bfa871e86 | 3a542b5d19784e9c4a5b893e88617e5280b213dd | refs/heads/master | 2023-06-26T12:12:35.073103 | 2021-07-24T17:20:43 | 2021-07-24T17:20:43 | 248,406,248 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,857 | py | from toontown.toonbase.ToontownGlobals import *
# Default RGBA palettes (Vec4, components in 0..1) for each paintable part
# of a toon building interior.
wainscottingBase = [Vec4(0.8, 0.5, 0.3, 1.0), Vec4(0.699, 0.586, 0.473, 1.0), Vec4(0.473, 0.699, 0.488, 1.0)]
wallpaperBase = [Vec4(1.0, 1.0, 0.7, 1.0),
 Vec4(0.8, 1.0, 0.7, 1.0),
 Vec4(0.4, 0.5, 0.4, 1.0),
 Vec4(0.5, 0.7, 0.6, 1.0)]
wallpaperBorderBase = [Vec4(1.0, 1.0, 0.7, 1.0),
 Vec4(0.8, 1.0, 0.7, 1.0),
 Vec4(0.4, 0.5, 0.4, 1.0),
 Vec4(0.5, 0.7, 0.6, 1.0)]
doorBase = [Vec4(1.0, 1.0, 0.7, 1.0)]
floorBase = [Vec4(0.746, 1.0, 0.477, 1.0), Vec4(1.0, 0.684, 0.477, 1.0)]
# Default scheme reused verbatim by most neighborhoods below.
baseScheme = {'TI_wainscotting': wainscottingBase,
 'TI_wallpaper': wallpaperBase,
 'TI_wallpaper_border': wallpaperBorderBase,
 'TI_door': doorBase,
 'TI_floor': floorBase}
# Per-neighborhood color schemes, keyed by the ToontownGlobals zone constant.
# NOTE(review): several entries spell out dicts identical to baseScheme —
# presumably to avoid sharing mutable lists/dicts between zones; confirm
# before collapsing them to baseScheme references.
colors = {DonaldsDock: {'TI_wainscotting': wainscottingBase,
                 'TI_wallpaper': wallpaperBase,
                 'TI_wallpaper_border': wallpaperBorderBase,
                 'TI_door': doorBase,
                 'TI_floor': floorBase},
 ToontownCentral: {'TI_wainscotting': wainscottingBase,
                   'TI_wallpaper': wallpaperBase,
                   'TI_wallpaper_border': wallpaperBorderBase,
                   'TI_door': doorBase + [Vec4(0.8, 0.5, 0.3, 1.0)],
                   'TI_floor': floorBase},
 TheBrrrgh: baseScheme,
 MinniesMelodyland: baseScheme,
 DaisyGardens: baseScheme,
 GoofySpeedway: baseScheme,
 DonaldsDreamland: {'TI_wainscotting': wainscottingBase,
                    'TI_wallpaper': wallpaperBase,
                    'TI_wallpaper_border': wallpaperBorderBase,
                    'TI_door': doorBase,
                    'TI_floor': floorBase},
 Tutorial: {'TI_wainscotting': wainscottingBase,
            'TI_wallpaper': wallpaperBase,
            'TI_wallpaper_border': wallpaperBorderBase,
            'TI_door': doorBase + [Vec4(0.8, 0.5, 0.3, 1.0)],
            'TI_floor': floorBase},
 MyEstate: baseScheme}
| [
"[email protected]"
] | |
13e109703253a9f3a1da4c8dd08d3e4292e6bbd9 | cfb76fefdf3d991ca516d10ee04afda061fd9b7f | /tests/test_pcolormesh.py | 5363db32fb91669dcb3d1c11edbb4d67b15e1858 | [
"MIT"
] | permissive | chebee7i/prettyplotlib | 77d7fd3941877d694b4237850cfa75605a2954d7 | 68841f0156e29eec4fc76c53407e67206287b861 | refs/heads/master | 2021-01-20T23:11:57.745272 | 2013-10-06T20:04:12 | 2013-10-06T20:04:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,589 | py | __author__ = 'olga'
from matplotlib.testing.decorators import image_comparison
import prettyplotlib as ppl
from prettyplotlib import plt
import numpy as np
import os
import string
from prettyplotlib import brewer2mpl
from matplotlib.colors import LogNorm
@image_comparison(baseline_images=['pcolormesh'], extensions=['png'])
def test_pcolormesh():
    # Baseline image: default ppl.pcolormesh on a fixed-seed 10x10
    # standard-normal matrix. Pixel-compared against pcolormesh.png.
    fig, ax = plt.subplots(1)
    np.random.seed(10)
    ppl.pcolormesh(fig, ax, np.random.randn(10, 10))
    # fig.savefig('%s/baseline_images/test_pcolormesh/pcolormesh.png' %
    #             os.path.dirname(__file__))
@image_comparison(baseline_images=['pcolormesh_labels'], extensions=['png'])
def test_pcolormesh_labels():
    # Same data as test_pcolormesh, but with custom x/y tick labels.
    # NOTE(review): string.uppercase/string.lowercase are Python 2 only
    # (ascii_uppercase/ascii_lowercase in Python 3).
    fig, ax = plt.subplots(1)
    np.random.seed(10)
    ppl.pcolormesh(fig, ax, np.random.randn(10, 10),
                   xticklabels=string.uppercase[:10],
                   yticklabels=string.lowercase[-10:])
    # fig.savefig('%s/baseline_images/test_pcolormesh/pcolormesh_labels.png' %
    #             os.path.dirname(__file__))
@image_comparison(baseline_images=['pcolormesh_positive'], extensions=['png'])
def test_pcolormesh_positive():
    # All-positive data exercises the sequential (rather than diverging)
    # colormap selection path.
    fig, ax = plt.subplots(1)
    np.random.seed(10)
    ppl.pcolormesh(fig, ax, np.random.uniform(size=(10, 10)),
                   xticklabels=string.uppercase[:10],
                   yticklabels=string.lowercase[-10:])
    # fig.savefig('%s/baseline_images/test_pcolormesh/pcolormesh_positive.png' %
    #             os.path.dirname(__file__))
@image_comparison(baseline_images=['pcolormesh_negative'], extensions=['png'])
def test_pcolormesh_negative():
    # All-negative data: mirror of the positive case.
    fig, ax = plt.subplots(1)
    np.random.seed(10)
    ppl.pcolormesh(fig, ax, -np.random.uniform(size=(10, 10)),
                   xticklabels=string.uppercase[:10],
                   yticklabels=string.lowercase[-10:])
    # fig.savefig('%s/baseline_images/test_pcolormesh/pcolormesh_negative.png' %
    #             os.path.dirname(__file__))
@image_comparison(baseline_images=['pcolormesh_other_cmap'], extensions=['png'])
def test_pcolormesh_other_cmap():
    # Explicit user-supplied diverging colormap (brewer PRGn) should
    # override the default.
    purple_green = brewer2mpl.get_map('PRGn', 'diverging', 11).mpl_colormap

    fig, ax = plt.subplots(1)
    np.random.seed(10)
    ppl.pcolormesh(fig, ax, np.random.randn(10, 10), cmap=purple_green)
    # fig.savefig('%s/baseline_images/test_pcolormesh/pcolormesh_other_cmap.png' %
    #             os.path.dirname(__file__))
@image_comparison(baseline_images=['pcolormesh_positive_other_cmap'],
                  extensions=['png'])
def test_pcolormesh_positive_other_cmap():
    # Positive data + explicit sequential colormap (brewer RdPu) + labels.
    red_purple = brewer2mpl.get_map('RdPu', 'sequential', 8).mpl_colormap

    fig, ax = plt.subplots(1)
    np.random.seed(10)
    ppl.pcolormesh(fig, ax, np.random.uniform(size=(10, 10)),
                   xticklabels=string.uppercase[:10],
                   yticklabels=string.lowercase[-10:],
                   cmap=red_purple)
    # fig.savefig(
    #     '%s/baseline_images/test_pcolormesh/pcolormesh_positive_other_cmap.png' %
    #     os.path.dirname(__file__))
@image_comparison(baseline_images=['pcolormesh_lognorm'],
                  extensions=['png'])
def test_pcolormesh_lognorm():
    # Log-scaled color normalization over strictly-positive data.
    fig, ax = plt.subplots(1)
    np.random.seed(10)
    x = np.abs(np.random.randn(10, 10))
    ppl.pcolormesh(fig, ax, x,
                   norm=LogNorm(vmin=x.min().min(), vmax=x.max().max()))
    # fig.savefig('%s/baseline_images/test_pcolormesh/test_pcolormesh_lognorm.png' %
    #             os.path.dirname(__file__))
# Run the image-comparison tests directly via nose (legacy test runner).
if __name__ == '__main__':
    import nose
    nose.runmodule(argv=['-s', '--with-doctest'])
"[email protected]"
] | |
a32e00000f109f3f2e8079952c3278071e27cf0f | 00c7bd96f1afab807746f1f7f013d4aadc5f6a6e | /sakura/common/types.py | 32cb4d9e52fd22a39835c2c8c60f49825f3f0bb7 | [] | no_license | sakura-team/sakura | 350ae27bdf5c3e7c338c04ec33fb50f4cdc7bfb4 | 306bfe82ffd6b204b0b574bb7f75b35712a3202f | refs/heads/master | 2021-06-02T01:30:14.294572 | 2021-03-04T10:16:44 | 2021-03-04T10:16:44 | 61,307,818 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,271 | py | import numpy as np
from sakura.common.errors import APIRequestError
# Strings whose declared maximum length is below NUMPY_EMBEDDED_STR_MAX_LEN
# are stored inline in numpy arrays; longer or unbounded strings fall back
# to object-pointer storage.
NUMPY_EMBEDDED_STR_MAX_LEN = 16

SAKURA_INTEGER_TYPES = ('int8', 'int16', 'int32', 'int64',
                        'uint8', 'uint16', 'uint32', 'uint64')
SAKURA_FLOATING_TYPES = ('float32', 'float64')
SAKURA_NUMERIC_TYPES = SAKURA_INTEGER_TYPES + SAKURA_FLOATING_TYPES
SAKURA_NUMPY_TYPES = SAKURA_NUMERIC_TYPES + ('bool',)


def sakura_type_to_np_dtype(col_type, **params):
    """Translate a sakura column type name into a numpy dtype.

    Recognized params: max_length (only for 'string' / 'geometry').
    Raises NotImplementedError for unknown type names.
    """
    if col_type == 'date':
        # Dates travel as float64 timestamps.
        return np.dtype('float64')
    if col_type == 'opaque':
        return np.dtype(object)
    if col_type in ('string', 'geometry'):
        max_len = params.get('max_length')
        embeddable = max_len is not None and max_len < NUMPY_EMBEDDED_STR_MAX_LEN
        return np.dtype(('str', max_len)) if embeddable else np.dtype(object)
    if col_type in SAKURA_NUMPY_TYPES:
        return np.dtype(col_type)
    raise NotImplementedError('Do not know how to translate sakura type %s to a numpy dtype.' % repr(col_type))


def np_dtype_to_sakura_type(dt):
    """Translate a numpy dtype into a (sakura_type_name, params) pair."""
    if dt.name in SAKURA_NUMPY_TYPES:
        return dt.name, {}
    if dt.name == 'object':
        return 'opaque', {}
    if dt.type == np.str_:
        # e.g. '<U16' -> '16'; an empty remainder means unknown length.
        digits = str(dt).strip('<>U')
        max_length = int(digits) if digits else 0
        if max_length == 0:
            return 'string', {}  # unknown length
        return 'string', {'max_length': max_length}
    raise NotImplementedError('Do not know how to translate %s to a sakura type.' % repr(dt))


def verify_sakura_type_conversion(old_type, new_type):
    """Raise APIRequestError unless old_type -> new_type is a permitted refinement."""
    allowed_conversions = {
        ('opaque', 'string'),
        ('opaque', 'geometry'),
        ('string', 'geometry'),
        ('float64', 'date'),
    }
    if (old_type, new_type) not in allowed_conversions:
        raise APIRequestError("Cannot convert sakura type '%s' to '%s'!", (old_type, new_type))


def is_numeric_type(sakura_type):
    """True for integer or floating sakura types."""
    return sakura_type in SAKURA_NUMERIC_TYPES


def is_floating_type(sakura_type):
    """True for floating-point sakura types."""
    return sakura_type in SAKURA_FLOATING_TYPES
| [
"[email protected]"
] | |
614462b6940c9c08b08c24650c5683c4986c8d17 | 42d58b23f446a48907d965794a2ae1dc4ad751ab | /347. Top K Frequent Elements.py | 4d70eebffd7020a5f6c65cb4f2b11935dad21ace | [] | no_license | AsadullahFarooqi/LeetCode | fabec1cad1781d0300cec2931545b92dd1390900 | aecc4efe8e0561aa4dd8a8b7f755c19982c6c2ef | refs/heads/master | 2022-11-03T08:01:47.656348 | 2022-10-11T06:19:56 | 2022-10-11T06:19:56 | 187,672,723 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,189 | py | """
Given a non-empty array of integers, return the k most frequent elements.
Example 1:
Input: nums = [1,1,1,2,2,3], k = 2
Output: [1,2]
Example 2:
Input: nums = [1], k = 1
Output: [1]
Note:
You may assume k is always valid, 1 ≤ k ≤ number of unique elements.
Your algorithm's time complexity must be better than O(n log n), where n is the array's size.
"""
def topKFrequent(nums, k):
    """Return the k most frequent elements of nums, most frequent first.

    Counts with collections.Counter in O(n) and selects winners with
    heapq.nlargest in O(u log k) over the u unique values — strictly better
    than the O(n log n) bound the problem requires (the original sorted all
    unique keys, O(u log u)).

    Args:
        nums: list of hashable values, possibly empty.
        k: number of elements to return (1 <= k <= #unique per the problem;
           k == 0 on an empty list also works and yields []).

    Returns:
        list of the k values with the highest counts, descending by count
        (ties broken arbitrarily, as the problem allows).
    """
    from collections import Counter
    import heapq

    counts = Counter(nums)
    # nlargest maintains a k-sized heap instead of sorting every unique key.
    return heapq.nlargest(k, counts, key=counts.__getitem__)
# Ad-hoc manual check; for [3,0,1,0] with k=1 the most frequent value is 0.
if __name__ == '__main__':
    # n = [1,1,1,2,2,3]?
    # n = [1]
    n = [3,0,1,0]
    k = 1
    print(topKFrequent(n, k))
| [
"[email protected]"
] | |
5980dfe80dbe7976918aa72251a6196f00d24561 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/420/usersdata/329/88113/submittedfiles/exe11.py | fcae8958e71f64e9a6bacbbd05f96f381947b027 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | # -*- coding: utf-8 -*-
n = int(input("digite um numero com 8 algarismos: "))
soma = 0
while n < 100000000:
resto = n % 10
n = (n - resto)/10
soma = soma + resto
print ('%d' % soma)
while n > 999999999:
resto = n % 10
n = (n - resto)/10
soma = soma + resto
print ('%d' % soma)
while n > 100000000:
print('NAO SEI')
while n < 99999999:
print('NAO SEI')
| [
"[email protected]"
] | |
38df09d7f8f9529aecc3adc8d1a17a4cdafadc24 | 25e7d840203e705c6a68aed079cc9844954b9536 | /torch/_dynamo/variables/builtin.py | 111d5415c77602d9e98273b3bb9af90d2ad46f9e | [
"BSD-2-Clause",
"LicenseRef-scancode-secret-labs-2011",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
] | permissive | yf225/pytorch | 874892cd9d0f7bb748e469cfca23a3f503ea4265 | 39590d06c563d830d02b9f94611ab01f07133c97 | refs/heads/main | 2023-07-24T06:17:16.324006 | 2023-04-24T18:22:54 | 2023-04-24T18:22:59 | 113,096,813 | 1 | 3 | NOASSERTION | 2023-08-29T18:46:16 | 2017-12-04T21:25:08 | Python | UTF-8 | Python | false | false | 48,433 | py | import functools
import inspect
import itertools
import logging
import math
import operator
import types
from typing import Dict, List
import torch
from torch import sym_float, sym_int
from .. import config, variables
from ..allowed_functions import is_allowed
from ..exc import unimplemented, Unsupported, UserError, UserErrorType
from ..guards import GuardBuilder
from ..replay_record import DummyModule
from ..source import AttrSource, is_constant_source, SuperSource, TypeSource
from ..utils import (
check_constant_args,
check_unspec_python_args,
istype,
proxy_args_kwargs,
specialize_args_kwargs,
)
from .base import MutableLocal, typestr, VariableTracker
from .constant import ConstantVariable, EnumVariable
from .dicts import ConstDictVariable
from .lists import (
BaseListVariable,
ListIteratorVariable,
ListVariable,
TupleIteratorVariable,
TupleVariable,
)
from .tensor import FakeItemVariable, SymNodeVariable, UnspecializedPythonVariable
from .user_defined import UserDefinedVariable
log = logging.getLogger(__name__)
class BuiltinVariable(VariableTracker):
    @staticmethod
    @functools.lru_cache(None)
    def _constant_fold_functions():
        """Set of callables dynamo may evaluate at trace time.

        When every argument is a compile-time constant, calls to these
        builtins / operator functions / math functions are folded instead
        of being traced into the FX graph. Cached: the set is immutable
        for the life of the process.
        """
        fns = {
            abs,
            all,
            any,
            bool,
            callable,
            chr,
            divmod,
            float,
            int,
            len,
            max,
            min,
            ord,
            pow,
            repr,
            round,
            set,
            str,
            str.format,
            sum,
            type,
            operator.pos,
            operator.neg,
            operator.not_,
            operator.invert,
            operator.pow,
            operator.mul,
            operator.matmul,
            operator.floordiv,
            operator.truediv,
            operator.mod,
            operator.add,
            operator.sub,
            operator.getitem,
            operator.lshift,
            operator.rshift,
            operator.and_,
            operator.or_,
            operator.xor,
            operator.ipow,
            operator.imul,
            operator.imatmul,
            operator.ifloordiv,
            operator.itruediv,
            operator.imod,
            operator.iadd,
            operator.isub,
            operator.ilshift,
            operator.irshift,
            operator.iand,
            operator.ixor,
            operator.ior,
            operator.index,
        }
        # Also fold every built-in math function (same C type as math.sqrt).
        fns.update(x for x in math.__dict__.values() if isinstance(x, type(math.sqrt)))
        return fns
    def can_constant_fold_through(self):
        # True when self.fn is one of the pure builtin/operator/math
        # functions dynamo may evaluate at trace time (given constant args).
        return self.fn in self._constant_fold_functions()
    @staticmethod
    @functools.lru_cache(None)
    def _fx_graph_functions():
        """Set of operator functions that can be emitted directly as
        call_function nodes in the FX graph (unary/binary/in-place ops).
        Cached: the set never changes at runtime.
        """
        fns = {
            operator.pos,
            operator.neg,
            operator.not_,
            operator.invert,
            operator.pow,
            operator.mul,
            operator.matmul,
            operator.floordiv,
            operator.truediv,
            operator.mod,
            operator.add,
            operator.sub,
            operator.getitem,
            operator.lshift,
            operator.rshift,
            operator.and_,
            operator.or_,
            operator.xor,
            operator.ipow,
            operator.imul,
            operator.imatmul,
            operator.ifloordiv,
            operator.itruediv,
            operator.imod,
            operator.iadd,
            operator.isub,
            operator.ilshift,
            operator.irshift,
            operator.iand,
            operator.ixor,
            operator.ior,
        }
        return fns
    @staticmethod
    @functools.lru_cache(None)
    def _binops():
        """Dispatch table for binary arithmetic operators.

        Maps each out-of-place function to the magic-method names used to
        resolve it (forward, reverse, in-place) plus the corresponding
        in-place operator function, e.g.
        operator.add -> (["__add__", "__radd__", "__iadd__"], operator.iadd).
        """
        # function -> ([forward name, reverse name, in-place name], in-place op)
        fns = {
            operator.add: (["__add__", "__radd__", "__iadd__"], operator.iadd),
            operator.sub: (["__sub__", "__rsub__", "__isub__"], operator.isub),
            operator.mul: (["__mul__", "__rmul__", "__imul__"], operator.imul),
            operator.truediv: (
                ["__truediv__", "__rtruediv__", "__itruediv__"],
                operator.itruediv,
            ),
            operator.floordiv: (
                ["__floordiv__", "__rfloordiv__", "__ifloordiv__"],
                operator.ifloordiv,
            ),
            operator.mod: (["__mod__", "__rmod__", "__imod__"], operator.imod),
            pow: (["__pow__", "__rpow__", "__ipow__"], operator.ipow),
            operator.pow: (["__pow__", "__rpow__", "__ipow__"], operator.ipow),
            # NB: The follow binary operators are not supported for now, since the
            # corresponding magic methods aren't defined on SymInt / SymFloat:
            # operator.matmul
            # divmod
            # operator.lshift
            # operator.rshift
            # operator.and_
            # operator.or_
            # operator.xor
        }
        return fns
@staticmethod
@functools.lru_cache(None)
def _binop_handlers():
# Multiple dispatch mechanism defining custom binop behavior for certain type
# combinations. Handlers are attempted in order, and will be used if the type checks
# match. They are expected to have the signature:
# fn(tx, arg0: VariableTracker, arg1: VariableTracker, options) -> VariableTracker
# Override table contains: op_fn -> [list of handlers]
op_handlers = {}
for (
op,
(magic_method_names, in_place_op),
) in BuiltinVariable._binops().items():
op_handlers[op] = []
op_handlers[in_place_op] = []
forward_name, reverse_name, inplace_name = magic_method_names
# User-defined args (highest precedence)
def user_defined_handler(
tx,
a,
b,
options,
forward_name=forward_name,
reverse_name=reverse_name,
):
# Manually handle reversing logic if needed (e.g. call __radd__)
# TODO: If we expand this to handle tensor args, we need to manually
# handle cases like this:
#
# class A(int):
# def __radd__(self, other):
# print("woof")
# torch.randn(3) + A(3)
#
# In this example, A.__radd__() is not called -> nothing is printed, because
# Tensor.__add__ only does a subtype test against int, ignoring the subclass.
# To be fully correct, we should not call A.__radd__() here, and there may be
# other cases to reason about and add exceptions for.
if isinstance(a, UserDefinedVariable):
return a.call_method(tx, forward_name, [b], {})
else:
return b.call_method(tx, reverse_name, [a], {})
op_handlers[op].append(
((UserDefinedVariable, VariableTracker), user_defined_handler)
)
op_handlers[op].append(
((VariableTracker, UserDefinedVariable), user_defined_handler)
)
def user_defined_inplace_handler(
tx, a, b, options, forward_name=inplace_name
):
return a.call_method(tx, forward_name, [b], {})
op_handlers[in_place_op].append(
((UserDefinedVariable, VariableTracker), user_defined_inplace_handler)
)
op_handlers[in_place_op].append(
((VariableTracker, UserDefinedVariable), user_defined_inplace_handler)
)
# Dynamic shape args
def dynamic_handler(tx, a, b, options, fn=op):
from .builder import wrap_fx_proxy
return wrap_fx_proxy(
tx,
tx.output.create_proxy(
"call_function", fn, *proxy_args_kwargs([a, b], {})
),
**options,
)
op_handlers[op].append(
((SymNodeVariable, VariableTracker), dynamic_handler)
)
op_handlers[op].append(
((VariableTracker, SymNodeVariable), dynamic_handler)
)
# NB: Prefer out-of-place op when calling in-place op to generate valid graph
op_handlers[in_place_op].append(
((SymNodeVariable, VariableTracker), dynamic_handler)
)
op_handlers[in_place_op].append(
((VariableTracker, SymNodeVariable), dynamic_handler)
)
# Special cases - lower precedence but still prefer these over constant folding
# List-like addition (e.g. [1, 2] + [3, 4])
def tuple_add_handler(tx, a, b, options):
return TupleVariable(a.items + list(b.unpack_var_sequence(tx)), **options)
list_like_addition_handlers = [
# NB: Prefer the tuple-specific logic over base logic because of
# some SizeVariable weirdness. Specifically, the tuple-specific logic
# drops the subclass type (e.g. SizeVariable) and returns TupleVariables.
(
(TupleVariable, TupleVariable),
tuple_add_handler,
),
(
(TupleVariable, ConstantVariable),
tuple_add_handler,
),
(
(ConstantVariable, TupleVariable),
lambda tx, a, b, options: TupleVariable(
list(a.unpack_var_sequence(tx)) + b.items, **options
),
),
(
(BaseListVariable, BaseListVariable),
lambda tx, a, b, options: type(a)(a.items + b.items, **options),
),
]
op_handlers[operator.add].extend(list_like_addition_handlers)
def list_iadd_handler(tx, a, b, options):
if not a.mutable_local or not b.has_unpack_var_sequence(tx):
# Handler doesn't apply
return None
return tx.replace_all(
a,
ListVariable(
list(a.items) + list(b.unpack_var_sequence(tx)),
regen_guards=False,
**options,
),
)
list_like_iadd_handlers = [
(
(ListVariable, VariableTracker),
list_iadd_handler,
),
(
(TupleVariable, TupleVariable),
tuple_add_handler,
),
(
(TupleVariable, ConstantVariable),
tuple_add_handler,
),
]
op_handlers[operator.iadd].extend(list_like_iadd_handlers)
# List-like expansion (e.g. [1, 2, 3] * 3)
def expand_list_like(tx, lst, const, options):
return lst.__class__(
items=lst.items * const.as_python_constant(),
mutable_local=MutableLocal(),
**options,
)
list_like_expansion_handlers = [
((ListVariable, ConstantVariable), expand_list_like),
((TupleVariable, ConstantVariable), expand_list_like),
(
(ConstantVariable, ListVariable),
lambda tx, a, b, options: expand_list_like(tx, b, a, options),
),
(
(ConstantVariable, TupleVariable),
lambda tx, a, b, options: expand_list_like(tx, b, a, options),
),
]
op_handlers[operator.mul].extend(list_like_expansion_handlers)
return op_handlers
@staticmethod
def _find_binop_handler(op, a, b):
    """Return the first registered handler whose (lhs, rhs) type pair
    matches ``(a, b)`` for operator ``op``, or None if nothing matches."""
    candidates = BuiltinVariable._binop_handlers().get(op, [])
    for (lhs_type, rhs_type), handler in candidates:
        if isinstance(a, lhs_type) and isinstance(b, rhs_type):
            return handler
    return None
def can_insert_in_graph(self):
    """True if ``self.fn`` is one of the operators that may be emitted
    directly as an fx-graph node (see ``_fx_graph_functions``)."""
    return self.fn in self._fx_graph_functions()
def __init__(self, fn, **kwargs):
    """Track a Python builtin or ``operator`` function ``fn``; remaining
    kwargs are forwarded to the VariableTracker base class."""
    super().__init__(**kwargs)
    self.fn = fn
def __str__(self):
    """Debug representation, e.g. ``BuiltinVariable(len)``."""
    fn_name = "None" if self.fn is None else self.fn.__name__
    return f"{self.__class__.__name__}({fn_name})"
def python_type(self):
return type(self.fn)
def as_python_constant(self):
return self.fn
def reconstruct(self, codegen):
name = self.fn.__name__
assert self.fn.__module__ == "builtins"
assert name not in codegen.tx.f_globals, "shadowed global"
return [codegen.create_load_global(name, False, add=True)]
def constant_args(self, *args, **kwargs):
return check_constant_args(args, kwargs)
def tensor_args(self, *args, **kwargs):
    """True when at least one argument is a TensorVariable and no
    argument is a GetAttrVariable."""
    every_arg = list(itertools.chain(args, kwargs.values()))
    has_tensor = any(isinstance(a, variables.TensorVariable) for a in every_arg)
    has_getattr = any(isinstance(a, variables.GetAttrVariable) for a in every_arg)
    return has_tensor and not has_getattr
def unspec_python_args(self, *args, **kwargs):
return check_unspec_python_args(args, kwargs)
@staticmethod
def unwrap_unspec_args_kwargs(args, kwargs):
    """Convert VariableTracker args/kwargs into raw Python values.

    UnspecializedPythonVariable wrappers contribute their captured
    ``raw_value``; every other variable must be constant and contributes
    ``as_python_constant()``.

    Returns:
        ``(unwrapped_args, unwrapped_kwargs)`` as a plain list and dict.
    """
    unwrapped_args = []
    unwrapped_kwargs = {}
    for x in args:
        if isinstance(
            x,
            (variables.UnspecializedPythonVariable,),
        ):
            unwrapped_args.append(x.raw_value)
        else:
            unwrapped_args.append(x.as_python_constant())
    # Bug fix: iterate key/value pairs (``for k, v in kwargs`` iterates
    # keys only) and test ``v`` rather than the stale loop variable ``x``
    # left over from the args loop above.
    for k, v in kwargs.items():
        if isinstance(
            v,
            (variables.UnspecializedPythonVariable,),
        ):
            unwrapped_kwargs[k] = v.raw_value
        else:
            unwrapped_kwargs[k] = v.as_python_constant()
    return unwrapped_args, unwrapped_kwargs
def call_function(
self, tx, args: "List[VariableTracker]", kwargs: "Dict[str, VariableTracker]"
) -> "VariableTracker":
from .builder import wrap_fx_proxy, wrap_fx_proxy_cls
constant_args = check_constant_args(args, kwargs)
tensor_args = self.tensor_args(*args, **kwargs)
unspec_python_args = self.unspec_python_args(*args, **kwargs)
options = VariableTracker.propagate(self, args, kwargs.values())
has_constant_handler = self.can_constant_fold_through() and (
constant_args or unspec_python_args
)
assert isinstance(args, (list, tuple))
assert isinstance(kwargs, dict)
if (
self.fn is operator.getitem
and len(args) == 2
and isinstance(args[1], variables.TensorVariable)
and args[1].dtype == torch.bool
and not config.dynamic_shapes
):
unimplemented("dynamic Tensor.__getitem__(bool[])")
# args[0] is list and args[1] is unspec
if self.fn is operator.getitem and not isinstance(
args[0], variables.TensorVariable
):
tensor_args = False
args, kwargs = specialize_args_kwargs(tx, args, kwargs)
if (
self.can_insert_in_graph()
and tensor_args
and not (
self.fn is operator.getitem
and isinstance(args[0], ConstDictVariable)
and isinstance(args[1], variables.TensorVariable)
)
):
try:
fn = self.fn
if self.fn is operator.iadd and isinstance(
args[0], variables.ConstantVariable
):
# Work around weird bug in hf_T5
fn, args = operator.add, [args[1], args[0]]
if self.fn is operator.getitem and isinstance(args[1], SymNodeVariable):
# Standard indexing will force specialization due to
# __index__. Rewrite as a regular torch op which will
# trace fine
fn, args = torch.select, [
args[0],
variables.ConstantVariable(0),
args[1],
]
proxy = tx.output.create_proxy(
"call_function",
fn,
*proxy_args_kwargs(args, kwargs),
)
if any([isinstance(arg, FakeItemVariable) for arg in args]):
return wrap_fx_proxy_cls(
FakeItemVariable,
tx,
proxy,
**options,
)
elif self.unspec_python_args(*args, **kwargs):
_args, _kwargs = self.unwrap_unspec_args_kwargs(args, kwargs)
raw_value = self.fn(*_args, **_kwargs)
need_unwrap = any(
x.need_unwrap
for x in itertools.chain(args, kwargs.values())
if isinstance(x, variables.UnspecializedPythonVariable)
)
return wrap_fx_proxy_cls(
UnspecializedPythonVariable,
tx,
proxy,
raw_value=raw_value,
need_unwrap=need_unwrap,
**options,
)
elif all(isinstance(x, SymNodeVariable) for x in args):
return SymNodeVariable.create(tx, proxy, None, **options)
else:
# Work around for vision_maskrcnn due to precision difference
# specialize the dividend when float divide by tensor
if self.fn is operator.truediv and isinstance(
args[0], variables.UnspecializedPythonVariable
):
args[0] = args[0].convert_to_constant(tx)
return wrap_fx_proxy(tx, proxy, **options)
except NotImplementedError:
unimplemented(f"partial tensor op: {self} {args} {kwargs}")
# Handle cases like int(torch.seed())
# Also handle sym_float to sym_int cases
if self.fn in (int, float) and isinstance(args[0], SymNodeVariable):
fn_ = sym_int if self.fn is int else sym_float
out = wrap_fx_proxy(
tx=tx,
proxy=tx.output.create_proxy(
"call_function",
fn_,
(args[0].as_proxy(),),
{},
),
**options,
)
return out
# Handle binary ops (e.g. __add__ / __radd__, __iadd__, etc.)
# NB: Tensor args are handled above and not here
if len(kwargs) == 0 and len(args) == 2:
# Try to find a handler for the arg types; otherwise, fall through to constant handler
binop_handler = BuiltinVariable._find_binop_handler(
self.fn, args[0], args[1]
)
if binop_handler:
res = binop_handler(tx, args[0], args[1], options)
if res is not None:
return res
handler = getattr(self, f"call_{self.fn.__name__}", None)
if handler:
try:
inspect.signature(handler).bind(tx, *args, **kwargs)
except TypeError as exc:
if not has_constant_handler:
log.warning(
"incorrect arg count %s %s and no constant handler",
handler,
exc,
)
handler = None
if handler:
try:
result = handler(tx, *args, **kwargs)
if result is not None:
return result.add_options(options)
except Unsupported as exc:
if not has_constant_handler:
raise
# Actually, we will handle this just fine
exc.remove_from_stats()
if has_constant_handler:
args, kwargs = specialize_args_kwargs(tx, args, kwargs)
# constant fold
return variables.ConstantVariable(
self.as_python_constant()(
*[x.as_python_constant() for x in args],
**{k: v.as_python_constant() for k, v in kwargs.items()},
),
**options,
)
if self.fn is round:
if len(args) > 0 and isinstance(args[0], SymNodeVariable):
raise UserError(
UserErrorType.STANDARD_LIBRARY,
"Calling round() on symbolic value is not supported. "
"You can use floor() to implement this functionality",
)
return super().call_function(tx, args, kwargs)
def _call_min_max(self, tx, *args):
    """Dispatch builtin ``min()``/``max()`` (whichever ``self.fn`` is).

    A single unpackable argument is expanded into its elements; exactly
    two arguments go straight to the binary helper; more than two are
    folded pairwise.  Any other arity falls through (returns None) so
    the generic call path can handle it.
    """
    if len(args) == 1 and args[0].has_unpack_var_sequence(tx):
        # expand iterable
        items = args[0].unpack_var_sequence(tx)
        return self._call_min_max_seq(tx, items)
    elif len(args) == 2:
        return self._call_min_max_binary(tx, args[0], args[1])
    elif len(args) > 2:
        return self._call_min_max_seq(tx, args)
def _call_min_max_seq(self, tx, items):
    """Fold a non-empty sequence of variables through the pairwise
    min/max helper; a singleton sequence is returned as-is."""
    assert len(items) > 0
    if len(items) == 1:
        return items[0]
    combine = functools.partial(self._call_min_max_binary, tx)
    return functools.reduce(combine, items)
def _call_min_max_binary(self, tx, a, b):
if self.tensor_args(a, b):
if not isinstance(a, variables.TensorVariable):
a, b = b, a
assert isinstance(a, variables.TensorVariable)
# result of an item call is a scalar convert to a tensor
if isinstance(a, FakeItemVariable):
a = variables.TorchVariable(torch.tensor).call_function(tx, [a], {})
# Dynamic input does not get resolved, rather, gets stored as call_function
if isinstance(a, SymNodeVariable) or isinstance(b, SymNodeVariable):
from .builder import wrap_fx_proxy
return wrap_fx_proxy(
tx=tx,
proxy=tx.output.create_proxy(
"call_function",
self.fn,
*proxy_args_kwargs([a, b], {}),
),
**VariableTracker.propagate(self, [a, b]),
)
# convert min/max to torch ops
if b.is_python_constant():
kwargs = {"min": b} if (self.fn is max) else {"max": b}
result = variables.TorchVariable(torch.clamp).call_function(
tx, [a], kwargs
)
else:
fn = {max: torch.maximum, min: torch.minimum}[self.fn]
result = variables.TorchVariable(fn).call_function(tx, [a, b], {})
# return unspec if both a, b are unspec or const
if all(
isinstance(
i,
(
variables.UnspecializedPythonVariable,
variables.ConstantVariable,
),
)
for i in [a, b]
):
if any([isinstance(val, FakeItemVariable) for val in [a, b]]):
return variables.FakeItemVariable.from_tensor_variable(result)
if b.is_python_constant():
raw_b = b.as_python_constant()
else:
raw_b = b.raw_value
if self.fn is max:
raw_res = max(a.raw_value, raw_b)
else:
raw_res = min(a.raw_value, raw_b)
need_unwrap = any(
x.need_unwrap
for x in [a, b]
if isinstance(x, variables.UnspecializedPythonVariable)
)
return variables.UnspecializedPythonVariable.from_tensor_variable(
result, raw_res, need_unwrap
)
# otherwise return tensor
else:
return result
elif isinstance(a, variables.ConstantVariable) and isinstance(
b, variables.ConstantVariable
):
if self.fn is max:
return variables.ConstantVariable(max(a.value, b.value))
else:
return variables.ConstantVariable(min(a.value, b.value))
elif isinstance(a, SymNodeVariable) or isinstance(b, SymNodeVariable):
proxy = tx.output.create_proxy(
"call_function", self.fn, *proxy_args_kwargs([a, b], {})
)
return SymNodeVariable.create(tx, proxy, None)
else:
unimplemented(f"unsupported min / max over args {str(a)}, {str(b)}")
call_min = _call_min_max
call_max = _call_min_max
def call_range(self, tx, *args):
    """Build a RangeVariable for builtin ``range()``.

    Constant / unspecialized arguments are specialized directly; if any
    argument is symbolic (SymNode), each symbolic arg is first guarded
    on its current concrete value via ``evaluate_expr``.
    """
    if self.unspec_python_args(*args) or self.constant_args(*args):
        args, _ = specialize_args_kwargs(tx, args, {})
        return variables.RangeVariable(args)
    elif self._dynamic_args(*args):

        def guard_if_dyn(arg):
            # Evaluating the expression installs a guard that pins the
            # symbolic value to its current concrete value.
            if isinstance(arg, SymNodeVariable):
                return arg.evaluate_expr(tx.output)
            elif isinstance(arg, ConstantVariable):
                return arg.as_python_constant()
            return arg

        args = [variables.ConstantVariable(guard_if_dyn(arg)) for arg in args]
        return variables.RangeVariable(args)
    # None no-ops this handler and lets the driving function proceed
    return None
def _dynamic_args(self, *args, **kwargs):
    """True if any positional or keyword argument is a SymNodeVariable."""
    every_arg = itertools.chain(args, kwargs.values())
    return any(isinstance(x, SymNodeVariable) for x in every_arg)
def call_slice(self, tx, *args):
return variables.SliceVariable(args)
def _dyn_proxy(self, tx, *args, **kwargs):
from .builder import wrap_fx_proxy
options = VariableTracker.propagate(self, args, kwargs.values())
return wrap_fx_proxy(
tx,
tx.output.create_proxy(
"call_function", self.fn, *proxy_args_kwargs(args, kwargs)
),
**options,
)
def _call_iter_tuple_list(self, tx, obj=None, *args, **kwargs):
if self._dynamic_args(*args, **kwargs):
return self._dyn_proxy(tx, *args, **kwargs)
cls = variables.BaseListVariable.cls_for(self.fn)
if obj is None:
return cls(
[],
mutable_local=MutableLocal(),
)
elif obj.has_unpack_var_sequence(tx):
guards = set()
if obj.source and not is_constant_source(obj.source):
if isinstance(obj, TupleIteratorVariable):
guards.add(obj.source.make_guard(GuardBuilder.TUPLE_ITERATOR_LEN))
else:
guards.add(obj.source.make_guard(GuardBuilder.LIST_LENGTH))
return cls(
list(obj.unpack_var_sequence(tx)),
mutable_local=MutableLocal(),
guards=guards,
).add_options(self, obj)
call_iter = _call_iter_tuple_list
call_tuple = _call_iter_tuple_list
call_list = _call_iter_tuple_list
@staticmethod
def is_supported_call_dict_arg(tx, arg):
return (
arg is None
or isinstance(arg, ConstDictVariable)
or (
isinstance(
arg,
(
ListVariable,
TupleVariable,
ListIteratorVariable,
),
)
and all(
isinstance(x, (ListVariable, TupleVariable))
and isinstance(
x.unpack_var_sequence(tx)[0], (ConstantVariable, EnumVariable)
)
for x in arg.unpack_var_sequence(tx)
)
)
)
def call_callable(self, tx, arg):
from .functions import BaseUserFunctionVariable
if isinstance(
arg, (variables.UserDefinedClassVariable, BaseUserFunctionVariable)
):
return variables.ConstantVariable(True).add_options(arg)
@staticmethod
def call_dict_helper(tx, user_cls, arg, **options):
if arg is None:
return ConstDictVariable(
{}, user_cls, mutable_local=MutableLocal()
).add_options(options)
elif isinstance(arg, variables.ConstDictVariable):
return arg.clone(
user_cls=user_cls, mutable_local=MutableLocal()
).add_options(options)
elif isinstance(
arg,
(
ListVariable,
TupleVariable,
ListIteratorVariable,
),
):
items = user_cls()
for x in arg.unpack_var_sequence(tx):
k = x.unpack_var_sequence(tx)[0].as_python_constant()
v = x.unpack_var_sequence(tx)[1]
items.update({k: v})
return ConstDictVariable(
items, user_cls, mutable_local=MutableLocal()
).add_options(options)
else:
raise AssertionError("call_dict_helper with illegal arg")
def call_dict(self, tx, *args, **kwargs):
if not (args or kwargs):
return self.call_dict_helper(tx, dict, None)
elif (
not kwargs
and len(args) == 1
and self.is_supported_call_dict_arg(tx, args[0])
):
return self.call_dict_helper(tx, dict, args[0])
elif not args and kwargs:
return variables.ConstDictVariable(
dict(kwargs), user_cls=dict, mutable_local=MutableLocal()
)
else:
unimplemented(f"dict(): {args} {kwargs}")
def call_zip(self, tx, *args):
options = VariableTracker.propagate(self, args)
if all(x.has_unpack_var_sequence(tx) for x in args):
items = [
variables.TupleVariable(list(item), **options)
for item in zip(*[arg.unpack_var_sequence(tx) for arg in args])
]
return variables.TupleVariable(items, **options)
def call_enumerate(self, tx, *args):
options = VariableTracker.propagate(self, args)
if len(args) == 1:
start = 0
else:
assert len(args) == 2
assert isinstance(args[1], variables.ConstantVariable)
start = args[1].as_python_constant()
if args[0].has_unpack_var_sequence(tx):
items = [
variables.TupleVariable(
[variables.ConstantVariable(idx, **options), var],
**options,
)
for idx, var in enumerate(args[0].unpack_var_sequence(tx), start)
]
return variables.TupleVariable(items, **options)
def call_len(self, tx, *args, **kwargs):
return args[0].call_method(tx, "__len__", args[1:], kwargs)
def call_getitem(self, tx, *args, **kwargs):
if self.unspec_python_args(*args, **kwargs):
args, kwargs = specialize_args_kwargs(tx, args, kwargs)
return args[0].call_method(tx, "__getitem__", args[1:], kwargs)
def call_isinstance(self, tx, arg, isinstance_type):
arg_type = arg.python_type()
isinstance_type = isinstance_type.as_python_constant()
if isinstance(arg, variables.TensorVariable) and arg.dtype is not None:
return variables.ConstantVariable(arg.call_isinstance(isinstance_type))
# UserDefinedObject with C extensions can have torch.Tensor attributes,
# so break graph.
if isinstance(arg, variables.UserDefinedObjectVariable) and isinstance(
arg.value, types.MemberDescriptorType
):
unimplemented(
f"isinstance called on UserDefinedClass {arg} {isinstance_type}"
)
# handle __instancecheck__ defined in user class
if (
isinstance(arg, variables.UserDefinedObjectVariable)
and "__instancecheck__" in isinstance_type.__class__.__dict__
):
return variables.ConstantVariable(
isinstance_type.__class__.__instancecheck__(isinstance_type, arg.value)
)
try:
val = issubclass(arg_type, isinstance_type)
except TypeError:
val = arg_type is isinstance_type
return variables.ConstantVariable(val)
def call_super(self, tx, a, b):
source = (
None
if a.source is None or b.source is None
else SuperSource(a.source, b.source)
)
return variables.SuperVariable(a, b, source=source)
def call_next(self, tx, arg):
    """Implement builtin ``next()`` for traced sequences.

    For a list iterator: take its next value and replace the tracked
    iterator with its advanced successor everywhere.  For a list-like:
    return the first element without advancing anything.  Other types
    fall through (return None).
    """
    if isinstance(arg, variables.ListIteratorVariable):
        val, next_iter = arg.next_variables()
        tx.replace_all(arg, next_iter)
        return val
    elif isinstance(arg, variables.BaseListVariable):
        return arg.items[0].add_options(self, arg)
def call_hasattr(self, tx, obj, attr):
if attr.is_python_constant():
name = attr.as_python_constant()
return obj.call_hasattr(tx, name).add_options(self, obj, attr)
def call_map(self, tx, fn, seq):
if seq.has_unpack_var_sequence(tx):
items = [fn.call_function(tx, [x], {}) for x in seq.unpack_var_sequence(tx)]
return variables.TupleVariable(items).add_options(self, fn, seq)
def call_sum(self, tx, seq, **kwargs):
    """Trace builtin ``sum`` over a traced sequence.

    Fast path: a list/tuple of int/float constants (with no kwargs) is
    folded directly.  Otherwise, any unpackable sequence is lowered to
    ``functools.reduce(operator.add, seq, start)``.
    """
    # Special case for sum on tuple of floats and ints
    if (
        isinstance(seq, (variables.ListVariable, variables.TupleVariable))
        and all(
            isinstance(x, variables.ConstantVariable)
            and isinstance(x.value, (int, float))
            for x in seq.items
        )
        and not kwargs
    ):
        new_list = [x.value for x in seq.items]
        return variables.ConstantVariable(sum(new_list))
    if seq.has_unpack_var_sequence(tx):
        # Bug fix: ``start`` is sum()'s initial accumulator value, not an
        # index.  The old code sliced ``items[start:]`` (dropping elements)
        # and then always reduced from 0.
        start = kwargs.pop("start", variables.ConstantVariable(0))
        assert not kwargs
        items = seq.unpack_var_sequence(tx)
        return BuiltinVariable(functools.reduce).call_function(
            tx,
            [
                BuiltinVariable(operator.add),
                variables.TupleVariable(items),
                start.add_options(self, seq),
            ],
            {},
        )
def call_reduce(self, tx, function, iterable, initializer=None):
    """Implement ``functools.reduce`` over an unpackable traced sequence.

    Mirrors Python semantics: with no initializer the first element
    seeds the accumulator.  Falls through (returns None) when the
    iterable cannot be unpacked.
    """
    if iterable.has_unpack_var_sequence(tx):
        items = iterable.unpack_var_sequence(tx)
        if initializer is None:
            value, items = items[0], items[1:]
        else:
            value = initializer
        for element in items:
            value = function.call_function(tx, [value, element], {})
        return value
def call_getattr(
self, tx, obj: VariableTracker, name_var: VariableTracker, default=None
):
from . import (
ConstantVariable,
GetAttrVariable,
PythonModuleVariable,
TorchVariable,
UserFunctionVariable,
)
from .builder import VariableBuilder
options = VariableTracker.propagate(self, obj, name_var)
guards = options["guards"]
name = name_var.as_python_constant()
if not name_var.is_python_constant():
unimplemented("non-const getattr() name")
if tx.output.side_effects.is_attribute_mutation(obj):
try:
# re-read a pending side effect?
return tx.output.side_effects.load_attr(obj, name).add_options(options)
except KeyError:
pass
if default is not None:
hasattr_var = self.call_hasattr(tx, obj, name_var)
guards.update(hasattr_var.guards)
assert hasattr_var.as_python_constant() in (True, False)
if not hasattr_var.as_python_constant():
return default.add_guards(guards)
if obj.source:
source = AttrSource(obj.source, name)
options["source"] = source
else:
source = None
if isinstance(obj, variables.NNModuleVariable):
return obj.var_getattr(tx, name).add_options(options)
elif isinstance(obj, variables.TensorVariable) and name == "grad":
if source:
# We are going to be raising this tensor as grapharg. So, ensure
# that we have real grad value instead of fake tensor value.
# Walk through the inputs of the subgraph and find if we already
# have the original tensor stored in the graphargs.
for grapharg in tx.output.graphargs:
if grapharg.source == source.base:
example_value = grapharg.example.grad
return VariableBuilder(tx, source)(example_value).add_options(
options
)
unimplemented("tensor grad")
else:
unimplemented("tensor grad")
elif isinstance(
obj,
(
variables.TensorVariable,
variables.NamedTupleVariable,
variables.ConstantVariable,
variables.UserDefinedClassVariable,
variables.UserDefinedObjectVariable,
),
):
try:
return (
obj.var_getattr(tx, name).clone(source=source).add_options(options)
)
except NotImplementedError:
return GetAttrVariable(obj, name, **options)
elif isinstance(obj, TorchVariable):
member = getattr(obj.value, name)
if is_allowed(member):
return TorchVariable(member, **options)
elif ConstantVariable.is_literal(member):
return ConstantVariable(member, **options)
else:
return VariableBuilder(tx, source)(member).add_guards(guards)
elif isinstance(obj, (PythonModuleVariable, DummyModule)):
member = obj.value.__dict__[name]
if config.replay_record_enabled:
tx.exec_recorder.record_module_access(obj.value, name, member)
return VariableBuilder(tx, source)(member).add_guards(guards)
elif istype(obj, UserFunctionVariable) and name in ("__name__", "__module__"):
return ConstantVariable(
getattr(obj.fn, name), **VariableTracker.propagate(obj)
)
else:
try:
return (
obj.var_getattr(tx, name).clone(source=source).add_options(options)
)
except NotImplementedError:
return GetAttrVariable(obj, name, **options)
def call_setattr(
self, tx, obj: VariableTracker, name_var: VariableTracker, val: VariableTracker
):
if isinstance(obj, variables.DataClassVariable):
return obj.call_method(tx, "__setattr__", [name_var, val], {})
elif (
tx.output.side_effects.is_attribute_mutation(obj)
and name_var.is_python_constant()
):
tx.output.side_effects.store_attr(obj, name_var.as_python_constant(), val)
return val.add_options(self, obj, name_var)
elif isinstance(obj, variables.UserDefinedObjectVariable):
unimplemented(
f"setattr(UserDefinedObjectVariable) {type(obj.value).__setattr__}"
)
elif isinstance(obj, variables.NNModuleVariable):
obj.convert_to_unspecialized(tx)
def call_delattr(self, tx, obj: VariableTracker, name_var: VariableTracker):
return self.call_setattr(tx, obj, name_var, variables.DeletedVariable())
def call_type(self, tx, obj: VariableTracker):
from .builder import VariableBuilder
try:
py_type = obj.python_type()
except NotImplementedError:
py_type = None
if istype(obj, variables.TupleVariable):
return BuiltinVariable(py_type).add_options(self, obj)
if py_type is not None and obj.source:
return VariableBuilder(tx, TypeSource(obj.source))(py_type).add_options(
self, obj
)
raise UserError(
UserErrorType.ANTI_PATTERN,
"Can't call type() on generated custom object. "
"Please use __class__ instead",
)
def call_reversed(self, tx, obj: VariableTracker):
if obj.has_unpack_var_sequence(tx):
items = list(reversed(obj.unpack_var_sequence(tx)))
return variables.TupleVariable(
items, **VariableTracker.propagate(self, obj)
)
def call_sorted(self, tx, obj: VariableTracker, **kwargs):
if (
obj.has_unpack_var_sequence(tx)
and not isinstance(obj, variables.TensorVariable)
and all(x.is_python_constant() for x in obj.unpack_var_sequence(tx))
):
function = kwargs.pop("key", None)
reverse = kwargs.pop(
"reverse", ConstantVariable(False)
).as_python_constant()
assert len(kwargs) == 0
if function:
items = sorted(
obj.unpack_var_sequence(tx),
key=lambda x: function.call_function(
tx, [x], {}
).as_python_constant(),
reverse=reverse,
)
else:
items = sorted(
obj.unpack_var_sequence(tx),
key=lambda x: x.as_python_constant(),
reverse=reverse,
)
return variables.ListVariable(items, **VariableTracker.propagate(self, obj))
def call_chain(self, tx, *args):
if all(obj.has_unpack_var_sequence(tx) for obj in args):
items = []
for obj in args:
items.extend(obj.unpack_var_sequence(tx))
return variables.TupleVariable(
items, **VariableTracker.propagate(self, *args)
)
def call_islice(self, tx, iterable, *args):
if iterable.has_unpack_var_sequence(tx) and all(
x.is_python_constant() for x in args
):
const_args = [x.as_python_constant() for x in args]
items = iterable.unpack_var_sequence(tx)
items = list(itertools.islice(items, *const_args))
return variables.TupleVariable(
items, **VariableTracker.propagate(self, iterable, *args)
)
# neg is a constant fold function, so we only get here if constant fold is not valid
def call_neg(self, tx, a):
if isinstance(a, SymNodeVariable):
return SymNodeVariable.create(
tx,
(operator.neg)(a.as_proxy()),
sym_num=None,
)
# None no-ops this handler and lets the driving function proceed
return None
def call_id(self, tx, *args):
if len(args) > 0 and isinstance(args[0], variables.NNModuleVariable):
nn_mod_variable = args[0]
mod = tx.output.get_submodule(nn_mod_variable.module_key)
return variables.ConstantVariable(id(mod))
else:
unimplemented(f"call_id with args {args}")
def _comparison(self, tx, left, right):
"""
Used to implement comparison operators for different types.
For example, list1 < list2 is implemented differently from tensor1 < tensor2
"""
from . import (
BaseListVariable,
ConstantVariable,
TensorVariable,
UserFunctionVariable,
)
from .lists import SizeVariable
from .tensor import (
supported_const_comparison_ops,
supported_tensor_comparison_ops,
)
op = self.fn
def _unimplemented():
unimplemented(f"comparison {typestr(left)} {op} {typestr(right)}")
if isinstance(left, UserFunctionVariable):
if op not in supported_const_comparison_ops.values():
_unimplemented()
if not isinstance(right, UserFunctionVariable):
_unimplemented()
return ConstantVariable(op(left.fn, right.fn))
# Note, we have a rare BaseListVariable subtype mismatch with valid comparison
# x = torch.randn([3, 3])
# x.size() == (3, 3) # True
# (3, 3) == x.size() # True
if isinstance(left, (SizeVariable, TupleVariable)) and isinstance(
right, (TupleVariable, SizeVariable)
):
return BaseListVariable.list_compare(tx, op, left, right)
if isinstance(left, BaseListVariable):
if not type(left) == type(right): # Mismatch in BaseListVariable subclasses
_unimplemented()
return BaseListVariable.list_compare(tx, op, left, right)
if isinstance(left, TensorVariable):
from .builder import wrap_fx_proxy
if op not in supported_tensor_comparison_ops.values():
_unimplemented()
return wrap_fx_proxy(
tx,
op(left.as_proxy(), right.as_proxy()),
)
if isinstance(left, SymNodeVariable) or isinstance(right, SymNodeVariable):
if op not in supported_tensor_comparison_ops.values():
_unimplemented()
return SymNodeVariable.create(
tx,
op(left.as_proxy(), right.as_proxy()),
sym_num=None,
)
_unimplemented()
# and_ is a constant fold function, so we only get here if constant fold is not valid
def call_and_(self, tx, a, b):
if isinstance(a, SymNodeVariable) and isinstance(b, SymNodeVariable):
return SymNodeVariable.create(
tx,
tx.output.create_proxy(
"call_function", operator.and_, *proxy_args_kwargs([a, b], {})
),
sym_num=None,
)
# None no-ops this handler and lets the driving function proceed
return None
# or_ is a constant fold function, so we only get here if constant fold is not valid
def call_or_(self, tx, a, b):
if isinstance(a, SymNodeVariable) and isinstance(b, SymNodeVariable):
return SymNodeVariable.create(
tx,
tx.output.create_proxy(
"call_function", operator.or_, *proxy_args_kwargs([a, b], {})
),
sym_num=None,
)
# None no-ops this handler and lets the driving function proceed
return None
def call_not_(self, tx, a):
if isinstance(a, SymNodeVariable):
return SymNodeVariable.create(
tx,
tx.output.create_proxy(
"call_function", operator.not_, *proxy_args_kwargs([a], {})
),
sym_num=None,
)
return None
call_eq = _comparison
call_gt = _comparison
call_lt = _comparison
call_ge = _comparison
call_le = _comparison
call_ne = _comparison
call_is_ = _comparison
call_is_not = _comparison
| [
"[email protected]"
] | |
5300271b0b676978f2319aff708095962e6f6c52 | 49cc32d5859e9002cb4b94ade25d72f5f4fe1612 | /CLASE5_PYTHON_UMAKER/codigo5.py | 52b8bdabb03966ebe2a3f86723ba2f2f85b85de5 | [] | no_license | jorgepdsML/DIGITAL-IMAGE-PROCESSING-PYTHON | c8441215b4cf9e912dad1885a82058c1b0bbb872 | 781c8c6d583aebda6381a301cdc33ad4d09f20c5 | refs/heads/master | 2021-06-26T00:06:44.344201 | 2021-01-21T17:41:36 | 2021-01-21T17:41:36 | 194,336,928 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 479 | py | class point():
def __init__(self,a,b):
self.x=a
self.y=b
def coordenada(self):
print(self.x,self.y)
def __add__(self, other):
x=self.x+other.x
y=self.y+other.y
return (x,y)
def __call__(self,*args):
suma=0
for val in args:
suma=suma+val
return suma
# Instantiate a new point object.
a1=point(10,10)
# Invoke __call__ on the instance: sums all positional arguments.
d=a1(100,200,1000,500,1000)
print(d)  # -> 2800
"[email protected]"
] | |
242923c5197a8ee760b120a5605b8afca943eab0 | f99cca94f74c69bc518e298c14140534e18eabd3 | /OrcLib/Test/TestNet.py | 07cd62a60036cd81afff87edaf6b1fcf167c81cd | [] | no_license | pubselenium/OrcTestToolsKit | d6d838d9937d2c4d86941e317cb3ff096b58e52d | f3ccbbceaed4f4996f6907a2f4880c2fd3f82bbb | refs/heads/master | 2021-04-29T05:15:53.240714 | 2016-12-30T09:42:53 | 2016-12-30T09:42:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,478 | py | import unittest
from OrcLib.LibTest import OrcTest
from OrcLib import init_log
from OrcLib.LibNet import OrcParameter
class TestOrcParameter(unittest.TestCase):
    # NOTE(review): Python 2 file (``print result`` statements below).
    # These are print-and-inspect style tests with no assertions.
    def test_send_para(self):
        """
        Serialize several value types (str, list, None, int) via
        OrcParameter.send_para and print each result with its type.
        """
        OrcTest.test_print_begin()
        init_log()
        _para_01 = OrcParameter().send_para("abc")
        OrcTest.test_print_result("Parameter para_01 is: %s, type is %s" % (_para_01, type(_para_01)))
        _para_02 = OrcParameter().send_para(["abc", "def"])
        OrcTest.test_print_result("Parameter para_02 is: %s, type is %s" % (_para_02, type(_para_02)))
        _para_03 = OrcParameter().send_para(None)
        OrcTest.test_print_result("Parameter para_03 is: %s, type is %s" % (_para_03, type(_para_03)))
        _para_04 = OrcParameter().send_para(120)
        OrcTest.test_print_result("Parameter para_04 is: %s, type is %s" % (_para_04, type(_para_04)))
        OrcTest.test_print_end()
    def test_save_pic(self):
        """
        Exercise OrcHttpService("Driver").save_pic with a png file name.
        """
        OrcTest.test_print_begin()
        from OrcLib.LibNet import OrcHttpService
        service = OrcHttpService("Driver")
        service.save_pic("abc.png")
        OrcTest.test_print_end()
    def test_source_list(self):
        """
        GET the BatchDef resource with an empty parameter dict and print
        the OrcResult status/message/data.
        """
        OrcTest.test_print_begin()
        from OrcLib.LibNet import OrcResource
        from OrcLib.LibNet import OrcResult
        resource = OrcResource("BatchDef", "JSON")
        result = resource.get(parameter=dict())
        if isinstance(result, OrcResult):
            OrcTest.test_print_result(result.status, "status")
            OrcTest.test_print_result(result.message, "message")
            OrcTest.test_print_result(result.data, "data")
        else:
            print result
        OrcTest.test_print_end()
    def test_source_sig(self):
        """
        GET a single BatchDef resource by id (path=1000000024) and print
        the OrcResult status/message/data.
        """
        OrcTest.test_print_begin()
        from OrcLib.LibNet import OrcResource
        from OrcLib.LibNet import OrcResult
        resource = OrcResource("BatchDef", "JSON")
        result = resource.get(path=1000000024)
        if isinstance(result, OrcResult):
            OrcTest.test_print_result(result.status, "status")
            OrcTest.test_print_result(result.message, "message")
            OrcTest.test_print_result(result.data, "data")
        else:
            print result
        OrcTest.test_print_end()
| [
"[email protected]"
] | |
396b07af836678cbf34f87d2c44a64e0513292ea | 98cd5ddf45a73aea64bbfac0c0104829d7231b81 | /T - Image + Hexagon/main.py | 4a4ad6510a74b8e54b6218bb846ee6c486774044 | [] | no_license | atheis4/ETC_Modes_Extra | 42508d523cfe632a3335e29f6e1e40af91df231b | d0ce221562105382a7a73cc6d280f4ad0eabf6f3 | refs/heads/master | 2022-04-04T11:15:07.335910 | 2020-01-03T20:27:32 | 2020-01-03T20:27:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,426 | py | import os
import pygame
import time
import random
import glob
import pygame.gfxdraw
images = []
image_index = 0
image_x=100
image_y=100
image_size_x=100
image_size_y=100
border_x = 1
border_y = 1
square_start_x = 1
square_start_y = 1
square_end_x = 1
square_end_y = 1
square_size = 50
trigger = False
def setup(screen, etc):
global images, image_index
for filepath in sorted(glob.glob(etc.mode_root + '/Images/*.png')):
filename = os.path.basename(filepath)
print 'loading image file: ' + filename
img = pygame.image.load(filepath)
images.append(img)
def draw(screen, etc):
global trigger, image_x, image_y, image_size_x, image_size_y, images, image_index, square_size, border_x, border_y, square_start_x, square_start_y, square_end_x, square_end_y
color = etc.color_picker()
if etc.audio_trig or etc.midi_note_new :
trigger = True
if trigger == True :
image_x=(random.randrange(-50,1080))
image_y=(random.randrange(-50,600))
image_index += 1
if image_index == len(images) : image_index = 0
image = images[image_index]
image_size_x=int(image.get_width() * etc.knob1)
image_size_y=int(image.get_height() * etc.knob1)
image = pygame.transform.scale(image,(image_size_x, image_size_y))
border_x = int(etc.knob2 * image.get_width()) - (image.get_width() / 2)
border_y = int(etc.knob2 * image.get_height()) - (image.get_height() / 2)
square_start_x = image_x - border_x
square_start_y = image_y - border_y
square_end_x = image_size_x + (border_x*2)
square_end_y = image_size_y + (border_y*2)
pygame.draw.rect(screen, color, (square_start_x, square_start_y, square_end_x, square_end_y), 0)
#TOP TRIANGLE
pygame.gfxdraw.filled_trigon(screen, square_start_x, square_start_y, (square_end_x+image_x-border_x)-1, square_start_y, (image_x+image_size_x/2),square_start_y-((image_size_y+border_y*2)/2) , color)
#BOTTOM TRIGON
pygame.gfxdraw.filled_trigon(screen, square_start_x, image_y+square_end_y-border_y, (square_end_x+image_x-border_x)-1, image_y+square_end_y-border_y, (image_x+image_size_x/2),(image_y+square_end_y-border_y)+((image_size_y+border_y*2)/2) , color)
image.fill((255, 255, 255, etc.knob3 * 255), None, pygame.BLEND_RGBA_MULT)
screen.blit(image, (image_x,image_y))
trigger = False
| [
"[email protected]"
] | |
968a736930a6730a0b2049734b9b83b6de0fac28 | 9a0e25591deef948bd5957ac79131bc01594b0bb | /users/urls/urls.py | 5dcecf097a0cfcdd74dcdd19e0bb389c5cd3a355 | [] | no_license | ursusma/HiCommunity | ad31342d6f5ee0bc28899dc1cb199fbe1085e554 | 81e2340264cce983b889dacdb01bbeeb2cb67f21 | refs/heads/master | 2021-07-25T10:58:04.239531 | 2017-10-26T14:17:05 | 2017-10-26T14:17:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 95 | py | # coding=utf-8
from django.conf.urls import url
from users import views
urlpatterns = [
]
| [
"[email protected]"
] | |
a971057d036c9e02983eea09d044b3cc1531cccc | 526bf18a8695862067c817f432ab197ceb645f39 | /migrations/versions/9e01343b62ef_cars_added_fields.py | 4721e5e3683b75236d82cd6644df0c3fd3d99c76 | [] | no_license | sintimaski/bfs-be | a7fd623911a2220face49a0ef84574f3fd7a09a8 | 964a9c7e9cc876aaf8b0723d6b3f26bd378c3721 | refs/heads/master | 2023-08-02T09:00:44.855055 | 2021-09-22T13:07:01 | 2021-09-22T13:07:01 | 339,531,610 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 659 | py | """cars added fields
Revision ID: 9e01343b62ef
Revises: 172fb3a90b3b
Create Date: 2020-10-19 07:41:26.893114
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9e01343b62ef'
down_revision = '172fb3a90b3b'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('car_product', sa.Column('trim', sa.Text(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('car_product', 'trim')
# ### end Alembic commands ###
| [
"[email protected]"
] | |
9d3fb5e9f0d13d0dac39ac54ebcd262cccdd485c | 5219ea9d40a5e6187fc047d0e463ecca47654f72 | /project_name/urls.py | baecf57ddd274ae81eaeab21df6fd7ecd4c440b1 | [] | no_license | wo0dyn/django-project-template | b5bb7ffec3a0ecd90df34fc60b6c13422e7f9de1 | 68a0eec61a09486b662cbdf72b13cd5c7b476810 | refs/heads/master | 2021-01-17T07:24:24.012032 | 2013-06-07T08:22:49 | 2013-06-07T08:22:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 915 | py | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.http import HttpResponse, HttpResponsePermanentRedirect
from ratelimitbackend import admin
admin.autodiscover()
robots = lambda _: HttpResponse('User-agent: *\nDisallow:\n',
mimetype='text/plain')
favicon = lambda _: HttpResponsePermanentRedirect(
'{0}core/img/favicon.png'.format(settings.STATIC_URL)
)
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
)
urlpatterns += patterns('ratelimitbackend.views',
url(r'^login/$', 'login', name='login'),
url(r'^robots.txt$', robots),
url(r'^favicon.ico$', favicon),
)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"[email protected]"
] | |
d308e86366cb8b7a3aace35c26d3ce733fd7b08a | 3c8701e04900389adb40a46daedb5205d479016c | /oldboy-python18/day08-接口-网络/self/网络编程/06-模拟ssh-加上报头/服务端.py | c4da2e73dc660420a2babf440c3e5581b3ee967d | [] | no_license | huboa/xuexi | 681300653b834eaf506f49987dcca83df48e8db7 | 91287721f188b5e24fbb4ccd63b60a80ed7b9426 | refs/heads/master | 2020-07-29T16:39:12.770272 | 2018-09-02T05:39:45 | 2018-09-02T05:39:45 | 73,660,825 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,213 | py | ####建立连接
import socket
import struct
import subprocess
phone=socket.socket(socket.AF_INET,socket.SOCK_STREAM)###tcp
phone.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1) ###复用
phone.bind(('127.0.0.1',8080))
phone.listen(5)
print('server start...')
while True: ###连接循环
conn,client_addr=phone.accept()
print(conn,client_addr)
###基于建立的连接,收发消息
while True:
try:
cmd=conn.recv(1024)
if not cmd:break ###针对对linux异常断开就跳出
print('cmd',cmd)
res=subprocess.Popen(cmd.decode('utf-8'),
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout=res.stdout.read()
stderr=res.stderr.read()
##先发报头(固定长度)
header=struct.pack('i',len(stdout)+len(stderr))
conn.send(header)
##再发真实数据
conn.send(stdout)
conn.send(stderr)
except Exception: ##针对windows异常跳出
break
##挂电话
conn.close()
###关机
phone.close()
| [
"[email protected]"
] | |
0de2e57e651606fa39a419b990f8d4e0e9f98820 | afd74aa3e8c847d557828115f48f60f696fdfe95 | /C38/validate_getattribute.py | 9e557d5b18eea803ad61c04b81201237089827d8 | [
"MIT"
] | permissive | BetTom/learningpython | f1b957607f92b4acf66aba1d22090f519824822a | 47e78041e519ecd2e00de1b32f6416b56ce2616c | refs/heads/master | 2021-10-11T09:45:40.608420 | 2019-01-24T09:44:05 | 2019-01-24T09:44:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,100 | py | class CardHolder(object):
acctlen = 8
retireage = 59.5
def __init__(self, acct, name, age, addr):
self.acct = acct
self.name = name
self.age = age
self.addr = addr
def __getattribute__(self, name):
superget = object.__getattribute__
if name == 'acct':
return superget(self, 'acct')[:-3] + '***'
elif name == 'remain':
return superget(self, 'retireage') - superget(self, 'age')
else:
return superget(self, name)
def __setattr__(self, name, value):
if name == 'name':
value = value.lower().replace(' ', '_')
elif name == 'age':
if value < 0 or value > 150:
raise ValueError('invalid age')
elif name == 'acct':
value = value.replace('-', '')
if len(value) != self.acctlen:
raise TypeError('invalid acct number')
elif name == 'remain':
raise TypeError('cannot set remain')
self.__dict__[name] = value
# object.__setattr__(self, name, value)
| [
"[email protected]"
] | |
9f2a946202864a07e3ec0b8b972e50a4b51e4222 | 1803b6d5b6cd28f6719c2584f28d581811526d26 | /p57_longerthan_specified.py | 81fde2b4965d7b30b5967916920792216a7137a5 | [] | no_license | augustedupin123/python_practice | 0ee2ebd30810f8df82d9e26b8d52328d052e1a5e | 5ba1f9e4598d1eaa7f5f6f36efb5f96ca4be18a0 | refs/heads/master | 2022-12-08T06:15:48.808986 | 2020-08-31T19:16:15 | 2020-08-31T19:16:15 | 266,285,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | #Write a python program to find the list of words that are longer
#than n from a given list of words.
def list_of_words(l,n):
listreq = []
l1 = l.split()
for i in l1:
if len(i)>n:
listreq.append(i)
return listreq
a = input('enter the list')
n1 = int(input('enter n'))
print (list_of_words(a,n1))
| [
"[email protected]"
] | |
82f465c11b316b7121d832c85659e050bd9a19b4 | 978c9a1dd27a30b32eceed7f1518a26292695891 | /python/2021/codewars/calculateinversions.py | 513e292b0b59d1a782154f9bfaeb3538c3fe3baa | [] | no_license | detcitty/100DaysOfCode | 4da3407bdc4170f9d042f49e6c94a8469f8808f5 | a3d989ea56491f89ece5191d5246166ca01d2602 | refs/heads/master | 2023-08-09T04:45:51.842305 | 2023-07-21T17:02:08 | 2023-07-21T17:02:08 | 178,976,277 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 887 | py | # https://www.codewars.com/kata/537529f42993de0e0b00181f/train/python
from itertools import combinations
def count_inversions(array):
locations = []
for count, value in enumerate(array):
idx = value - 1
diff = idx - count
locations.append(diff)
list_combos = list(combinations(array, 2))
# Find the adjacent
# Try to sort the list and count the number of times it was sorted
for i in range(len(list_combos)):
pass
return(list_combos)
test1 = [1, 2, 3, 4] # => 0 inversions
test2 = [1, 3, 2, 4] # => 1 inversion: 2 and 3
test3 = [4, 1, 2, 3] # => 3 inversions: 4 and 1, 4 and 2, 4 and 3
test4 = [4, 3, 2, 1] # => 6 inversions: 4 and 3, 4 and 2, 4 and 1, 3 and 2, 3 and 1, 2 and 1
test5 = [5, 4, 3, 2, 1] # => 6 inversions: 4 and 3, 4 and 2, 4 and 1, 3 and 2, 3 and 1, 2 and 1
print(count_inversions(test1)) | [
"[email protected]"
] | |
eb96d7ba59e15da0b0f51e76d65639b8b35c5cc1 | 1d277498f96998cbbdc475db17191b7d6dc371ab | /rap/management/commands/play.py | 24ebadfb8155bc29f96f1cdd8ee0b3cd3017fe27 | [] | no_license | everythingability/rap | cfaccfbac75b7ff2522fc9bc7debb0fd3eec3559 | 44e550f1ca0ef68c1277d9904bd546c52d51a3e5 | refs/heads/master | 2022-12-09T07:52:27.961493 | 2020-03-18T19:11:23 | 2020-03-18T19:11:23 | 248,318,782 | 0 | 0 | null | 2022-12-08T03:50:01 | 2020-03-18T19:02:14 | Python | UTF-8 | Python | false | false | 2,544 | py | from django.core.management.base import BaseCommand, CommandError
from rap.models import Project, GTRCategory, HECategory, HEResearchArea, Person, Organisation
import os, sys
import csv, json
cats =["Archaeological Theory",
"Archaeology Of Human Origins",
"Archaeology of Literate Soc.",
"Architecture HTP",
"Environmental planning",
"Heritage Management",
"Landscape & Environ. Archaeol.",
"Prehistoric Archaeology",
"Science-Based Archaeology"]
def fixDate(s): # 01/02/2020 to YYYY-MM-DD
try:
if s !=None:
dItems = s.split("/")
year = dItems[2]
month = dItems[1]
day = dItems[0]
d = f"{year}-{month}-{day}"
return d
else:
return None
except:
return None
dir_path = os.path.dirname(os.path.realpath(__file__))
class Command(BaseCommand):
# python manage.py import_tools file="tools.csv"
help = 'meant to help me get started, importing a lot of initial data etc'
def add_arguments(self, parser):
''#parser.add_argument('file', type=str)
def handle(self, *args, **options):
#filename = options['file']
try:
#Project, GTRCategory, HECategory, HEResearchArea, Person, Organisation
hecategories = HECategory.objects.all()
gtrs = GTRCategory.objects.all()
heresearchareas = HEResearchArea.objects.order_by('hecategory')
previous_category = None
for n,heresearcharea in enumerate(heresearchareas):
category = heresearcharea.hecategory ######### MAKE THE HEADER
if category != previous_category:
total = 0
print("\n")
print(category)
print("'" * 80)
c = 0
these_gtrs = heresearcharea.gtrs.all()
these_ids = []
for t in these_gtrs:
these_ids.append(t.id)
#print (these_ids)
for gtr in these_gtrs:
c = c + Project.objects.filter( gtrs__in=these_ids ).count()
#total = total + c
print( heresearcharea.name, c)
previous_category = category
except Exception as err:
print(str(err))
raise CommandError( print ('Error on line {}'.format(sys.exc_info()[-1].tb_lineno)))
self.stdout.write(self.style.SUCCESS('Done!')) | [
"="
] | = |
958fc768494ec3c7056fc6c7e6555e4a4a2b2dd8 | 34a633e2d60c5adf0e9f420bcc9587ac66b6766b | /kats/tests/models/test_stlf_model.py | ffe21a735545d553826036ca2afecdb1086247f7 | [
"MIT"
] | permissive | kpbMarques/Kats | 5cdd7ac61e23218cb5588ef775ca194224abe739 | 259fdf8f80f628b44f9ee8881f112b1e9fd7a85f | refs/heads/master | 2023-07-02T15:55:30.915358 | 2021-08-10T19:47:44 | 2021-08-10T19:49:02 | 394,783,804 | 1 | 0 | MIT | 2021-08-10T21:19:45 | 2021-08-10T21:19:45 | null | UTF-8 | Python | false | false | 2,529 | py | # Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import unittest
from unittest import TestCase
import pkgutil
import io
import pandas as pd
from kats.consts import TimeSeriesData
from kats.models.stlf import STLFModel, STLFParams
def load_data(file_name):
ROOT = "kats"
if "kats" in os.getcwd().lower():
path = "data/"
else:
path = "kats/data/"
data_object = pkgutil.get_data(ROOT, path + file_name)
return pd.read_csv(io.BytesIO(data_object), encoding="utf8")
class testSTLFModel(TestCase):
def setUp(self):
DATA = load_data("air_passengers.csv")
DATA.columns = ["time", "y"]
self.TSData = TimeSeriesData(DATA)
DATA_daily = load_data("peyton_manning.csv")
DATA_daily.columns = ["time", "y"]
self.TSData_daily = TimeSeriesData(DATA_daily)
DATA_multi = load_data("multivariate_anomaly_simulated_data.csv")
self.TSData_multi = TimeSeriesData(DATA_multi)
def test_fit_forecast(self) -> None:
for method in ["theta", "prophet", "linear", "quadratic"]:
params = STLFParams(m=12, method=method)
m = STLFModel(self.TSData, params)
m.fit()
m.predict(steps=30)
m.predict(steps=30, include_history=True)
params = STLFParams(m=7, method="theta")
m_daily = STLFModel(self.TSData_daily, params)
m_daily.fit()
m_daily.predict(steps=30)
m.plot()
m_daily.predict(steps=30, include_history=True)
m.plot()
# test when m > the length of time series
params = STLFParams(m=10000, method="theta")
self.assertRaises(
ValueError,
STLFModel,
self.TSData_daily,
params,
)
def test_others(self) -> None:
# test param value error
self.assertRaises(
ValueError,
STLFParams,
method="random_model",
m=12,
)
params = STLFParams(m=12, method="theta")
params.validate_params()
# test model param
self.assertRaises(
ValueError,
STLFModel,
self.TSData_multi,
params,
)
# test __str__ method
m = STLFModel(self.TSData, params)
self.assertEqual(m.__str__(), "STLF")
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
0ecab6beb8846f90119b772c94a608c39ed4b8ea | f0417264adb22d064b0b83b5a24ae33208c0a62b | /H2TauTau/scripts/harvest_old.py | e4891de4db0866345f1c6c3f5b38d618c20264e6 | [] | no_license | cbernet/cmgtools-lite | 6ae1a0bfc45ff03b14195ab0f05b353ffde9cd2e | 359209cd4f982cd1b9d8e3cb366de32b7b46113d | refs/heads/htt_9_4_11_cand1_v1 | 2021-01-18T15:56:14.845371 | 2019-10-24T14:00:32 | 2019-10-24T14:00:32 | 86,693,438 | 1 | 0 | null | 2019-06-07T09:04:05 | 2017-03-30T11:09:21 | Python | UTF-8 | Python | false | false | 382 | py | #!/usr/bin/env python
from CMGTools.H2TauTau.harvest.harvest_old import harvest, get_options
if __name__ == '__main__':
options, args = get_options()
src = args[0]
harvest(src,
subdir_pattern=options.subdir_pattern,
tgz_pattern=options.tgz_pattern,
apply_ff=options.apply_ff,
convert_ntuple=options.convert_ntuple)
| [
"[email protected]"
] | |
489a78e8ffb4d1cf110c0af54cad92b01c4d83b7 | f7550c4964dc8f3c59dbcebe39e947bd6a264dba | /9. Generic Trees/take input Tree .py | 035d0395b6c16a2194ca74782181cd193b973a60 | [] | no_license | Jashwanth-k/Data-Structures-and-Algorithms | db5e2e30932e0a35db578c19ae6cff9f147b7c3d | 1ebf9986999a474cb094f3ab04616a46f2887043 | refs/heads/main | 2023-08-25T02:57:17.394322 | 2021-10-11T15:27:56 | 2021-10-11T15:27:56 | 402,448,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 772 | py |
class TreeNode:
def __init__(self,data):
self.data = data
self.children = list()
def printTreeDetailed(root):
if root is None:
return
print(root.data,end=':')
for child in root.children:
if child != None:
print(child.data,end=',')
print()
for child in root.children:
printTreeDetailed(child)
def takeinput():
print('enter root data')
rootdata = int(input())
if rootdata == -1:
return
root = TreeNode(rootdata)
print('enter no of children for:',rootdata)
for i in range(int(input())):
childNode = takeinput()
root.children.append(childNode)
return root
root = takeinput()
printTreeDetailed(root)
| [
"[email protected]"
] | |
bbbb98922649b61e90795c6fd283613ad91677fd | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/response/AlipayTransferThirdpartyBillCreateResponse.py | 5eec748f112c295bae2984605e0dddffe8587281 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 1,301 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipayTransferThirdpartyBillCreateResponse(AlipayResponse):
def __init__(self):
super(AlipayTransferThirdpartyBillCreateResponse, self).__init__()
self._order_id = None
self._order_type = None
self._payment_id = None
@property
def order_id(self):
return self._order_id
@order_id.setter
def order_id(self, value):
self._order_id = value
@property
def order_type(self):
return self._order_type
@order_type.setter
def order_type(self, value):
self._order_type = value
@property
def payment_id(self):
return self._payment_id
@payment_id.setter
def payment_id(self, value):
self._payment_id = value
def parse_response_content(self, response_content):
response = super(AlipayTransferThirdpartyBillCreateResponse, self).parse_response_content(response_content)
if 'order_id' in response:
self.order_id = response['order_id']
if 'order_type' in response:
self.order_type = response['order_type']
if 'payment_id' in response:
self.payment_id = response['payment_id']
| [
"[email protected]"
] | |
db8858d3d0b03c9346f4b028be2f3a4fc6c900e7 | 37db56765276c0835a2c7e3955c412ce204836c1 | /1732.py | a3221be6890f5768a8a8d1a01f9b713a2f3c54bd | [] | no_license | supperllx/LeetCode | 9d0a3a7258d1cff6afa6e77f61a2e697834914ca | df3a589ea858218f689fe315d134adc957c3debd | refs/heads/master | 2023-05-01T06:57:17.403568 | 2021-05-19T18:29:25 | 2021-05-19T18:34:03 | 288,351,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | class Solution:
def largestAltitude(self, gain: List[int]) -> int:
curHeight = 0
maxHeight = 0
for g in gain:
curHeight += g
maxHeight = max(maxHeight, curHeight)
return maxHeight | [
"[email protected]"
] | |
4c2f052e47f331249f8d010f61215fab0048cba4 | 4f2f71beee2fb016550598996e100ce176100dcb | /python/etl/etl.py | 9bbd5ca4f54b1b3e9ae65cbba66894797f2bf174 | [] | no_license | charles-wangkai/exercism | d2723bd160573b2d3ee9051ff63972e5be900d87 | c283a5078e3d0f05ff3d86b2c208ae086d3896a4 | refs/heads/master | 2023-05-11T13:11:23.776323 | 2023-04-30T17:40:56 | 2023-04-30T17:40:56 | 102,832,444 | 2 | 4 | null | 2020-03-14T15:49:13 | 2017-09-08T07:31:36 | C++ | UTF-8 | Python | false | false | 151 | py | def transform(legacy_data):
return {letter.lower(): score
for score, letters in legacy_data.items()
for letter in letters}
| [
"[email protected]"
] | |
69c092950f4db463506ab6a6d11d94f52f9e0535 | c5f7019c52cd91a3d9505943b9d866539f2fb0bc | /synapse/models/transport.py | 189fc28633f8945726bba6d076022c1b92bc10b3 | [
"Apache-2.0"
] | permissive | vertexproject/synapse | ce31699fcb10cb2c870d448915f4d4524247e2d0 | 1808dff78921b4bfdb451a12ee5d03427a5295b9 | refs/heads/master | 2023-09-03T23:48:26.584015 | 2023-08-31T20:34:35 | 2023-08-31T20:34:35 | 37,228,107 | 307 | 63 | Apache-2.0 | 2023-09-14T21:53:32 | 2015-06-10T23:29:41 | Python | UTF-8 | Python | false | false | 15,787 | py | import synapse.lib.module as s_module
class TransportModule(s_module.CoreModule):
def getModelDefs(self):
modl = {
'types': (
('transport:direction', ('hugenum', {'modulo': 360}), {
'doc': 'A direction measured in degrees with 0.0 being true North.'}),
('transport:land:vehicle', ('guid', {}), {
'doc': 'An individual vehicle.'}),
('transport:land:registration', ('guid', {}), {
'doc': 'Registration issued to a contact for a land vehicle.'}),
('transport:land:license', ('guid', {}), {
'doc': 'A license to operate a land vehicle issued to a contact.'}),
('transport:air:craft', ('guid', {}), {
'doc': 'An individual aircraft.'}),
('transport:air:tailnum', ('str', {'lower': True, 'strip': True, 'regex': '^[a-z0-9-]{2,}$'}), {
'doc': 'An aircraft registration number or military aircraft serial number.',
'ex': 'ff023'}),
('transport:air:flightnum', ('str', {'lower': True, 'strip': True, 'replace': ((' ', ''),), 'regex': '^[a-z]{2}[0-9]{1,4}$'}), {
'doc': 'A commercial flight designator including airline and serial.',
'ex': 'ua2437'}),
('transport:air:telem', ('guid', {}), {
'doc': 'A telemetry sample from an aircraft in transit.'}),
('transport:air:flight', ('guid', {}), {
'doc': 'An individual instance of a flight.'}),
('transport:air:occupant', ('guid', {}), {
'doc': 'An occupant of a specific flight.'}),
('transport:air:port', ('str', {'lower': True}), {
'doc': 'An IATA assigned airport code.'}),
('transport:sea:vessel', ('guid', {}), {
'doc': 'An individual sea vessel.'}),
('transport:sea:mmsi', ('str', {'regex': '[0-9]{9}'}), {
'doc': 'A Maritime Mobile Service Identifier'}),
('transport:sea:imo', ('str', {'lower': True, 'strip': True, 'replace': ((' ', ''),), 'regex': '^imo[0-9]{7}$'}), {
'doc': 'An International Maritime Organization registration number.'}),
('transport:sea:telem', ('guid', {}), {
'doc': 'A telemetry sample from a vessel in transit.'}),
# TODO a few more items to plumb eventually
# ('transport:sea:hin',
# ('transport:sea:port',
),
'forms': (
('transport:land:license', {}, (
('id', ('str', {'strip': True}), {
'doc': 'The license ID.'}),
# TODO type ( drivers license, commercial trucking, etc? )
('contact', ('ps:contact', {}), {
'doc': 'The contact info of the registrant.'}),
('issued', ('time', {}), {
'doc': 'The time the license was issued.'}),
('expires', ('time', {}), {
'doc': 'The time the license expires.'}),
('issuer', ('ou:org', {}), {
'doc': 'The org which issued the license.'}),
('issuer:name', ('ou:name', {}), {
'doc': 'The name of the org which issued the license.'}),
)),
('transport:land:registration', {}, (
('id', ('str', {'strip': True}), {
'doc': 'The vehicle registration ID or license plate.'}),
('contact', ('ps:contact', {}), {
'doc': 'The contact info of the registrant.'}),
('license', ('transport:land:license', {}), {
'doc': 'The license used to register the vehicle.'}),
('issued', ('time', {}), {
'doc': 'The time the vehicle registration was issued.'}),
('expires', ('time', {}), {
'doc': 'The time the vehicle registration expires.'}),
('vehicle', ('transport:land:vehicle', {}), {
'doc': 'The vehicle being registered.'}),
('issuer', ('ou:org', {}), {
'doc': 'The org which issued the registration.'}),
('issuer:name', ('ou:name', {}), {
'doc': 'The name of the org which issued the registration.'}),
)),
('transport:land:vehicle', {}, (
('serial', ('str', {'strip': True}), {
'doc': 'The serial number or VIN of the vehicle.'}),
('built', ('time', {}), {
'doc': 'The date the vehicle was constructed.'}),
('make', ('ou:name', {}), {
'doc': 'The make of the vehicle.'}),
('model', ('str', {'lower': True, 'onespace': True}), {
'doc': 'The model of the vehicle.'}),
('registration', ('transport:land:registration', {}), {
'doc': 'The current vehicle registration information.'}),
('owner', ('ps:contact', {}), {
'doc': 'The contact info of the owner of the vehicle.'}),
)),
('transport:air:craft', {}, (
('tailnum', ('transport:air:tailnum', {}), {
'doc': 'The aircraft tail number.'}),
('type', ('str', {'lower': True, 'strip': True}), {
'doc': 'The type of aircraft.'}),
('built', ('time', {}), {
'doc': 'The date the aircraft was constructed.'}),
('make', ('str', {'lower': True, 'strip': True}), {
'doc': 'The make of the aircraft.'}),
('model', ('str', {'lower': True, 'strip': True}), {
'doc': 'The model of the aircraft.'}),
('serial', ('str', {'strip': True}), {
'doc': 'The serial number of the aircraft.'}),
('operator', ('ps:contact', {}), {
'doc': 'Contact info representing the person or org that operates the aircraft.'}),
)),
('transport:air:port', {}, (
('name', ('str', {'lower': True, 'onespace': True}), {
'doc': 'The name of the airport'}),
('place', ('geo:place', {}), {
'doc': 'The place where the IATA airport code is assigned.'}),
)),
('transport:air:tailnum', {}, (
('loc', ('loc', {}), {
'doc': 'The geopolitical location that the tailnumber is allocated to.'}),
('type', ('str', {'lower': True, 'strip': True}), {
'doc': 'A type which may be specific to the country prefix.'}),
)),
('transport:air:flightnum', {}, (
('carrier', ('ou:org', {}), {
'doc': 'The org which operates the given flight number.'}),
('to:port', ('transport:air:port', {}), {
'doc': 'The most recently registered destination for the flight number.'}),
('from:port', ('transport:air:port', {}), {
'doc': 'The most recently registered origin for the flight number.'}),
('stops', ('array', {'type': 'transport:air:port'}), {
'doc': 'An ordered list of aiport codes for the flight segments.'}),
)),
('transport:air:flight', {}, (
('num', ('transport:air:flightnum', {}), {
'doc': 'The flight number of this flight.'}),
('scheduled:departure', ('time', {}), {
'doc': 'The time this flight was originally scheduled to depart'}),
('scheduled:arrival', ('time', {}), {
'doc': 'The time this flight was originally scheduled to arrive'}),
('departed', ('time', {}), {
'doc': 'The time this flight departed'}),
('arrived', ('time', {}), {
'doc': 'The time this flight arrived'}),
('carrier', ('ou:org', {}), {
'doc': 'The org which operates the given flight number.'}),
('craft', ('transport:air:craft', {}), {
'doc': 'The aircraft that flew this flight.'}),
('tailnum', ('transport:air:tailnum', {}), {
'doc': 'The tail/registration number at the time the aircraft flew this flight.'}),
('to:port', ('transport:air:port', {}), {
'doc': 'The destination airport of this flight.'}),
('from:port', ('transport:air:port', {}), {
'doc': 'The origin airport of this flight.'}),
('stops', ('array', {'type': 'transport:air:port'}), {
'doc': 'An ordered list of airport codes for stops which occurred during this flight.'}),
('cancelled', ('bool', {}), {
'doc': 'Set to true for cancelled flights.'}),
)),
('transport:air:telem', {}, (
('flight', ('transport:air:flight', {}), {
'doc': 'The flight being measured.'}),
('latlong', ('geo:latlong', {}), {
'doc': 'The lat/lon of the aircraft at the time.'}),
('loc', ('loc', {}), {
'doc': 'The location of the aircraft at the time.'}),
('place', ('geo:place', {}), {
'doc': 'The place that the lat/lon geocodes to.'}),
('accuracy', ('geo:dist', {}), {
'doc': 'The horizontal accuracy of the latlong sample.'}),
('course', ('transport:direction', {}), {
'doc': 'The direction, in degrees from true North, that the aircraft is traveling.'}),
('heading', ('transport:direction', {}), {
'doc': 'The direction, in degrees from true North, that the nose of the aircraft is pointed.'}),
('speed', ('velocity', {}), {
'doc': 'The ground speed of the aircraft at the time.'}),
('airspeed', ('velocity', {}), {
'doc': 'The air speed of the aircraft at the time.'}),
('verticalspeed', ('velocity', {'relative': True}), {
'doc': 'The relative vertical speed of the aircraft at the time.'}),
('altitude', ('geo:altitude', {}), {
'doc': 'The altitude of the aircraft at the time.'}),
('altitude:accuracy', ('geo:dist', {}), {
'doc': 'The vertical accuracy of the altitude measurement.'}),
('time', ('time', {}), {
'doc': 'The time the telemetry sample was taken.'})
)),
('transport:air:occupant', {}, (
('type', ('str', {'lower': True}), {
'doc': 'The type of occupant such as pilot, crew or passenger.'}),
('flight', ('transport:air:flight', {}), {
'doc': 'The flight that the occupant was aboard.'}),
('seat', ('str', {'lower': True}), {
'doc': 'The seat assigned to the occupant'}),
('contact', ('ps:contact', {}), {
'doc': 'The contact information of the occupant.'}),
)),
# TODO ais numbers
('transport:sea:vessel', {}, (
('imo', ('transport:sea:imo', {}), {
'doc': 'The International Maritime Organization number for the vessel.'}),
('name', ('str', {'lower': True, 'onespace': True}), {
'doc': 'The name of the vessel'}),
('length', ('geo:dist', {}), {
'doc': 'The official overall vessel length'}),
('beam', ('geo:dist', {}), {
'doc': 'The official overall vessel beam'}),
('flag', ('iso:3166:cc', {}), {
'doc': 'The country the vessel is flagged to.'}),
('mmsi', ('transport:sea:mmsi', {}), {
'doc': 'The Maritime Mobile Service Identifier assigned to the vessel.'}),
('built', ('time', {}), {
'doc': 'The year the vessel was constructed.'}),
('make', ('str', {'lower': True, 'strip': True}), {
'doc': 'The make of the vessel.'}),
('model', ('str', {'lower': True, 'strip': True}), {
'doc': 'The model of the vessel.'}),
('operator', ('ps:contact', {}), {
'doc': 'The contact information of the operator.'}),
# TODO tonnage / gross tonnage?
)),
('transport:sea:telem', {}, (
('vessel', ('transport:sea:vessel', {}), {
'doc': 'The vessel being measured.'}),
('time', ('time', {}), {
'doc': 'The time the telemetry was sampled.'}),
('latlong', ('geo:latlong', {}), {
'doc': 'The lat/lon of the vessel at the time.'}),
('loc', ('loc', {}), {
'doc': 'The location of the vessel at the time.'}),
('place', ('geo:place', {}), {
'doc': 'The place that the lat/lon geocodes to.'}),
('accuracy', ('geo:dist', {}), {
'doc': 'The horizontal accuracy of the latlong sample.'}),
('course', ('transport:direction', {}), {
'doc': 'The direction, in degrees from true North, that the vessel is traveling.'}),
('heading', ('transport:direction', {}), {
'doc': 'The direction, in degrees from true North, that the bow of the vessel is pointed.'}),
('speed', ('velocity', {}), {
'doc': 'The speed of the vessel at the time.'}),
('draft', ('geo:dist', {}), {
'doc': 'The keel depth at the time.'}),
('airdraft', ('geo:dist', {}), {
'doc': 'The maximum height of the ship from the waterline.'}),
('destination', ('geo:place', {}), {
'doc': 'The fully resolved destination that the vessel has declared.'}),
('destination:name', ('geo:name', {}), {
'doc': 'The name of the destination that the vessel has declared.'}),
('destination:eta', ('time', {}), {
'doc': 'The estimated time of arrival that the vessel has declared.'}),
)),
),
}
return (('transport', modl), )
| [
"[email protected]"
] | |
cc5b182b31c15e0834f851a86264418069dace1b | 09e5cfe06e437989a2ccf2aeecb9c73eb998a36c | /modules/cctbx_project/mmtbx/refinement/tst_occupancy_selections.py | 00e45d369bbd20f50d9ca7e271fc4920cf51cc44 | [
"BSD-3-Clause-LBNL",
"BSD-3-Clause"
] | permissive | jorgediazjr/dials-dev20191018 | b81b19653624cee39207b7cefb8dfcb2e99b79eb | 77d66c719b5746f37af51ad593e2941ed6fbba17 | refs/heads/master | 2020-08-21T02:48:54.719532 | 2020-01-25T01:41:37 | 2020-01-25T01:41:37 | 216,089,955 | 0 | 1 | BSD-3-Clause | 2020-01-25T01:41:39 | 2019-10-18T19:03:17 | Python | UTF-8 | Python | false | false | 67,722 | py | from __future__ import absolute_import, division, print_function
from mmtbx.monomer_library import pdb_interpretation
from mmtbx.refinement.occupancies import occupancy_selections
from mmtbx.command_line import fmodel
import mmtbx.model
from iotbx import file_reader
import iotbx.pdb
import iotbx.phil
from libtbx.test_utils import approx_equal, Exception_expected
from libtbx.utils import format_cpu_times, null_out, Sorry
import libtbx.load_env
from six.moves import cStringIO as StringIO
import os
import sys
from six.moves import zip
def extract_serials(atoms, occ_groups):
  """Translate nested occupancy-group i_seq indices into the integer
  serial numbers of the corresponding atoms, preserving nesting."""
  return [
    [[int(atoms[i_seq].serial) for i_seq in conformer]
     for conformer in group]
    for group in occ_groups]
def make_up_other_constrained_groups_obj(selections):
  """Wrap each selection in a minimal object exposing a ``.selection``
  attribute, mimicking parsed constrained-group parameter objects."""
  class _group_holder:
    def __init__(self, selection):
      self.selection = selection
  return [_group_holder(selection=sel) for sel in selections]
def get_model(file_name, log):
  """Build an mmtbx model manager from a PDB file.

  Atom sorting is disabled so that i_seq values keep matching the
  serial numbers written in the input file.
  """
  params = iotbx.phil.parse(
    input_string=pdb_interpretation.grand_master_phil_str,
    process_includes=True).extract()
  params.pdb_interpretation.sort_atoms = False
  return mmtbx.model.manager(
    model_input               = iotbx.pdb.input(file_name=file_name),
    process_input             = True,
    pdb_interpretation_params = params,
    stop_for_unknowns         = False,
    log                       = log)
def get_model_str(strings, log):
  """Build an mmtbx model manager from in-memory PDB text.

  Atom sorting is disabled so that i_seq values keep matching the
  serial numbers written in the input text.
  """
  params = iotbx.phil.parse(
    input_string=pdb_interpretation.grand_master_phil_str,
    process_includes=True).extract()
  params.pdb_interpretation.sort_atoms = False
  return mmtbx.model.manager(
    model_input               = iotbx.pdb.input(lines=strings, source_info=None),
    process_input             = True,
    pdb_interpretation_params = params,
    stop_for_unknowns         = False,
    log                       = log)
def exercise_00(verbose):
  """Regression: default occupancy group extraction on gocr.pdb, plus the
  effect of add_water, extra individual selections and extra constrained
  groups on the resulting group lists."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  # Alternate-conformer groups always detected automatically.
  base = [ [[2],[3]], [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[24,25,26,27],[28,29,30,31]] ]
  # default
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  res = extract_serials(model.pdb_atoms, res)
  target = base[:]
  target.insert(3, [[21]])
  target.insert(4, [[23]])
  assert approx_equal(res, target)
  # default + add water
  res = occupancy_selections(
    model = model,
    add_water = True,
    as_flex_arrays = False)
  res = extract_serials(model.pdb_atoms, res)
  base_21_23 = target[:]
  target.extend([[[18]], [[19]], [[20]], [[22]]])
  assert approx_equal(res, target)
  # 1
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False,
    other_individual_selection_strings = ['resseq 0 and not (altloc A or altloc B)'])
  res = extract_serials(model.pdb_atoms, res)
  target = base_21_23[:]
  target.extend([[[0]], [[1]], [[4]], [[5]]])
  assert approx_equal(res, target)
  res = occupancy_selections(
    model = model,
    add_water = True,
    as_flex_arrays = False,
    other_individual_selection_strings = ['resseq 0 and not (altloc A or altloc B)'])
  res = extract_serials(model.pdb_atoms, res)
  target.extend([[[18]], [[19]], [[20]], [[22]]])
  assert approx_equal(res, target)
  # 2
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['resseq 0 and (name S or name O1)'], ['resseq 0 and (name O3 or name O4)'] ])
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False,
    other_constrained_groups = other_constrained_groups)
  res = extract_serials(model.pdb_atoms, res)
  target = base_21_23[:]
  target.extend([[[0, 1]], [[4, 5]]])
  assert approx_equal(res, target)
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['resseq 0 and (name S or name O1)'], ['resseq 0 and (name O3 or name O4)'] ])
  res = occupancy_selections(
    model = model,
    add_water = True,
    as_flex_arrays = False,
    other_constrained_groups = other_constrained_groups)
  res = extract_serials(model.pdb_atoms, res)
  target.extend([[[18]], [[19]], [[20]], [[22]]])
  assert approx_equal(res, target)
  # 3
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['resseq 0 and (name O3 or name O4)'] ])
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False,
    other_individual_selection_strings = ['resseq 0 and (name S or name O1)'],
    other_constrained_groups = other_constrained_groups)
  res = extract_serials(model.pdb_atoms, res)
  target = base_21_23[:]
  target.extend([[[0]], [[1]], [[4, 5]]])
  assert approx_equal(res, target)
def exercise_01(verbose):
  """Regression: a two-conformer residue with riding hydrogens yields a
  single constrained group spanning both altlocs (ala_h.pdb)."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/ala_h.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  #
  base = [ [[0,1,2,3,4,10,12,14,16,18,20,22], [5,6,7,8,9,11,13,15,17,19,21,23]] ]
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  res = extract_serials(model.pdb_atoms, res)
  assert approx_equal(res, base)
def exercise_02(verbose):
  """Regression: mixed-occupancy model (occ_mix1.pdb) splits into one
  large two-altloc group plus a separate single-atom pair."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/occ_mix1.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  #
  base = [ [[0,1,2,3,4,5,6,7,8,9,10,11,12], [14,15,16,17,18,19,20,21,22,23,24,25,26]], [[13],[27]] ]
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  res = extract_serials(model.pdb_atoms, res)
  assert approx_equal(res, base)
def exercise_03(verbose):
  """Regression: exchangeable H/D sites (ala_hd.pdb) become paired
  occupancy groups; lone H/D atoms become single-atom groups."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/ala_hd.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  #
  base = [ [[7]], [[8]], [[9],[12]], [[10],[13]], [[11],[14]] ]
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  res = extract_serials(model.pdb_atoms, res)
  assert approx_equal(res, base)
def exercise_05(verbose):
  """Regression: exchangeable H/D pairs across several residue types in a
  neutron model each form their own two-member occupancy group."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/ala_lys_arg_ser_tyr_neutron_hd.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  #
  base = [ [[9],[12]], [[10],[13]], [[11],[14]], [[33],[37]], [[34],[38]],
           [[35],[39]], [[36],[40]], [[59],[65]], [[60],[66]], [[61],[67]],
           [[62],[68]], [[63],[69]], [[64],[70]], [[80],[82]], [[81],[83]],
           [[103],[105]], [[104],[106]]]
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  res = extract_serials(model.pdb_atoms, res)
  assert approx_equal(res, base)
def exercise_06(verbose):
  """Regression: ligand model with H/D sites (NAD_594_HD.pdb); result is
  compared directly as i_seq indices (no serial extraction)."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/NAD_594_HD.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  #
  base = [ [[62]], [[113]], [[65],[77]], [[66],[78]], [[67],[79]], [[68],[80]],
           [[69],[81]], [[70],[82]], [[71],[83]], [[72],[84]],
           [[73],[85]], [[74],[86]], [[75],[87]], [[76],[88]],
           [[124],[127]],[[125],[128]],[[126],[129]]]
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  assert approx_equal(res, base)
def exercise_07(verbose):
  """Regression: a single user-supplied constrained group covering one
  residue produces exactly one group with all its atoms."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr_1.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answer = [ [[0, 1, 2, 3, 4]] ]
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['resseq 0'] ])
  result = occupancy_selections(
    model = model,
    other_constrained_groups = other_constrained_groups,
    as_flex_arrays = False)
  assert approx_equal(result, answer)
def exercise_08(verbose):
  """Regression: applying a user constrained group to each residue of
  gocr_2.pdb in turn; automatically-detected altloc groups are kept and
  the user group is appended."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr_2.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  # One expected answer per entry in group_selections below.
  answers = [
    [ [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[0,1,2,3,4,5]] ],
    [ [[4],[5]], [[16],[17]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[6,7,8,9,10,11,12,13,14,15]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[16,17]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[18,19,20]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[23]], [[24,25,26,27],[28,29,30,31]], [[21]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[22]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[21]], [[23,24,25,26,27,28,29,30,31]] ],
    [ [[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31]] ]
          ]
  group_selections = ['resseq 0',
                      'resseq 1',
                      'resseq 2',
                      'resseq 3',
                      'resseq 4',
                      'resseq 5',
                      'resseq 6',
                      'resseq 0:6']
  for group_selection, answer in zip(group_selections, answers):
    other_constrained_groups = make_up_other_constrained_groups_obj(
      selections = [ [group_selection] ])
    result = occupancy_selections(
      model = model,
      other_constrained_groups = other_constrained_groups,
      as_flex_arrays = False)
    assert approx_equal(result, answer)
def exercise_09(verbose):
  """Regression: applying individual-occupancy selections to each residue
  of gocr_2.pdb in turn; each selected atom becomes its own group."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr_2.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answers = [
    [ [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[0]], [[1]], [[2]], [[3]], [[4]], [[5]] ],
    [ [[4],[5]], [[16],[17]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[6]], [[7]], [[8]], [[9]], [[10]], [[11]], [[12]], [[13]], [[14]], [[15]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[16]], [[17]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[18]], [[19]], [[20]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[23]], [[24,25,26,27],[28,29,30,31]], [[21]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[21]], [[23]], [[24,25,26,27],[28,29,30,31]], [[22]] ],
    [ [[4],[5]], [[6,7,8,9,10],[11,12,13,14,15]], [[16],[17]], [[21]], [[23]], [[24]], [[25]], [[26]], [[27]], [[28]], [[29]], [[30]], [[31]] ]
            ]
  # NOTE(review): 8 selections but only 7 answers -- zip() stops at 7, so
  # the 'resseq 0:6' case below is never actually exercised. Confirm
  # whether an eighth answer is missing.
  individual_selections = ['resseq 0',
                           'resseq 1',
                           'resseq 2',
                           'resseq 3',
                           'resseq 4',
                           'resseq 5',
                           'resseq 6',
                           'resseq 0:6']
  for individual_selection, answer in zip(individual_selections, answers):
    result = occupancy_selections(
      model = model,
      other_individual_selection_strings = [individual_selection],
      as_flex_arrays = False)
    assert approx_equal(result, answer)
def exercise_10(verbose):
  """Regression: selecting the same atoms for both individual and group
  occupancy refinement must raise an error with a specific message."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['resseq 0'] ])
  try:
    occupancy_selections(
      model = model,
      other_constrained_groups = other_constrained_groups,
      other_individual_selection_strings = ['resseq 0'],
      as_flex_arrays = False)
  except Exception as e:
    # Capture the message inside the handler: under Python 3 the
    # "as e" binding is deleted when the handler exits (PEP 3110),
    # so referencing e afterwards would raise NameError.
    error_message = str(e)
  else:
    raise Exception_expected
  assert error_message == "Duplicate selection: same atoms selected for individual and group occupancy refinement."
def exercise_11(verbose):
  """Regression: selecting the same atoms both to be fixed and to be
  refined must raise an error, for individual and for group refinement."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  expected = ("Duplicate selection: occupancies of same atoms selected to "
              "be fixed and to be refined.")
  # Case 1: fixed + individual refinement on the same atoms.
  try:
    occupancy_selections(
      model = model,
      remove_selection = ['resseq 0'],
      other_individual_selection_strings = ['resseq 0'],
      as_flex_arrays = False)
  except Exception as e:
    # Capture inside the handler: Python 3 deletes the "as e" binding
    # at handler exit (PEP 3110).
    error_message = str(e)
  else:
    raise Exception_expected
  assert error_message == expected
  # Case 2: fixed + constrained-group refinement on the same atoms.
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['resseq 0'] ])
  try:
    occupancy_selections(
      model = model,
      other_constrained_groups = other_constrained_groups,
      remove_selection = ['resseq 0'],
      as_flex_arrays = False)
  except Exception as e:
    error_message = str(e)
  else:
    raise Exception_expected
  assert error_message == expected
def exercise_12(verbose):
  """Regression: remove_selection excludes atoms from refinement while a
  constrained group or individual selections are applied elsewhere."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr_2.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answer = [ [[4],[5]], [[16],[17]], [[21]], [[23,24,25,26,27,28,29,30,31]] ]
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['resseq 6'] ])
  result = occupancy_selections(
    model = model,
    remove_selection = ['resseq 1'],
    other_constrained_groups = other_constrained_groups,
    as_flex_arrays = False)
  assert approx_equal(result, answer)
  #
  answer = [ [[4],[5]], [[16],[17]], [[21]], [[23]], [[24]], [[25]], [[26]], [[27]], [[28]], [[29]], [[30]], [[31]] ]
  result = occupancy_selections(
    model = model,
    remove_selection = ['resseq 1'],
    other_individual_selection_strings = ['resseq 6'],
    as_flex_arrays = False)
  assert approx_equal(result, answer)
def exercise_13(verbose):
  """Regression: two-member constrained groups built from pairs of
  explicit atom selections within one residue (lys_1.pdb)."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/lys_1.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answer = [ [[8],[9]], [[10]], [[0],[1]], [[2],[3]] ]
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['chain A and resseq 1 and name N','chain A and resseq 1 and name CA'],
                   ['chain A and resseq 1 and name C','chain A and resseq 1 and name O'] ]
    )
  result = occupancy_selections(
    model = model,
    other_constrained_groups = other_constrained_groups,
    as_flex_arrays = False)
  assert approx_equal(result, answer)
def exercise_14(verbose):
  """Regression: constrained groups where each member is itself a
  multi-atom selection, including a single-member group."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/lys_1.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answer = [ [[8],[9]], [[10]], [[0,1,2],[3,4]], [[5],[6]], [[7]] ]
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['chain A and resseq 1 and (name N or name CA or name C)', 'chain A and resseq 1 and (name O or name CB)'],
                   ['chain A and resseq 1 and name CG','chain A and resseq 1 and name CD'],
                   ['chain A and resseq 1 and name CE'] ]
    )
  result = occupancy_selections(
    model = model,
    other_constrained_groups = other_constrained_groups,
    as_flex_arrays = False)
  assert approx_equal(result, answer)
def exercise_15(verbose):
  """Regression: constrained groups mixing chains and combined selections
  joined with 'or' inside a single group member."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/lys_1.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answer = [ [[8],[9]], [[0,1,2],[10]], [[5,7]] ]
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [ ['chain A and resseq 1 and (name N or name CA or name C)', 'chain S and resseq 1'],
                   ['chain A and resseq 1 and name CG or chain A and resseq 1 and name CE'] ]
    )
  result = occupancy_selections(
    model = model,
    other_constrained_groups = other_constrained_groups,
    as_flex_arrays = False)
  assert approx_equal(result, answer)
def exercise_16(verbose):
  """Regression: a three-member constrained group (two altlocs of one atom
  plus an atom from another chain) given as three separate selections."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/lys_1.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answer = [ [[8],[9],[10]] ]
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [
   ['chain A and resseq 1 and name NZ and altloc A', 'chain A and resseq 1 and name NZ and altloc B', 'chain S and resseq 1'] ]
    )
  result = occupancy_selections(
    model = model,
    other_constrained_groups = other_constrained_groups,
    as_flex_arrays = False)
  assert approx_equal(result, answer)
def exercise_17(verbose):
  """Regression: the same atoms as exercise_16 given as a single 'or'
  selection collapse into one single-member group."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/lys_1.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answer = [ [[8,9,10]] ]
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [
   ['chain A and resseq 1 and name NZ and altloc A or chain A and resseq 1 and name NZ and altloc B or chain S and resseq 1'] ]
    )
  result = occupancy_selections(
    model = model,
    other_constrained_groups = other_constrained_groups,
    as_flex_arrays = False)
  assert approx_equal(result, answer)
def exercise_18(verbose):
  """Regression: like exercise_16 but on lys_2.pdb, where the third group
  member carries altloc C."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/lys_2.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answer = [ [[8],[9],[10]] ]
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [
   ['chain A and resseq 1 and name NZ and altloc A','chain A and resseq 1 and name NZ and altloc B','chain S and resseq 1 and altloc C']]
    )
  result = occupancy_selections(
    model = model,
    other_constrained_groups = other_constrained_groups,
    as_flex_arrays = False)
  assert approx_equal(result, answer)
def exercise_19(verbose):
  """Regression: a constrained-group selection string that matches no
  atoms must raise an error with a specific message."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/lys_1.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  # "name XX" deliberately matches nothing.
  tmp = "chain A and resseq 1 and name XX and altloc A"
  other_constrained_groups = make_up_other_constrained_groups_obj(
    selections = [[
      tmp,
      'chain A and resseq 1 and name NZ and altloc B',
      'chain S and resseq 1']])
  try:
    occupancy_selections(
      model = model,
      other_constrained_groups = other_constrained_groups,
      as_flex_arrays = False)
  except Exception as e:
    # Capture inside the handler: Python 3 deletes the "as e" binding
    # at handler exit (PEP 3110), so str(e) afterwards would raise
    # NameError instead of failing the assertion.
    error_message = str(e)
  else:
    raise Exception_expected
  assert error_message == \
    'Selection string results in empty selection (selects no atoms): "%s"' \
    % tmp
def exercise_20(verbose):
  """Regression: a two-conformer ILE with hydrogens forms one constrained
  group containing both full conformers (ile_2conf_h.pdb)."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/ile_2conf_h.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  answer = [ [[4,5,6,7,8,9,10,11,12,13,14,15,16,17,18], [19,20,21,22,23,24,25,26,27,28,29,30,31,32,33]] ]
  result = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  assert approx_equal(result, answer)
def exercise_21(verbose):
  """Regression: default group detection on gocr_3.pdb, mixing altloc
  pairs, single partially-occupied atoms and whole-residue groups."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr_3.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  #
  base = [[[2], [3]],
          [[6, 7, 8, 9, 10], [11, 12, 13, 14, 15]],
          [[16], [17]],
          [[21]],
          [[23]],
          [[24, 25, 26, 27], [28, 29, 30, 31]],
          [[36]],
          [[47]],
          [[48]],
          [[49]],
          [[50]],
          [[51]],
          [[53]],
          [[56, 57, 58, 59]],
          [[60, 61, 62, 63]],
          [[64, 65, 66, 67, 68]],
          [[37], [40]],
          [[38], [41]],
          [[39], [42]],
          [[43, 44, 45, 46]]]
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  res = extract_serials(model.pdb_atoms, res)
  assert approx_equal(res, base)
def exercise_22(verbose):
  """Regression: default group detection on gocr_4.pdb yields one
  two-conformer group with interleaved atom indices."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr_4.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  #
  base = [[[0, 1, 2, 3, 8, 9, 10, 11, 12], [4, 5, 6, 7, 13, 14, 15, 16, 17]]]
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  res = extract_serials(model.pdb_atoms, res)
  assert approx_equal(res, base)
def exercise_23(verbose):
  """Regression: default group detection on gocr_5.pdb yields one
  single-conformer group and one altloc pair."""
  pdb_file = libtbx.env.find_in_repositories(
    relative_path="phenix_regression/pdb/gocr_5.pdb",
    test=os.path.isfile)
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model(pdb_file, log)
  #
  base = [[[1, 2, 3, 4, 5, 6]], [[7, 8, 9, 10, 11], [12, 13, 14, 15, 16]]]
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  res = extract_serials(model.pdb_atoms, res)
  assert approx_equal(res, base)
def exercise_24(verbose):
  """Regression: four-conformer ALA followed by VAL with altloc-specific H
  (or D) atoms; the exchangeable-site H and D variants of the same model
  must produce identical occupancy groups."""
  pdb_str1="""\
CRYST1 10.707 11.101 13.552 90.00 90.00 90.00 P 1
ATOM 0 N AALA A 9 3.452 6.807 3.508 0.19 9.33 A N
ATOM 1 CA AALA A 9 4.572 6.204 4.211 0.19 9.82 A C
ATOM 2 C AALA A 9 4.165 5.990 5.664 0.19 10.34 A C
ATOM 3 O AALA A 9 3.000 6.165 6.021 0.19 10.96 A O
ATOM 4 CB AALA A 9 5.792 7.098 4.116 0.19 10.31 A C
ATOM 5 H AALA A 9 3.466 7.667 3.487 0.19 8.78 A H
ATOM 6 HA AALA A 9 4.802 5.351 3.810 0.19 9.23 A H
ATOM 7 HB1AALA A 9 6.533 6.686 4.588 0.19 9.91 A H
ATOM 8 HB2AALA A 9 6.031 7.221 3.184 0.19 9.91 A H
ATOM 9 HB3AALA A 9 5.594 7.960 4.515 0.19 9.91 A H
ATOM 10 N BALA A 9 3.348 6.697 3.518 0.28 8.28 A N
ATOM 11 CA BALA A 9 4.461 6.052 4.195 0.28 9.14 A C
ATOM 12 C BALA A 9 4.138 5.964 5.683 0.28 9.84 A C
ATOM 13 O BALA A 9 3.003 6.215 6.089 0.28 10.68 A O
ATOM 14 CB BALA A 9 5.726 6.829 3.952 0.28 9.20 A C
ATOM 15 H BALA A 9 3.422 7.551 3.454 0.28 8.78 A H
ATOM 16 HA BALA A 9 4.597 5.156 3.849 0.28 9.23 A H
ATOM 17 HB1BALA A 9 6.465 6.395 4.406 0.28 9.91 A H
ATOM 18 HB2BALA A 9 5.907 6.863 3.000 0.28 9.91 A H
ATOM 19 HB3BALA A 9 5.623 7.731 4.294 0.28 9.91 A H
ATOM 20 N CALA A 9 3.608 6.763 3.402 0.28 8.32 A N
ATOM 21 CA CALA A 9 4.617 6.060 4.177 0.28 9.56 A C
ATOM 22 C CALA A 9 4.219 6.081 5.651 0.28 10.15 A C
ATOM 23 O CALA A 9 3.126 6.528 6.006 0.28 10.64 A O
ATOM 24 CB CALA A 9 5.981 6.684 3.973 0.28 10.39 A C
ATOM 25 H CALA A 9 3.801 7.579 3.210 0.28 8.78 A H
ATOM 26 HA CALA A 9 4.671 5.139 3.876 0.28 9.23 A H
ATOM 27 HB1CALA A 9 6.639 6.202 4.497 0.28 9.91 A H
ATOM 28 HB2CALA A 9 6.220 6.639 3.034 0.28 9.91 A H
ATOM 29 HB3CALA A 9 5.959 7.611 4.257 0.28 9.91 A H
ATOM 30 N DALA A 9 3.518 6.930 3.530 0.25 8.78 A N
ATOM 31 CA DALA A 9 4.639 6.333 4.232 0.25 9.23 A C
ATOM 32 C DALA A 9 4.203 6.093 5.674 0.25 10.10 A C
ATOM 33 O DALA A 9 3.051 6.346 6.031 0.25 10.72 A O
ATOM 34 CB DALA A 9 5.837 7.255 4.177 0.25 9.91 A C
ATOM 35 H DALA A 9 3.490 7.789 3.568 0.25 8.78 A H
ATOM 36 HA DALA A 9 4.898 5.494 3.819 0.25 9.23 A H
ATOM 37 HB1DALA A 9 6.581 6.848 4.648 0.25 9.91 A H
ATOM 38 HB2DALA A 9 6.086 7.408 3.252 0.25 9.91 A H
ATOM 39 HB3DALA A 9 5.614 8.101 4.595 0.25 9.91 A H
ATOM 40 N VAL A 10 5.119 5.606 6.502 1.00 11.13 A N
ATOM 41 CA VAL A 10 4.846 5.470 7.925 1.00 12.50 A C
ATOM 42 C VAL A 10 4.347 6.801 8.520 1.00 11.26 A C
ATOM 43 O VAL A 10 4.763 7.871 8.095 1.00 11.53 A O
ATOM 44 HA VAL A 10 4.118 4.835 8.017 1.00 12.50 A H
ATOM 45 CB AVAL A 10 5.994 4.806 8.722 0.21 14.17 A C
ATOM 46 CG1AVAL A 10 6.640 3.699 7.889 0.21 14.17 A C
ATOM 47 CG2AVAL A 10 7.005 5.815 9.197 0.21 15.20 A C
ATOM 48 H AVAL A 10 5.926 5.421 6.269 0.19 11.13 A H
ATOM 49 HB AVAL A 10 5.616 4.404 9.520 0.21 14.91 A H
ATOM 50 HG11AVAL A 10 7.358 3.289 8.396 0.21 16.29 A H
ATOM 51 HG12AVAL A 10 5.975 3.028 7.671 0.21 16.29 A H
ATOM 52 HG13AVAL A 10 6.998 4.077 7.070 0.21 16.29 A H
ATOM 53 HG21AVAL A 10 7.707 5.363 9.691 0.21 15.63 A H
ATOM 54 HG22AVAL A 10 7.391 6.271 8.433 0.21 15.63 A H
ATOM 55 HG23AVAL A 10 6.570 6.462 9.774 0.21 15.63 A H
ATOM 56 CB BVAL A 10 6.135 4.987 8.645 0.79 14.91 A C
ATOM 57 CG1BVAL A 10 6.081 5.228 10.144 0.79 16.28 A C
ATOM 58 CG2BVAL A 10 6.351 3.507 8.360 0.79 15.63 A C
ATOM 59 H BVAL A 10 5.928 5.441 6.263 0.28 11.13 A H
ATOM 60 HB BVAL A 10 6.879 5.504 8.299 0.79 14.91 A H
ATOM 61 HG11BVAL A 10 6.902 4.913 10.552 0.79 16.29 A H
ATOM 62 HG12BVAL A 10 5.978 6.177 10.316 0.79 16.29 A H
ATOM 63 HG13BVAL A 10 5.328 4.748 10.522 0.79 16.29 A H
ATOM 64 HG21BVAL A 10 7.156 3.205 8.809 0.79 15.63 A H
ATOM 65 HG22BVAL A 10 5.590 3.000 8.685 0.79 15.63 A H
ATOM 66 HG23BVAL A 10 6.445 3.372 7.404 0.79 15.63 A H
ATOM 67 H CVAL A 10 5.907 5.353 6.270 0.28 11.13 A H
ATOM 68 H DVAL A 10 5.903 5.349 6.260 0.25 11.13 A H
TER
END
"""
  pdb_str2="""\
CRYST1 10.707 11.101 13.552 90.00 90.00 90.00 P 1
ATOM 0 N AALA A 9 3.452 6.807 3.508 0.19 9.33 A N
ATOM 1 CA AALA A 9 4.572 6.204 4.211 0.19 9.82 A C
ATOM 2 C AALA A 9 4.165 5.990 5.664 0.19 10.34 A C
ATOM 3 O AALA A 9 3.000 6.165 6.021 0.19 10.96 A O
ATOM 4 CB AALA A 9 5.792 7.098 4.116 0.19 10.31 A C
ATOM 5 D AALA A 9 3.466 7.667 3.487 0.19 8.78 A D
ATOM 6 DA AALA A 9 4.802 5.351 3.810 0.19 9.23 A D
ATOM 7 DB1AALA A 9 6.533 6.686 4.588 0.19 9.91 A D
ATOM 8 DB2AALA A 9 6.031 7.221 3.184 0.19 9.91 A D
ATOM 9 DB3AALA A 9 5.594 7.960 4.515 0.19 9.91 A D
ATOM 10 N BALA A 9 3.348 6.697 3.518 0.28 8.28 A N
ATOM 11 CA BALA A 9 4.461 6.052 4.195 0.28 9.14 A C
ATOM 12 C BALA A 9 4.138 5.964 5.683 0.28 9.84 A C
ATOM 13 O BALA A 9 3.003 6.215 6.089 0.28 10.68 A O
ATOM 14 CB BALA A 9 5.726 6.829 3.952 0.28 9.20 A C
ATOM 15 D BALA A 9 3.422 7.551 3.454 0.28 8.78 A D
ATOM 16 DA BALA A 9 4.597 5.156 3.849 0.28 9.23 A D
ATOM 17 DB1BALA A 9 6.465 6.395 4.406 0.28 9.91 A D
ATOM 18 DB2BALA A 9 5.907 6.863 3.000 0.28 9.91 A D
ATOM 19 DB3BALA A 9 5.623 7.731 4.294 0.28 9.91 A D
ATOM 20 N CALA A 9 3.608 6.763 3.402 0.28 8.32 A N
ATOM 21 CA CALA A 9 4.617 6.060 4.177 0.28 9.56 A C
ATOM 22 C CALA A 9 4.219 6.081 5.651 0.28 10.15 A C
ATOM 23 O CALA A 9 3.126 6.528 6.006 0.28 10.64 A O
ATOM 24 CB CALA A 9 5.981 6.684 3.973 0.28 10.39 A C
ATOM 25 D CALA A 9 3.801 7.579 3.210 0.28 8.78 A D
ATOM 26 DA CALA A 9 4.671 5.139 3.876 0.28 9.23 A D
ATOM 27 DB1CALA A 9 6.639 6.202 4.497 0.28 9.91 A D
ATOM 28 DB2CALA A 9 6.220 6.639 3.034 0.28 9.91 A D
ATOM 29 DB3CALA A 9 5.959 7.611 4.257 0.28 9.91 A D
ATOM 30 N DALA A 9 3.518 6.930 3.530 0.25 8.78 A N
ATOM 31 CA DALA A 9 4.639 6.333 4.232 0.25 9.23 A C
ATOM 32 C DALA A 9 4.203 6.093 5.674 0.25 10.10 A C
ATOM 33 O DALA A 9 3.051 6.346 6.031 0.25 10.72 A O
ATOM 34 CB DALA A 9 5.837 7.255 4.177 0.25 9.91 A C
ATOM 35 D DALA A 9 3.490 7.789 3.568 0.25 8.78 A D
ATOM 36 DA DALA A 9 4.898 5.494 3.819 0.25 9.23 A D
ATOM 37 DB1DALA A 9 6.581 6.848 4.648 0.25 9.91 A D
ATOM 38 DB2DALA A 9 6.086 7.408 3.252 0.25 9.91 A D
ATOM 39 DB3DALA A 9 5.614 8.101 4.595 0.25 9.91 A D
ATOM 40 N VAL A 10 5.119 5.606 6.502 1.00 11.13 A N
ATOM 41 CA VAL A 10 4.846 5.470 7.925 1.00 12.50 A C
ATOM 42 C VAL A 10 4.347 6.801 8.520 1.00 11.26 A C
ATOM 43 O VAL A 10 4.763 7.871 8.095 1.00 11.53 A O
ATOM 44 HA VAL A 10 4.118 4.835 8.017 1.00 12.50 A D
ATOM 45 CB AVAL A 10 5.994 4.806 8.722 0.21 14.17 A C
ATOM 46 CG1AVAL A 10 6.640 3.699 7.889 0.21 14.17 A C
ATOM 47 CG2AVAL A 10 7.005 5.815 9.197 0.21 15.20 A C
ATOM 48 D AVAL A 10 5.926 5.421 6.269 0.19 11.13 A D
ATOM 49 DB AVAL A 10 5.616 4.404 9.520 0.21 14.91 A D
ATOM 50 DG11AVAL A 10 7.358 3.289 8.396 0.21 16.29 A D
ATOM 51 DG12AVAL A 10 5.975 3.028 7.671 0.21 16.29 A D
ATOM 52 DG13AVAL A 10 6.998 4.077 7.070 0.21 16.29 A D
ATOM 53 DG21AVAL A 10 7.707 5.363 9.691 0.21 15.63 A D
ATOM 54 DG22AVAL A 10 7.391 6.271 8.433 0.21 15.63 A D
ATOM 55 DG23AVAL A 10 6.570 6.462 9.774 0.21 15.63 A D
ATOM 56 CB BVAL A 10 6.135 4.987 8.645 0.79 14.91 A C
ATOM 57 CG1BVAL A 10 6.081 5.228 10.144 0.79 16.28 A C
ATOM 58 CG2BVAL A 10 6.351 3.507 8.360 0.79 15.63 A C
ATOM 59 D BVAL A 10 5.928 5.441 6.263 0.28 11.13 A D
ATOM 60 DB BVAL A 10 6.879 5.504 8.299 0.79 14.91 A D
ATOM 61 DG11BVAL A 10 6.902 4.913 10.552 0.79 16.29 A D
ATOM 62 DG12BVAL A 10 5.978 6.177 10.316 0.79 16.29 A D
ATOM 63 DG13BVAL A 10 5.328 4.748 10.522 0.79 16.29 A D
ATOM 64 DG21BVAL A 10 7.156 3.205 8.809 0.79 15.63 A D
ATOM 65 DG22BVAL A 10 5.590 3.000 8.685 0.79 15.63 A D
ATOM 66 DG23BVAL A 10 6.445 3.372 7.404 0.79 15.63 A D
ATOM 67 D CVAL A 10 5.907 5.353 6.270 0.28 11.13 A D
ATOM 68 D DVAL A 10 5.903 5.349 6.260 0.25 11.13 A D
TER
END
"""
  if (verbose): log = sys.stdout
  else: log = StringIO()
  # Both the H and the D variant must give identical occupancy groups:
  # four ALA conformers (each with its altloc-matched VAL amide H/D) and
  # the two-conformer VAL side chain.
  for pdb_str in [pdb_str1, pdb_str2]:
    model = get_model_str(pdb_str, log)
    res = occupancy_selections(
      model = model,
      as_flex_arrays = False)
    answer = \
      [[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 48],
        [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 59],
        [20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 67],
        [30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 68]],
       [[45, 46, 47, 49, 50, 51, 52, 53, 54, 55],
        [56, 57, 58, 60, 61, 62, 63, 64, 65, 66]]]
    assert approx_equal(res, answer)
def exercise_25(verbose):
  """Regression: exchangeable amide H/D pairs (altloc A/B on otherwise
  single-conformer residues) each form their own two-member group."""
  pdb_str="""\
CRYST1 10.707 11.101 13.552 90.00 90.00 90.00 P 1
ATOM 0 N ALA A 9 3.452 6.807 3.508 1.00 9.33 A N
ATOM 1 CA ALA A 9 4.572 6.204 4.211 1.00 9.82 A C
ATOM 2 C ALA A 9 4.165 5.990 5.664 1.00 10.34 A C
ATOM 3 O ALA A 9 3.000 6.165 6.021 1.00 10.96 A O
ATOM 4 CB ALA A 9 5.792 7.098 4.116 1.00 10.31 A C
ATOM 5 HA ALA A 9 4.802 5.351 3.810 1.00 9.23 A H
ATOM 6 HB1 ALA A 9 6.533 6.686 4.588 1.00 9.91 A H
ATOM 7 HB2 ALA A 9 6.031 7.221 3.184 1.00 9.91 A H
ATOM 8 HB3 ALA A 9 5.594 7.960 4.515 1.00 9.91 A H
ATOM 9 H AALA A 9 3.466 7.667 3.487 0.40 8.78 A H
ATOM 10 D BALA A 9 3.466 7.667 3.487 0.60 8.78 A D
ATOM 11 N VAL A 10 5.119 5.606 6.502 1.00 11.13 A N
ATOM 12 CA VAL A 10 4.846 5.470 7.925 1.00 12.50 A C
ATOM 13 C VAL A 10 4.347 6.801 8.520 1.00 11.26 A C
ATOM 14 O VAL A 10 4.763 7.871 8.095 1.00 11.53 A O
ATOM 15 HA VAL A 10 4.118 4.835 8.017 1.00 12.50 A H
ATOM 16 CB VAL A 10 5.994 4.806 8.722 1.00 14.17 A C
ATOM 17 CG1 VAL A 10 6.640 3.699 7.889 1.00 14.17 A C
ATOM 18 CG2 VAL A 10 7.005 5.815 9.197 1.00 15.20 A C
ATOM 19 HB VAL A 10 5.616 4.404 9.520 1.00 14.91 A H
ATOM 20 HG11 VAL A 10 7.358 3.289 8.396 1.00 16.29 A H
ATOM 21 HG12 VAL A 10 5.975 3.028 7.671 1.00 16.29 A H
ATOM 22 HG13 VAL A 10 6.998 4.077 7.070 1.00 16.29 A H
ATOM 23 HG21 VAL A 10 7.707 5.363 9.691 1.00 15.63 A H
ATOM 24 HG22 VAL A 10 7.391 6.271 8.433 1.00 15.63 A H
ATOM 25 HG23 VAL A 10 6.570 6.462 9.774 1.00 15.63 A H
ATOM 26 H AVAL A 10 5.926 5.421 6.269 0.30 11.13 A H
ATOM 27 D BVAL A 10 5.926 5.421 6.269 0.70 11.13 A D
TER
END
"""
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model_str(pdb_str, log)
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  answer = [ [[9],[10]], [[26],[27]] ]
  assert approx_equal(res, answer)
def exercise_26(verbose):
  """Regression: a single exchangeable backbone H/D pair on LEU forms one
  two-member occupancy group; all fully-occupied atoms are excluded."""
  pdb_str="""\
CRYST1 71.040 72.017 72.362 90.00 100.48 90.00 C 1 2 1
ATOM 96 N PRO L 5 2.689 13.877 15.387 1.00 13.65 N
ATOM 97 CA PRO L 5 1.824 14.762 14.572 1.00 17.31 C
ATOM 98 C PRO L 5 0.338 14.432 14.641 1.00 20.79 C
ATOM 99 O PRO L 5 -0.466 15.376 14.642 1.00 20.37 O
ATOM 100 CB PRO L 5 2.330 14.534 13.143 1.00 20.71 C
ATOM 101 CG PRO L 5 3.772 14.184 13.326 1.00 20.25 C
ATOM 102 CD PRO L 5 3.871 13.403 14.633 1.00 16.57 C
ATOM 103 HA PRO L 5 1.981 15.805 14.846 1.00 17.31 H
ATOM 104 HB2 PRO L 5 1.780 13.709 12.691 1.00 20.71 H
ATOM 105 HB3 PRO L 5 2.220 15.447 12.558 1.00 20.71 H
ATOM 106 HG2 PRO L 5 4.103 13.567 12.492 1.00 20.25 H
ATOM 107 HG3 PRO L 5 4.363 15.098 13.382 1.00 20.25 H
ATOM 108 HD2 PRO L 5 3.805 12.331 14.446 1.00 16.57 H
ATOM 109 HD3 PRO L 5 4.791 13.666 15.154 1.00 16.57 H
ATOM 110 N LEU L 6 -0.052 13.175 14.677 1.00 13.93 N
ATOM 111 CA LEU L 6 -1.446 12.769 14.667 1.00 15.53 C
ATOM 112 C LEU L 6 -2.079 12.634 16.029 1.00 17.57 C
ATOM 113 O LEU L 6 -3.268 12.311 16.111 1.00 18.17 O
ATOM 114 CB LEU L 6 -1.648 11.435 13.889 1.00 17.76 C
ATOM 115 CG LEU L 6 -1.291 11.544 12.396 1.00 18.22 C
ATOM 116 CD1 LEU L 6 -1.474 10.257 11.651 1.00 18.93 C
ATOM 117 CD2 LEU L 6 -2.125 12.629 11.689 1.00 22.55 C
ATOM 118 HA LEU L 6 -2.017 13.534 14.144 1.00 15.53 H
ATOM 119 HB2 LEU L 6 -1.011 10.669 14.331 1.00 17.76 H
ATOM 120 HB3 LEU L 6 -2.693 11.135 13.959 1.00 17.76 H
ATOM 121 HG LEU L 6 -0.242 11.827 12.310 1.00 18.22 H
ATOM 122 HD11 LEU L 6 -0.750 10.210 10.838 1.00 18.93 H
ATOM 123 HD12 LEU L 6 -1.319 9.426 12.338 1.00 18.93 H
ATOM 124 HD13 LEU L 6 -2.488 10.221 11.252 1.00 18.93 H
ATOM 125 HD21 LEU L 6 -2.084 12.462 10.613 1.00 22.55 H
ATOM 126 HD22 LEU L 6 -3.156 12.565 12.037 1.00 22.55 H
ATOM 127 HD23 LEU L 6 -1.712 13.609 11.929 1.00 22.55 H
ATOM 128 H ALEU L 6 0.595 12.387 14.715 0.50 13.93 H
ATOM 129 D BLEU L 6 0.595 12.387 14.715 0.50 13.93 D
"""
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model_str(pdb_str, log)
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  answer = [ [[32], [33]] ]
  assert approx_equal(res, answer)
def exercise_27(verbose):
  """Occupancy selections for a residue whose entire main/side chain is split
  into A/B alternate conformations (MET 0), preceded by a single-conformer
  residue (SER -1).  Expect exactly one constraint group pairing the complete
  A-altloc atom set with the complete B-altloc atom set.
  """
  pdb_str="""\
CRYST1   64.714   39.225   38.645  90.00 117.38  90.00 C 1 2 1
ATOM      0  N   SER A  -1      20.605   9.913  24.660  1.00 32.98           N
ATOM      1  CA  SER A  -1      21.415  10.057  23.431  1.00 25.22           C
ATOM      2  C   SER A  -1      20.514  10.247  22.233  1.00 25.05           C
ATOM      3  O   SER A  -1      19.332   9.926  22.266  1.00 28.08           O
ATOM      4  CB  SER A  -1      22.253   8.810  23.194  1.00 28.97           C
ATOM      5  OG  SER A  -1      21.417   7.708  22.900  1.00 37.21           O
ATOM      6  H1  SER A  -1      19.896  10.449  24.612  1.00 38.17           H
ATOM      7  H2  SER A  -1      20.335   9.069  24.737  1.00 27.38           H
ATOM      8  H3  SER A  -1      21.098  10.134  25.368  1.00 38.75           H
ATOM      9  HA  SER A  -1      21.997  10.829  23.514  1.00 12.22           H
ATOM     10  HB2 SER A  -1      22.844   8.970  22.440  1.00 22.78           H
ATOM     11  HB3 SER A  -1      22.771   8.614  23.990  1.00 30.47           H
ATOM     12  HG  SER A  -1      21.872   7.007  22.826  1.00 42.35           H
ATOM     13  N  AMET A   0      21.097  10.723  21.147  0.49 20.67           N
ATOM     14  CA AMET A   0      20.340  10.870  19.929  0.49 21.49           C
ATOM     15  C  AMET A   0      21.236  10.795  18.720  0.49 18.70           C
ATOM     16  O  AMET A   0      22.394  11.216  18.750  0.49 19.47           O
ATOM     17  CB AMET A   0      19.569  12.183  19.945  0.49 22.62           C
ATOM     18  CG AMET A   0      20.423  13.414  20.138  0.49 24.87           C
ATOM     19  SD AMET A   0      19.580  14.932  19.650  0.49 29.00           S
ATOM     20  CE AMET A   0      17.946  14.760  20.377  0.49 36.23           C
ATOM     21  H  AMET A   0      21.920  10.964  21.095  0.49 28.25           H
ATOM     22  HA AMET A   0      19.697  10.146  19.870  0.49  7.25           H
ATOM     23  HB2AMET A   0      19.093  12.280  19.105  0.49 13.51           H
ATOM     24  HB3AMET A   0      18.941  12.141  20.681  0.49  7.62           H
ATOM     25  HG2AMET A   0      20.671  13.490  21.072  0.49 26.02           H
ATOM     26  HG3AMET A   0      21.219  13.333  19.589  0.49 30.87           H
ATOM     27  HE1AMET A   0      17.284  14.819  19.669  0.49 20.79           H
ATOM     28  HE2AMET A   0      17.863  13.908  20.829  0.49  8.45           H
ATOM     29  HE3AMET A   0      17.812  15.481  21.012  0.49 30.25           H
ATOM     30  N  BMET A   0      21.082  10.809  21.171  0.51 21.19           N
ATOM     31  CA BMET A   0      20.368  11.023  19.923  0.51 23.13           C
ATOM     32  C  BMET A   0      21.273  10.654  18.766  0.51 21.10           C
ATOM     33  O  BMET A   0      22.496  10.703  18.893  0.51 19.93           O
ATOM     34  CB BMET A   0      19.961  12.488  19.782  0.51 27.15           C
ATOM     35  CG BMET A   0      19.070  12.993  20.889  0.51 29.67           C
ATOM     36  SD BMET A   0      18.685  14.739  20.684  0.51 41.63           S
ATOM     37  CE BMET A   0      17.734  15.043  22.171  0.51 35.23           C
ATOM     38  HA BMET A   0      19.568  10.476  19.897  0.51 36.28           H
ATOM     39  HB2BMET A   0      20.762  13.035  19.778  0.51  8.59           H
ATOM     40  HB3BMET A   0      19.485  12.602  18.945  0.51 27.25           H
ATOM     41  HG2BMET A   0      18.236  12.497  20.877  0.51 21.33           H
ATOM     42  HG3BMET A   0      19.519  12.877  21.741  0.51 34.36           H
ATOM     43  HE1BMET A   0      17.141  15.795  22.018  0.51 42.08           H
ATOM     44  HE2BMET A   0      17.217  14.249  22.380  0.51 22.21           H
ATOM     45  HE3BMET A   0      18.343  15.241  22.899  0.51 40.99           H
"""
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model_str(pdb_str, log)
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  # One group: all A-conformer atoms of MET 0 constrained against all B atoms.
  answer = [[[13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29],
             [30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45]]]
  assert approx_equal(res, answer)
def exercise_28(verbose):
  """Occupancy selections where a split residue (LEU 49 A/B) drags along the
  altloc'd exchangeable hydrogens of its neighbours: ASP 48 contributes a
  lone H A/B pair (its own group) and VAL 50's split amide H atoms join the
  LEU 49 A/B groups.
  """
  pdb_str="""\
CRYST1   64.360   64.360   46.038  90.00  90.00 120.00 P 63
ATOM      0  N   ASP A  48       8.896  25.394  -7.791  1.00  8.05           N
ATOM      1  CA  ASP A  48       8.495  26.452  -6.936  1.00  8.42           C
ATOM      2  C   ASP A  48       8.287  26.047  -5.477  1.00  8.20           C
ATOM      3  O   ASP A  48       8.309  26.881  -4.579  1.00 10.68           O
ATOM      4  CB  ASP A  48       7.216  27.151  -7.426  1.00  9.40           C
ATOM      5  CG  ASP A  48       7.457  27.744  -8.791  1.00 10.91           C
ATOM      6  OD1 ASP A  48       8.234  28.729  -8.836  1.00 16.64           O
ATOM      7  OD2 ASP A  48       6.845  27.293  -9.764  1.00 12.53           O
ATOM      8  HA  ASP A  48       9.193  27.122  -6.935  1.00  8.42           H
ATOM      9  HB2 ASP A  48       6.494  26.507  -7.490  1.00  9.40           H
ATOM     10  HB3 ASP A  48       6.981  27.867  -6.815  1.00  9.40           H
ATOM     11  H  AASP A  48       8.303  25.156  -8.367  0.50  8.04           H
ATOM     12  H  BASP A  48       8.242  25.041  -8.223  0.50  8.04           H
ATOM     13  N  ALEU A  49       8.083  24.740  -5.245  0.79  7.34           N
ATOM     14  CA ALEU A  49       7.817  24.239  -3.906  0.79  6.67           C
ATOM     15  C  ALEU A  49       8.124  22.738  -3.941  0.79  5.81           C
ATOM     16  O  ALEU A  49       7.880  22.074  -4.958  0.79  6.71           O
ATOM     17  CB ALEU A  49       6.385  24.559  -3.494  0.79  7.19           C
ATOM     18  CG ALEU A  49       5.914  24.092  -2.111  0.79  7.07           C
ATOM     19  CD1ALEU A  49       4.885  25.059  -1.536  0.79  8.84           C
ATOM     20  CD2ALEU A  49       5.323  22.713  -2.192  0.79  7.46           C
ATOM     21  H  ALEU A  49       8.095  24.131  -5.852  0.79  7.25           H
ATOM     22  HA ALEU A  49       8.421  24.661  -3.275  0.79  7.14           H
ATOM     23  HB2ALEU A  49       6.277  25.523  -3.518  0.79  9.16           H
ATOM     24  HB3ALEU A  49       5.791  24.158  -4.147  0.79  9.16           H
ATOM     25  HG ALEU A  49       6.673  24.062  -1.508  0.79  6.91           H
ATOM     26 HD11ALEU A  49       4.592  24.730  -0.672  0.79  9.95           H
ATOM     27 HD12ALEU A  49       5.294  25.933  -1.437  0.79  9.95           H
ATOM     28 HD13ALEU A  49       4.130  25.113  -2.143  0.79  9.95           H
ATOM     29 HD21ALEU A  49       4.960  22.476  -1.324  0.79  8.29           H
ATOM     30 HD22ALEU A  49       4.616  22.710  -2.856  0.79  8.29           H
ATOM     31 HD23ALEU A  49       6.015  22.082  -2.442  0.79  8.29           H
ATOM     32  N  BLEU A  49       7.975  24.768  -5.242  0.21  7.25           N
ATOM     33  CA BLEU A  49       7.654  24.205  -3.941  0.21  7.15           C
ATOM     34  C  BLEU A  49       8.003  22.716  -3.887  0.21  7.83           C
ATOM     35  O  BLEU A  49       7.689  22.025  -4.858  0.21  5.06           O
ATOM     36  CB BLEU A  49       6.162  24.365  -3.605  0.21  9.16           C
ATOM     37  CG BLEU A  49       5.681  23.652  -2.331  0.21  6.91           C
ATOM     38  CD1BLEU A  49       6.301  24.276  -1.095  0.21  9.95           C
ATOM     39  CD2BLEU A  49       4.156  23.640  -2.248  0.21  8.29           C
ATOM     40  H  BLEU A  49       7.943  24.178  -5.867  0.21  7.25           H
ATOM     41  HA BLEU A  49       8.173  24.662  -3.262  0.21  7.14           H
ATOM     42  HB2BLEU A  49       5.975  25.310  -3.494  0.21  9.16           H
ATOM     43  HB3BLEU A  49       5.645  24.021  -4.346  0.21  9.16           H
ATOM     44  HG BLEU A  49       5.963  22.725  -2.358  0.21  6.91           H
ATOM     45 HD11BLEU A  49       6.470  23.579  -0.443  0.21  9.95           H
ATOM     46 HD12BLEU A  49       7.132  24.697  -1.346  0.21  9.95           H
ATOM     47 HD13BLEU A  49       5.691  24.937  -0.731  0.21  9.95           H
ATOM     48 HD21BLEU A  49       3.888  23.174  -1.441  0.21  8.29           H
ATOM     49 HD22BLEU A  49       3.834  24.555  -2.225  0.21  8.29           H
ATOM     50 HD23BLEU A  49       3.802  23.184  -3.027  0.21  8.29           H
ATOM     51  N   VAL A  50       8.616  22.239  -2.807  1.00  5.93           N
ATOM     52  CA  VAL A  50       8.845  20.793  -2.609  1.00  5.53           C
ATOM     53  C   VAL A  50       7.981  20.307  -1.457  1.00  5.75           C
ATOM     54  O   VAL A  50       7.971  20.912  -0.389  1.00  6.63           O
ATOM     55  CB  VAL A  50      10.325  20.527  -2.343  1.00  6.31           C
ATOM     56  CG1 VAL A  50      10.556  19.043  -2.072  1.00  7.62           C
ATOM     57  CG2 VAL A  50      11.170  20.998  -3.512  1.00  7.52           C
ATOM     58  HA  VAL A  50       8.593  20.305  -3.404  1.00  5.53           H
ATOM     59  HB  VAL A  50      10.599  21.022  -1.555  1.00  6.31           H
ATOM     60 HG11 VAL A  50      11.507  18.860  -2.118  1.00  7.62           H
ATOM     61 HG12 VAL A  50      10.221  18.824  -1.188  1.00  7.62           H
ATOM     62 HG13 VAL A  50      10.087  18.523  -2.744  1.00  7.62           H
ATOM     63 HG21 VAL A  50      12.097  20.765  -3.345  1.00  7.52           H
ATOM     64 HG22 VAL A  50      10.860  20.562  -4.321  1.00  7.52           H
ATOM     65 HG23 VAL A  50      11.081  21.960  -3.600  1.00  7.52           H
ATOM     66  H  AVAL A  50       8.830  22.718  -2.125  0.79  5.93           H
ATOM     67  H  BVAL A  50       8.914  22.729  -2.166  0.21  5.93           H
TER
END
"""
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model_str(pdb_str, log)
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  # Group 1: ASP 48's alternate amide hydrogens; group 2: LEU 49 A/B plus
  # VAL 50's A/B amide hydrogens (66/67), which ride on the LEU conformers.
  answer = [ [[11],[12]],
             [[13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,66],
              [32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,67]]]
  assert approx_equal(res, answer)
def exercise_29(verbose):
  """Occupancy selections across three consecutive residues with partial
  alternate conformations (GLY 285 / ASN 286 / GLU 287), where the split
  backbone of each residue couples to its neighbours' altloc'd H/N atoms.
  Expect three constraint groups, one per coupled A/B set.
  """
  pdb_str="""\
CRYST1  148.270   44.010   47.390  90.00 101.57  90.00 C 1 2 1
ATOM      0  N   GLY A 285     -41.269  16.430  -4.458  1.00 18.77           N
ATOM      1  CA  GLY A 285     -41.021  16.772  -5.854  1.00 20.45           C
ATOM      2  H   GLY A 285     -42.080  16.182  -4.313  1.00 22.53           H
ATOM      3  C  AGLY A 285     -41.133  18.291  -6.119  0.85 20.52           C
ATOM      4  O  AGLY A 285     -41.030  18.770  -7.258  0.85 22.89           O
ATOM      5  HA2AGLY A 285     -40.130  16.482  -6.104  0.85 24.54           H
ATOM      6  HA3AGLY A 285     -41.663  16.314  -6.418  0.85 24.54           H
ATOM      7  C  BGLY A 285     -40.556  18.155  -6.113  0.15 20.45           C
ATOM      8  O  BGLY A 285     -39.925  18.445  -7.127  0.15 21.06           O
ATOM      9  HA2BGLY A 285     -40.352  16.166  -6.208  0.15 24.54           H
ATOM     10  HA3BGLY A 285     -41.839  16.638  -6.357  0.15 24.54           H
ATOM     11  N  AASN A 286     -41.375  19.070  -5.066  0.75 20.63           N
ATOM     12  CA AASN A 286     -41.558  20.524  -5.179  0.75 21.34           C
ATOM     13  C  AASN A 286     -40.921  21.176  -3.941  0.75 19.76           C
ATOM     14  O  AASN A 286     -41.136  20.695  -2.825  0.75 18.94           O
ATOM     15  CB AASN A 286     -43.061  20.822  -5.246  0.75 23.19           C
ATOM     16  CG AASN A 286     -43.390  22.293  -5.087  0.75 24.76           C
ATOM     17  OD1AASN A 286     -43.580  22.784  -3.975  0.75 25.15           O
ATOM     18  ND2AASN A 286     -43.491  22.996  -6.206  0.75 26.38           N
ATOM     19  H  AASN A 286     -41.441  18.778  -4.260  0.75 24.76           H
ATOM     20  HA AASN A 286     -41.121  20.863  -5.988  0.75 25.61           H
ATOM     21  HB2AASN A 286     -43.400  20.532  -6.107  0.75 27.82           H
ATOM     22  HB3AASN A 286     -43.509  20.338  -4.535  0.75 27.82           H
ATOM     23 HD21AASN A 286     -43.371  22.614  -6.967  0.75 31.65           H
ATOM     24 HD22AASN A 286     -43.677  23.835  -6.171  0.75 31.65           H
ATOM     25  N  BASN A 286     -40.878  19.026  -5.184  0.25 20.30           N
ATOM     26  CA BASN A 286     -40.589  20.401  -5.396  0.25 20.20           C
ATOM     27  C  BASN A 286     -40.224  21.016  -4.085  0.25 18.88           C
ATOM     28  O  BASN A 286     -40.136  20.364  -3.047  0.25 18.65           O
ATOM     29  CB BASN A 286     -41.798  21.088  -6.023  0.25 22.27           C
ATOM     30  CG BASN A 286     -42.950  21.238  -5.058  0.25 23.28           C
ATOM     31  OD1BASN A 286     -42.781  21.720  -3.938  0.25 23.18           O
ATOM     32  ND2BASN A 286     -44.137  20.828  -5.491  0.25 24.35           N
ATOM     33  H  BASN A 286     -41.259  18.841  -4.435  0.25 24.36           H
ATOM     34  HA BASN A 286     -39.828  20.488  -6.007  0.25 24.24           H
ATOM     35  HB2BASN A 286     -41.538  21.974  -6.321  0.25 26.72           H
ATOM     36  HB3BASN A 286     -42.105  20.561  -6.777  0.25 26.72           H
ATOM     37 HD21BASN A 286     -44.216  20.499  -6.282  0.25 29.22           H
ATOM     38 HD22BASN A 286     -44.826  20.891  -4.981  0.25 29.22           H
ATOM     39  CA  GLU A 287     -39.388  22.905  -3.000  1.00 16.67           C
ATOM     40  C   GLU A 287     -40.376  23.372  -1.952  1.00 15.65           C
ATOM     41  O   GLU A 287     -40.132  23.201  -0.755  1.00 14.31           O
ATOM     42  CB  GLU A 287     -38.514  24.074  -3.481  1.00 17.80           C
ATOM     43  CG  GLU A 287     -37.273  23.645  -4.302  1.00 19.41           C
ATOM     44  CD  GLU A 287     -36.290  24.789  -4.558  1.00 20.84           C
ATOM     45  OE1 GLU A 287     -36.554  25.925  -4.128  1.00 21.26           O
ATOM     46  OE2 GLU A 287     -35.220  24.552  -5.185  1.00 22.93           O
ATOM     47  HB2 GLU A 287     -39.052  24.654  -4.041  1.00 21.36           H
ATOM     48  HB3 GLU A 287     -38.200  24.566  -2.707  1.00 21.36           H
ATOM     49  HG2 GLU A 287     -36.801  22.949  -3.818  1.00 23.29           H
ATOM     50  HG3 GLU A 287     -37.568  23.308  -5.163  1.00 23.29           H
ATOM     51  N  AGLU A 287     -40.109  22.235  -4.122  0.02 18.26           N
ATOM     52  H  AGLU A 287     -39.954  22.592  -4.889  0.02 21.91           H
ATOM     53  HA AGLU A 287     -38.796  22.250  -2.576  0.02 20.01           H
ATOM     54  N  BGLU A 287     -40.017  22.305  -4.119  0.98 18.44           N
ATOM     55  H  BGLU A 287     -40.228  22.836  -4.762  0.98 22.13           H
ATOM     56  HA BGLU A 287     -38.799  22.245  -2.580  0.98 20.01           H
TER
END
"""
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model_str(pdb_str, log)
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  # Three coupled A/B groups: GLY 285 split atoms (+ ASN 286 H), the full
  # ASN 286 conformers (+ GLU 287 H), and GLU 287's split N/HA pair.
  answer = [ [[3,4,5,6,19],
              [7,8,9,10,33]],
             [[11,12,13,14,15,16,17,18,20,21,22,23,24,52],
              [25,26,27,28,29,30,31,32,34,35,36,37,38,55]],
             [[51,53],
              [54,56]]]
  assert approx_equal(res, answer)
def exercise_30(verbose):
  """Occupancy selections with a lone partial-occupancy atom (CG of GLU 115,
  occ 0.55, no altloc) that must be refined as its own single-atom group,
  plus ordinary A/B alternate-conformer groups for GLU 115 and VAL 116.
  """
  pdb_str="""\
CRYST1   42.198  121.958   37.277  90.00  90.00  90.00 P 21 21 2
ATOM      0  CG  GLU A 115      30.700  22.521   0.401  0.55 25.56           C
ATOM      1  CD  GLU A 115      31.809  23.320  -0.265  1.00 25.96           C
ATOM      2  OE1 GLU A 115      32.842  22.797  -0.723  1.00 24.92           O
ATOM      3  OE2 GLU A 115      31.621  24.544  -0.376  1.00 27.30           O
ATOM      4  N  AGLU A 115      27.819  20.841  -1.012  0.44 19.61           N
ATOM      5  CA AGLU A 115      28.757  21.222  -0.004  0.44 20.79           C
ATOM      6  C  AGLU A 115      28.192  21.930   1.203  0.44 19.50           C
ATOM      7  O  AGLU A 115      27.475  22.922   1.098  0.44 20.38           O
ATOM      8  CB AGLU A 115      29.799  22.079  -0.601  0.44 23.59           C
ATOM      9  N  BGLU A 115      27.018  20.969  -0.446  0.56 27.49           N
ATOM     10  CA BGLU A 115      28.194  21.387   0.311  0.56 26.06           C
ATOM     11  C  BGLU A 115      27.541  21.859   1.611  0.56 25.00           C
ATOM     12  O  BGLU A 115      26.660  22.715   1.640  0.56 26.43           O
ATOM     13  CB BGLU A 115      29.189  22.459  -0.356  0.56 26.03           C
ATOM     14  N  AVAL A 116      28.585  21.407   2.363  0.53 19.29           N
ATOM     15  CA AVAL A 116      28.181  21.931   3.670  0.53 18.27           C
ATOM     16  C  AVAL A 116      29.427  21.990   4.589  0.53 17.81           C
ATOM     17  O  AVAL A 116      30.464  21.420   4.280  0.53 17.67           O
ATOM     18  CB AVAL A 116      27.090  21.046   4.342  0.53 20.31           C
ATOM     19  CG1AVAL A 116      25.743  21.168   3.633  0.53 22.78           C
ATOM     20  CG2AVAL A 116      27.498  19.598   4.395  0.53 20.85           C
ATOM     21  H  AVAL A 116      29.104  20.724   2.421  0.53 23.15           H
ATOM     22  HA AVAL A 116      27.827  22.838   3.564  0.53 21.92           H
ATOM     23  HB AVAL A 116      26.967  21.353   5.264  0.53 24.37           H
ATOM     24  N  BVAL A 116      27.987  21.231   2.690  0.47 21.87           N
ATOM     25  CA BVAL A 116      27.614  21.560   4.041  0.47 19.86           C
ATOM     26  C  BVAL A 116      28.915  21.857   4.746  0.47 19.34           C
ATOM     27  O  BVAL A 116      29.983  21.603   4.213  0.47 18.81           O
ATOM     28  CB BVAL A 116      26.938  20.336   4.707  0.47 19.81           C
ATOM     29  CG1BVAL A 116      25.591  20.061   4.058  0.47 21.33           C
ATOM     30  CG2BVAL A 116      27.825  19.086   4.627  0.47 19.25           C
ATOM     31  H  BVAL A 116      28.539  20.573   2.651  0.47 26.24           H
ATOM     32  HA BVAL A 116      27.021  22.340   4.070  0.47 23.83           H
ATOM     33  HB BVAL A 116      26.782  20.535   5.654  0.47 23.76           H
TER
END
"""
  if (verbose): log = sys.stdout
  else: log = StringIO()
  model = get_model_str(pdb_str, log)
  res = occupancy_selections(
    model = model,
    as_flex_arrays = False)
  # Group 1: the lone partial-occupancy CG atom; groups 2/3: the A/B
  # conformer pairs of GLU 115 and VAL 116 (amide H riding with each).
  answer = [ [[0]],
             [[4, 5, 6, 7, 8, 21],
              [9, 10, 11, 12, 13, 31]],
             [[14, 15, 16, 17, 18, 19, 20, 22, 23],
              [24, 25, 26, 27, 28, 29, 30, 32, 33]] ]
  assert approx_equal(res, answer)
def prepare_correlated_occupancy_inputs(
    prefix="tst_group_correlated_occupancy",
    create_mtz=False,
    d_min=1.0):
  """Write input files for the correlated-occupancy regrouping test.

  Creates <prefix>_in.pdb with the raw two-conformer GNNQQNY model (altloc
  occupancies 0.62/0.38, 0.59/0.41, ...) and <prefix>_start.pdb with
  homogenized B-factors (5.0) and occupancies (0.5 for every partially
  occupied atom).  When create_mtz is True, simulated amplitudes are also
  written to <prefix>.mtz via phenix.fmodel.

  :param prefix: basename used for all output files
  :param create_mtz: if True, also generate an MTZ of calculated data
  :param d_min: high-resolution limit passed to fmodel
  """
  pdb_raw = """\
CRYST1   21.937    4.866   23.477  90.00 107.08  90.00 P 1 21 1
SCALE1      0.045585  0.000000  0.014006        0.00000
SCALE2      0.000000  0.205508  0.000000        0.00000
SCALE3      0.000000  0.000000  0.044560        0.00000
ATOM      1  N   GLY A   1      -9.056   4.638   6.050  1.00 16.77           N
ATOM      2  CA  GLY A   1      -9.058   4.194   4.668  1.00 16.57           C
ATOM      3  C   GLY A   1      -7.993   3.144   4.430  1.00 16.16           C
ATOM      4  O   GLY A   1      -7.521   2.511   5.374  1.00 16.78           O
ATOM      5  N   ASN A   2      -7.616   2.953   3.169  1.00 15.02           N
ATOM      6  CA  ASN A   2      -6.526   2.044   2.840  1.00 14.10           C
ATOM      7  C   ASN A   2      -5.216   2.527   3.434  1.00 13.13           C
ATOM      8  O   ASN A   2      -4.943   3.727   3.466  1.00 11.91           O
ATOM      9  CB  ASN A   2      -6.382   1.888   1.330  1.00 15.38           C
ATOM     10  CG  ASN A   2      -7.632   1.344   0.685  1.00 14.08           C
ATOM     11  OD1 ASN A   2      -8.042   0.216   0.957  1.00 17.46           O
ATOM     12  ND2 ASN A   2      -8.247   2.142  -0.178  1.00 11.72           N
ATOM     13  N   ASN A   3      -4.405   1.583   3.898  1.00 12.26           N
ATOM     14  CA  ASN A   3      -3.172   1.915   4.595  1.00 11.74           C
ATOM     15  C   ASN A   3      -1.922   1.362   3.915  1.00 11.10           C
ATOM     16  O   ASN A   3      -1.816   0.158   3.672  1.00 10.42           O
ATOM     17  CB  ASN A   3      -3.243   1.409   6.039  1.00 12.15           C
ATOM     18  CG  ASN A   3      -2.000   1.749   6.841  1.00 12.82           C
ATOM     19  OD1 ASN A   3      -1.705   2.920   7.082  1.00 15.05           O
ATOM     20  ND2 ASN A   3      -1.272   0.724   7.270  1.00 13.48           N
ATOM     21  N   GLN A   4      -0.987   2.256   3.598  1.00 10.29           N
ATOM     22  CA  GLN A   4       0.361   1.860   3.201  1.00 10.53           C
ATOM     23  C   GLN A   4       1.398   2.605   4.031  1.00 10.24           C
ATOM     24  O   GLN A   4       1.454   3.834   4.025  1.00  8.86           O
ATOM     25  CB  GLN A   4       0.626   2.117   1.712  1.00  9.80           C
ATOM     26  CG  GLN A   4       1.924   1.459   1.221  1.00 10.25           C
ATOM     27  CD  GLN A   4       2.465   2.050  -0.073  1.00 12.43           C
ATOM     28  OE1 GLN A   4       2.674   3.260  -0.178  1.00 14.62           O
ATOM     29  NE2 GLN A   4       2.708   1.192  -1.059  1.00  9.05           N
ATOM     30  N  AGLN A   5       2.202   1.848   4.775  0.62 10.38           N
ATOM     31  CA AGLN A   5       3.288   2.419   5.569  0.62 11.39           C
ATOM     32  C  AGLN A   5       4.638   1.844   5.123  0.62 11.52           C
ATOM     33  O  AGLN A   5       4.824   0.625   5.095  0.62 12.05           O
ATOM     34  CB AGLN A   5       3.046   2.170   7.063  0.62 11.96           C
ATOM     35  CG AGLN A   5       1.854   2.946   7.622  0.62 10.81           C
ATOM     36  CD AGLN A   5       1.361   2.406   8.951  0.62 13.10           C
ATOM     37  OE1AGLN A   5       0.800   1.312   9.019  0.62 10.65           O
ATOM     38  NE2AGLN A   5       1.562   3.175  10.016  0.62 12.30           N
ATOM     39  N  BGLN A   5       2.239   1.858   4.725  0.38 10.38           N
ATOM     40  CA BGLN A   5       3.326   2.476   5.450  0.38 11.39           C
ATOM     41  C  BGLN A   5       4.639   1.850   5.057  0.38 11.52           C
ATOM     42  O  BGLN A   5       4.814   0.627   5.020  0.38 12.05           O
ATOM     43  CB BGLN A   5       3.110   2.331   6.919  0.38 11.96           C
ATOM     44  CG BGLN A   5       2.695   0.980   7.141  0.38 10.81           C
ATOM     45  CD BGLN A   5       2.882   0.618   8.479  0.38 13.10           C
ATOM     46  OE1BGLN A   5       2.538   1.369   9.406  0.38 10.65           O
ATOM     47  NE2BGLN A   5       3.380  -0.597   8.664  0.38 12.30           N
ATOM     48  N   ASN A   6       5.565   2.732   4.753  1.00 11.99           N
ATOM     49  CA  ASN A   6       6.868   2.339   4.280  1.00 12.30           C
ATOM     50  C   ASN A   6       7.881   2.785   5.302  1.00 13.40           C
ATOM     51  O   ASN A   6       8.262   3.954   5.351  1.00 13.92           O
ATOM     52  CB  ASN A   6       7.133   2.954   2.915  1.00 12.13           C
ATOM     53  CG  ASN A   6       5.988   2.721   1.955  1.00 12.77           C
ATOM     54  OD1 ASN A   6       5.795   1.608   1.466  1.00 14.27           O
ATOM     55  ND2 ASN A   6       5.211   3.764   1.690  1.00 10.07           N
ATOM     56  N  ATYR A   7       8.304   1.849   6.146  0.59 14.70           N
ATOM     57  CA ATYR A   7       9.167   2.166   7.280  0.59 15.18           C
ATOM     58  C  ATYR A   7      10.622   2.326   6.868  0.59 15.91           C
ATOM     59  O  ATYR A   7      11.054   1.799   5.844  0.59 15.76           O
ATOM     60  CB ATYR A   7       9.044   1.086   8.356  0.59 15.35           C
ATOM     61  CG ATYR A   7       7.640   0.946   8.887  0.59 14.45           C
ATOM     62  CD1ATYR A   7       6.759   0.027   8.335  0.59 15.68           C
ATOM     63  CD2ATYR A   7       7.187   1.750   9.924  0.59 14.80           C
ATOM     64  CE1ATYR A   7       5.469  -0.098   8.810  0.59 13.46           C
ATOM     65  CE2ATYR A   7       5.899   1.633  10.407  0.59 14.33           C
ATOM     66  CZ ATYR A   7       5.044   0.707   9.845  0.59 15.09           C
ATOM     67  OH ATYR A   7       3.759   0.583  10.319  0.59 14.39           O
ATOM     68  OXTATYR A   7      11.394   2.990   7.558  0.59 17.49           O
ATOM     70  N  BTYR A   7       8.323   1.843   6.116  0.41 14.70           N
ATOM     71  CA BTYR A   7       9.149   2.183   7.247  0.41 15.18           C
ATOM     72  C  BTYR A   7      10.629   2.316   6.861  0.41 15.91           C
ATOM     73  O  BTYR A   7      11.084   1.756   5.864  0.41 15.76           O
ATOM     74  CB BTYR A   7       8.954   1.147   8.348  0.41 15.35           C
ATOM     75  CG BTYR A   7       9.942   1.356   9.417  0.41 14.45           C
ATOM     76  CD1BTYR A   7       9.807   2.381  10.320  0.41 15.68           C
ATOM     77  CD2BTYR A   7      11.054   0.580   9.473  0.41 14.80           C
ATOM     78  CE1BTYR A   7      10.746   2.569  11.248  0.41 13.46           C
ATOM     79  CE2BTYR A   7      11.968   0.749  10.405  0.41 14.33           C
ATOM     80  CZ BTYR A   7      11.858   1.724  11.252  0.41 15.09           C
ATOM     81  OH BTYR A   7      12.921   1.747  12.113  0.41 14.39           O
ATOM     82  OXTBTYR A   7      11.408   3.001   7.529  0.41 17.49           O
TER
HETATM   83  O   HOH A   8      -6.471   5.227   7.124  1.00 22.62           O
HETATM   84  O   HOH A   9      10.431   1.858   3.216  1.00 19.71           O
HETATM   85  O   HOH A  10     -11.286   1.756  -1.468  1.00 17.08           O
HETATM   86  O  AHOH A  11      11.808   4.179   9.970  0.60 23.99           O
HETATM   87  O   HOH A  12      13.605   1.327   9.198  1.00 26.17           O
HETATM   88  O   HOH A  13      -2.749   3.429  10.024  1.00 39.15           O
HETATM   89  O   HOH A  14      -1.500   0.682  10.967  1.00 43.49           O
TER
"""
  pdb_in = "%s_in.pdb" % prefix
  # Use context managers so the handles are closed (and the data flushed)
  # deterministically; the original open(...).write(...) relied on CPython
  # reference-counting to close the file.
  with open(pdb_in, "w") as f:
    f.write(pdb_raw)
  if (create_mtz):
    args = [
      pdb_in,
      "high_resolution=%g" % d_min,
      "type=real",
      "label=F",
      "add_sigmas=True",
      "r_free_flags_fraction=0.1",
      "random_seed=12345",
      "output.file_name=%s.mtz" % prefix,
    ]
    fmodel.run(args=args, log=null_out())
  pdb_file = file_reader.any_file(pdb_in)
  hierarchy = pdb_file.file_object.hierarchy
  xrs = pdb_file.file_object.xray_structure_simple()
  # Homogenize ADPs and occupancies: every partially occupied atom gets
  # occ=0.5 so the "start" file passes the consistency check exercised by
  # exercise_regroup_3d, while the raw "in" file does not.
  for atom in hierarchy.atoms():
    atom.b = 5
    if (atom.occ < 1.0):
      atom.occ = 0.5
  with open("%s_start.pdb" % prefix, "w") as f:
    f.write(hierarchy.as_pdb_string(crystal_symmetry=xrs))
def exercise_regroup_3d(verbose):
  """Exercise occupancy_selections(constrain_correlated_3d_groups=True).

  The homogenized "start" file must regroup into exactly one constraint
  group; the raw "in" file must raise Sorry complaining about inconsistent
  occupancies.
  """
  if (verbose): log = sys.stdout
  else: log = StringIO()
  prepare_correlated_occupancy_inputs()
  # File #1 (with homogenized occupancies) should work
  # File #2 should fail due to inconsistent occupancies
  pdb_files = [
    "tst_group_correlated_occupancy_start.pdb",
    "tst_group_correlated_occupancy_in.pdb",
  ]
  for i_file, pdb_file in enumerate(pdb_files):
    model = get_model(pdb_file, log)
    try :
      constraint_groups = occupancy_selections(
        model = model,
        constrain_correlated_3d_groups=True,
        log=null_out())
    except Sorry as s :
      # Only the second (raw) file is allowed to fail, and only with the
      # expected message.
      if (i_file == 0):
        raise
      else :
        assert ("Inconsistent occupancies" in str(s)), str(s)
    else :
      if (i_file == 1):
        raise Exception_expected
      else :
        assert (len(constraint_groups) == 1)
def run():
  """Execute every exercise in order; pass --verbose to stream logs to stdout."""
  verbose = "--verbose" in sys.argv[1:]
  # NOTE: there is intentionally no exercise_04 in this suite.
  exercises = [
    exercise_00, exercise_01, exercise_02, exercise_03, exercise_05,
    exercise_06, exercise_07, exercise_08, exercise_09, exercise_10,
    exercise_11, exercise_12, exercise_13, exercise_14, exercise_15,
    exercise_16, exercise_17, exercise_18, exercise_19, exercise_20,
    exercise_21, exercise_22, exercise_23, exercise_24, exercise_25,
    exercise_26, exercise_27, exercise_28, exercise_29, exercise_30,
    exercise_regroup_3d,
  ]
  for exercise in exercises:
    exercise(verbose=verbose)
  print(format_cpu_times())
if (__name__ == "__main__"):
  run()  # run the full exercise suite when invoked as a script
| [
"[email protected]"
] | |
35488866c24bd360ea370d1014afbe7e4ed4e555 | b33d1d4b74d375a2050baf80cda5b8571aff7462 | /s14/day01/homework2.py | 1a595d08faafbc21bfeba3287a464e606179d299 | [] | no_license | sunwang33/code | e979e1b11209200fba07a99d926d76f09c83b514 | 377f3e919555bf0f02ef56c9395d57992c84fcfd | refs/heads/master | 2021-01-16T18:10:08.358744 | 2018-01-01T02:58:43 | 2018-01-01T02:58:43 | 100,045,002 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,440 | py | # Author:Sun Wang
# Three-level location menu: city -> district -> area -> company.
# Leaf nodes are empty dicts; the navigation loop below walks this structure.
menu = {
    '北京':{
        '海淀':{
            '五道口':{
                'soho':{},
                '网易':{},
                'google':{}
            },
            '中关村':{
                '爱奇艺':{},
                '汽车之家':{},
                'youku':{},
            },
            '上地':{
                '百度':{},
            },
        },
        '昌平':{
            '沙河':{
                '老男孩':{},
                '北航':{},
            },
            '天通苑':{},
            '回龙观':{},
        },
        '朝阳':{},
        '东城':{},
    },
    '上海':{
        '闵行':{
            "人民广场":{
                '炸鸡店':{}
            }
        },
        '闸北':{
            '火车战':{
                '携程':{}
            }
        },
        '浦东':{},
    },
    '山东':{},
}
# Interactive navigation: each nested while-loop is one menu depth.
# 'b' backs out one level, 'q' (at the deepest level) exits entirely;
# any other unknown input just re-prints the current level.
exit_flag = False
while not exit_flag :
    for item in menu:
        print (item)
    choise = input("Please input your choise: ")
    if choise in menu:
        while not exit_flag:
            for i in menu[choise]:
                print ("\t",i)
            choise1 = input("Please input your choise1: ")
            if choise1 in menu[choise]:
                while not exit_flag:
                    for i1 in menu[choise][choise1]:
                        print ("\t",i1)
                    choise2 = input("Please input your choise2: ")
                    if choise2 in menu[choise][choise1]:
                        while not exit_flag:
                            for i2 in menu[choise][choise1][choise2]:
                                print ("\t\t",i2)
                            choise3 = input("Please input your choise3: ")
                            if choise3 in menu[choise][choise1][choise2]:
                                while not exit_flag:
                                    for i3 in menu[choise][choise1][choise2][choise3]:
                                        print ("\t\t\t",i3)
                                    if choise3 == 'q':
                                        exit_flag = True
                                    elif choise3 == 'b':
                                        break
                            if choise2 == 'b':
                                break
                    if choise1 == 'b':
                        break
            if choise == 'b':
                break
| [
"[email protected]"
] | |
783c3f96c270a8323efbe58ab9ad72e3ffc8e029 | 1c6a7125c8ea024050045fb18a685daadcfbcb0f | /codeforces/random/B_Equal_Candies.py | 5666b59c9804f96384bfdd8bf152e6b93b45323e | [] | no_license | HurayraIIT/competitive-programming | 0e2f40cf1cae76129eac0cd8402b62165a6c29e4 | 3b9bc3066c70284cddab0f3e39ffc3e9cd59225f | refs/heads/master | 2022-12-10T18:33:10.405727 | 2022-12-06T13:15:15 | 2022-12-06T13:15:15 | 236,779,058 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 760 | py | # Abu Hurayra
import sys
from collections import defaultdict
# import threading
# threading.stack_size(2**27)
# sys.setrecursionlimit(2**21)
# Fast stdin/stdout helpers (competitive-programming boilerplate).
def rs(): return sys.stdin.readline().rstrip()                      # read one line (stripped)
def ri(): return int(sys.stdin.readline())                          # read one int
def ria(): return list(map(int, sys.stdin.readline().split()))      # read a line of ints
def ws(s): sys.stdout.write(s + '\n')                               # write a string line
def wi(n): sys.stdout.write(str(n) + '\n')                          # write an int line
def wia(a): sys.stdout.write(' '.join([str(x) for x in a]) + '\n')  # write ints, space-separated
# a = list(map(int, input().split()))
def main():
    """Solve Codeforces 'Equal Candies' for each test case on stdin.

    The only allowed move removes candies, so every box must come down to the
    minimum; the total eaten is sum(a_i - min) = sum(a) - n * min(a).
    """
    t = ri()  # number of test cases
    for _ in range(t):
        n = ri()   # number of candy boxes
        a = ria()  # candies per box
        # Closed form instead of the manual accumulation loop.
        print(sum(a) - n * min(a))
if __name__ == '__main__':
    # Threaded variants (above, commented out) are only needed for
    # deep-recursion solutions that require a larger stack.
    # t = threading.Thread(target=main)
    # t.start()
    # t.join()
    main()
| [
"[email protected]"
] | |
24eab0073b819cc196e8f7657f4052507436ad3f | 007f7d8c93725457bc5692715587227d6c8acc0c | /blender/.blender/scripts/renameobjectbyblock.py | eeea815c650127d2b64e7c557b1b425a00e90a67 | [
"GPL-2.0-only",
"PSF-2.0",
"GPL-1.0-or-later",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | Nicoeevee/sketchfab_download | cf1c72ab45a88bebb0e08d7fb984fa01a3be97fa | a81ad3a2053e715608e657fd62c9dc1194ffe290 | refs/heads/master | 2023-04-21T08:05:28.322657 | 2021-05-13T18:01:30 | 2021-05-13T18:01:30 | 354,547,290 | 0 | 0 | Apache-2.0 | 2021-05-14T12:04:21 | 2021-04-04T13:13:28 | Python | UTF-8 | Python | false | false | 4,863 | py | #!BPY
""" Registration info for Blender menus: <- these words are ignored
Name: 'Object Name Editor'
Blender: 232
Group: 'Object'
Tip: 'GUI to select and rename objects.'
"""
__author__ = "Jean-Michel Soler (jms)"
__url__ = ("blender", "blenderartists.org",
"Script's homepage, http://jmsoler.free.fr/didacticiel/blender/tutor/cpl_renameobjectgui.htm",
"Communicate problems and errors, http://www.zoo-logique.org/3D.Blender/newsportal/thread.php?group=3D.Blender")
__version__ = "233"
__bpydoc__ = """\
This script offers a GUI to rename selected objects according to a given
rule.
Usage:
Open it from the 3d View's "Object->Scripts" menu and select the objects to
rename and the rule from the buttons in its GUI.
"""
# ----------------------------------------------------------
# Name OBJECT changer
# (c) 2004 jean-michel soler
# -----------------------------------------------------------
#----------------------------------------------
# Page officielle/offcial page du blender python Name OBJECT changer:
# http://jmsoler.free.fr/didacticiel/blender/tutor/cpl_renameobjectgui.htm
# Communiquer les problemes et erreurs sur:
# To Communicate problems and errors on:
# http://www.zoo-logique.org/3D.Blender/newsportal/thread.php?group=3D.Blender
#---------------------------------------------
# Blender Artistic License
# http://download.blender.org/documentation/html/x21254.html
#---------------------------------------------
CVS=0
import Blender
from Blender import *
from Blender.Draw import *
from Blender.BGL import *
O = list(Scene.GetCurrent().objects)
stringlist=[[],[]]
def renew():
    """Rebuild the per-object GUI state from the current Blender scene.

    Returns (PARAM, stringlist) where PARAM maps each object name to
    [select-toggle Button, toggle event id, index in O, object type,
     name String button, string event id, the object itself], and
    stringlist holds [string event ids, object names, toggle event ids].
    Event ids start at 9 and advance by 2 per object (toggle, string).
    """
    global O
    #O = Object.Get()
    O = list(Scene.GetCurrent().objects)
    #param= [ [p.name, i, p.getType()] for i, p in enumerate(O) ]
    PARAM={}
    evt=9
    stringlist=[[],[],[]]
    for i, ob in enumerate(O):
        obname= ob.name
        PARAM[obname] = [Create(ob.sel), evt, i, ob.getType(), Create(obname), evt+1, ob]
        stringlist[0].append(evt+1)
        stringlist[1].append(obname)
        stringlist[2].append(evt)
        evt+=2
    return PARAM,stringlist
# Shared GUI state: the rename-string widget and the mutually exclusive
# alignment toggles (value button, event id); BEGIN is on by default.
NEWNAME=Create('Name')
alignment={'BEGIN' : [Create(1),5],
           'END' : [Create(0),6],
           'POINT' : [Create(0),7],
           'FULL' : [Create(0),8]}
def rename():
    """Apply NEWNAME to every object whose select toggle is on.

    END appends, BEGIN prepends, FULL replaces the whole name (POINT is
    currently unused).  The GUI state is refreshed afterwards.
    """
    global NEWNAME, alignment, O, PARAM, stringlist
    newname= NEWNAME.val
    for obname, value in PARAM.iteritems():
        if value[0].val: # Selected
            if alignment['END'][0].val:
                value[6].setName(obname+newname)
            elif alignment['BEGIN'][0].val:
                value[6].setName(newname+obname)
            elif alignment['FULL'][0].val:
                value[6].setName(newname)
    PARAM, stringlist = renew()
PARAM, stringlist = renew()
def EVENT(evt,val):
    # Keyboard/mouse events are ignored; all interaction is via buttons.
    pass
def BUTTON(evt):
    """Dispatch GUI button events.

    1=Exit, 2=Rename, 3=Renew, 5-8=alignment toggles (made exclusive by
    clearing the others), even ids >=10 are per-object name String buttons,
    odd ids >=9 are per-object select toggles.  (evt-9)/2 recovers the
    object's position in stringlist (integer division, Python 2).
    """
    global PARAM , alignment, O, stringlist, CVS
    if (evt==1):
        Exit()
    elif (evt==2):
        rename()
    elif (evt==3):
        PARAM, stringlist = renew()
    elif (evt in [5,6,7,8]):
        # Radio-button behaviour: switch off every other alignment toggle.
        for k in alignment.iterkeys():
            if alignment[k][1]!=evt:
                alignment[k][0].val=0
    elif (evt in stringlist[0]):
        # Per-object String button: rename just this object immediately.
        O[PARAM[stringlist[1][(evt-9)/2]][2]].setName(PARAM[stringlist[1][(evt-9)/2]][4].val)
        PARAM, stringlist = renew()
    elif (evt in stringlist[2]):
        # Per-object select toggle: mirror the toggle into the 3D selection.
        try:
            O[PARAM[stringlist[1][(evt-9)/2]][2]].select(PARAM[stringlist[1][(evt-9)/2]][0].val)
        except:
            pass
    Blender.Redraw()
Blender.Redraw()
def DRAW():
    """Redraw the whole GUI: action buttons, rename string + alignment
    toggles on top, then one (select toggle, name field, type label) row per
    scene object, flowing into a new 152-px column when the window height
    (size[3]) is filled.
    """
    global PARAM, O, NEWNAME, alignment
    #glColor3f(0.7, 0.7, 0.7)
    glClear(GL_COLOR_BUFFER_BIT)
    glColor3f(0.1, 0.1, 0.15)
    # Query the current window rectangle to know how many rows fit.
    size=Buffer(GL_FLOAT, 4)
    glGetFloatv(GL_SCISSOR_BOX, size)
    size= size.list
    for s in [0,1,2,3]: size[s]=int(size[s])
    ligne=20
    Button ("Exit",1,20,1,80,ligne)
    Button ("Rename",2,102,1,80,ligne)
    Button ("Renew",3,184,1,80,ligne)
    glRasterPos2f(20, ligne*2-10)
    Text("Object Name Editor")
    NEWNAME=String('Add String: ', 4, 150, ligne*2-16, 150, 18, NEWNAME.val,120 )
    key= alignment.keys()
    key.sort()
    n=150+150+4
    for k in key:
        alignment[k][0]= Toggle(k,alignment[k][1],n,ligne*2-16, 40, 18, alignment[k][0].val)
        n+=40+4
    # Rows per column; integer division (Python 2 semantics).
    max=size[3] / 22 -2
    pos = 0
    decal = 20
    keys=[[PARAM[k][1],k] for k in PARAM.iterkeys()]
    keys.sort()
    for p_ in keys:
        p=p_[1]
        if pos==max:
            decal+=152
            pos=1
        else:
            pos+=1
        PARAM[p][0]=Toggle('S',PARAM[p][1],decal,pos*22+22,20,20, PARAM[p][0].val,"Select this one for a group renaming")
        PARAM[p][4]=String('',PARAM[p][5],decal+20,pos*22+22,90,20, PARAM[p][4].val,200, "string button to rename immediately but only this object")
        glRasterPos2f(decal+115,pos*22+24)
        Text(PARAM[p][3][:4])
if __name__=='__main__':
    Register(DRAW,EVENT,BUTTON)  # hook the GUI callbacks into Blender's event loop
| [
"[email protected]"
] | |
72cd4e6fde17b03b12738b441ec9e9f9e86204b8 | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /_REPO/MICROSOFT/computervision-recipes/utils_cv/detection/references/utils.py | b477c887d61384e3e724535c9061f46326984030 | [
"BSD-3-Clause",
"LGPL-2.1-or-later",
"Apache-2.0",
"MIT"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 10,311 | py | from __future__ import print_function
from collections import defaultdict, deque
import datetime
import pickle
import time
import torch
import torch.distributed as dist
import errno
import os
class SmoothedValue(object):
"""Track a series of values and provide access to smoothed values over a
window or the global series average.
"""
def __init__(self, window_size=20, fmt=None):
if fmt is None:
fmt = "{median:.4f} ({global_avg:.4f})"
self.deque = deque(maxlen=window_size)
self.total = 0.0
self.count = 0
self.fmt = fmt
def update(self, value, n=1):
self.deque.append(value)
self.count += n
self.total += value * n
def synchronize_between_processes(self):
"""
Warning: does not synchronize the deque!
"""
if not is_dist_avail_and_initialized():
return
t = torch.tensor([self.count, self.total], dtype=torch.float64, device="cuda")
dist.barrier()
dist.all_reduce(t)
t = t.tolist()
self.count = int(t[0])
self.total = t[1]
@property
def median(self):
d = torch.tensor(list(self.deque))
return d.median().item()
@property
def avg(self):
d = torch.tensor(list(self.deque), dtype=torch.float32)
return d.mean().item()
@property
def global_avg(self):
return self.total / self.count
@property
def max(self):
return max(self.deque)
@property
def value(self):
return self.deque[-1]
def __str__(self):
return self.fmt.format(
median=self.median,
avg=self.avg,
global_avg=self.global_avg,
max=self.max,
value=self.value,
)
def all_gather(data):
    """
    Run all_gather on arbitrary picklable data (not necessarily tensors)
    Args:
        data: any picklable object
    Returns:
        list[data]: list of data gathered from each rank
    """
    world_size = get_world_size()
    if world_size == 1:
        # Single-process run: nothing to gather.
        return [data]

    # serialized to a Tensor
    buffer = pickle.dumps(data)
    storage = torch.ByteStorage.from_buffer(buffer)
    tensor = torch.ByteTensor(storage).to("cuda")

    # obtain Tensor size of each rank
    local_size = torch.tensor([tensor.numel()], device="cuda")
    size_list = [torch.tensor([0], device="cuda") for _ in range(world_size)]
    dist.all_gather(size_list, local_size)
    size_list = [int(size.item()) for size in size_list]
    max_size = max(size_list)

    # receiving Tensor from all ranks
    # we pad the tensor because torch all_gather does not support
    # gathering tensors of different shapes
    tensor_list = []
    for _ in size_list:
        tensor_list.append(torch.empty((max_size,), dtype=torch.uint8, device="cuda"))
    if local_size != max_size:
        padding = torch.empty(
            size=(max_size - local_size,), dtype=torch.uint8, device="cuda"
        )
        tensor = torch.cat((tensor, padding), dim=0)
    dist.all_gather(tensor_list, tensor)

    # Strip each rank's padding (using the true sizes gathered above)
    # before unpickling.
    data_list = []
    for size, tensor in zip(size_list, tensor_list):
        buffer = tensor.cpu().numpy().tobytes()[:size]
        data_list.append(pickle.loads(buffer))

    return data_list
def reduce_dict(input_dict, average=True):
    """
    Args:
        input_dict (dict): all the values will be reduced
        average (bool): whether to do average or sum
    Reduce the values in the dictionary from all processes so that all processes
    have the averaged results. Returns a dict with the same fields as
    input_dict, after reduction.
    """
    world_size = get_world_size()
    if world_size < 2:
        # Single process: return the input unchanged (not a copy).
        return input_dict
    with torch.no_grad():
        names = []
        values = []
        # sort the keys so that they are consistent across processes
        for k in sorted(input_dict.keys()):
            names.append(k)
            values.append(input_dict[k])
        # Stack into one tensor so a single all_reduce covers every entry.
        values = torch.stack(values, dim=0)
        dist.all_reduce(values)
        if average:
            values /= world_size
        reduced_dict = {k: v for k, v in zip(names, values)}
    return reduced_dict
class MetricLogger(object):
    """Collect named running statistics (``SmoothedValue`` meters) and
    pretty-print them while iterating over a data loader.

    Meters are created lazily on first ``update`` via the ``defaultdict``,
    and can be read back as attributes (e.g. ``logger.loss``) thanks to
    ``__getattr__``.
    """
    def __init__(self, delimiter="\t"):
        # ``delimiter`` separates the "name: value" fragments in __str__
        # and in the log lines emitted by log_every.
        self.meters = defaultdict(SmoothedValue)
        self.delimiter = delimiter
    def update(self, **kwargs):
        """Feed one scalar per keyword argument into the matching meter."""
        for k, v in kwargs.items():
            if isinstance(v, torch.Tensor):
                # Convert a (scalar) tensor to a plain Python number first.
                v = v.item()
            assert isinstance(v, (float, int))
            self.meters[k].update(v)
    def __getattr__(self, attr):
        # Only invoked for names not found through normal lookup: expose the
        # meters as attributes, then fall back to the instance __dict__.
        if attr in self.meters:
            return self.meters[attr]
        if attr in self.__dict__:
            return self.__dict__[attr]
        raise AttributeError(
            "'{}' object has no attribute '{}'".format(type(self).__name__, attr)
        )
    def __str__(self):
        """Render all meters as "name: meter" joined by the delimiter."""
        loss_str = []
        for name, meter in self.meters.items():
            loss_str.append("{}: {}".format(name, str(meter)))
        return self.delimiter.join(loss_str)
    def synchronize_between_processes(self):
        """Synchronize every meter across distributed processes."""
        for meter in self.meters.values():
            meter.synchronize_between_processes()
    def add_meter(self, name, meter):
        """Register a pre-configured meter under ``name``."""
        self.meters[name] = meter
    def log_every(self, iterable, print_freq, header=None):
        """Yield items from ``iterable``, printing a progress line every
        ``print_freq`` iterations (and for the final iteration).

        Tracks per-iteration time and data-loading time, an ETA estimate,
        and — when CUDA is available — peak GPU memory in MB.
        """
        i = 0
        if not header:
            header = ""
        start_time = time.time()
        end = time.time()
        iter_time = SmoothedValue(fmt="{avg:.4f}")
        data_time = SmoothedValue(fmt="{avg:.4f}")
        # Field width wide enough to right-align the iteration counter.
        space_fmt = ":" + str(len(str(len(iterable)))) + "d"
        if torch.cuda.is_available():
            log_msg = self.delimiter.join(
                [
                    header,
                    "[{0" + space_fmt + "}/{1}]",
                    "eta: {eta}",
                    "{meters}",
                    "time: {time}",
                    "data: {data}",
                    "max mem: {memory:.0f}",
                ]
            )
        else:
            log_msg = self.delimiter.join(
                [
                    header,
                    "[{0" + space_fmt + "}/{1}]",
                    "eta: {eta}",
                    "{meters}",
                    "time: {time}",
                    "data: {data}",
                ]
            )
        MB = 1024.0 * 1024.0
        for obj in iterable:
            # Time spent waiting for the next batch (data loading only).
            data_time.update(time.time() - end)
            yield obj
            # Full iteration time (data loading + the caller's work).
            iter_time.update(time.time() - end)
            if i % print_freq == 0 or i == len(iterable) - 1:
                eta_seconds = iter_time.global_avg * (len(iterable) - i)
                eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))
                if torch.cuda.is_available():
                    print(
                        log_msg.format(
                            i,
                            len(iterable),
                            eta=eta_string,
                            meters=str(self),
                            time=str(iter_time),
                            data=str(data_time),
                            memory=torch.cuda.max_memory_allocated() / MB,
                        )
                    )
                else:
                    print(
                        log_msg.format(
                            i,
                            len(iterable),
                            eta=eta_string,
                            meters=str(self),
                            time=str(iter_time),
                            data=str(data_time),
                        )
                    )
            i += 1
            end = time.time()
        total_time = time.time() - start_time
        total_time_str = str(datetime.timedelta(seconds=int(total_time)))
        print(
            "{} Total time: {} ({:.4f} s / it)".format(
                header, total_time_str, total_time / len(iterable)
            )
        )
def collate_fn(batch):
    """Collate a detection batch by transposing it.

    Turns a sequence of per-sample tuples into one tuple per field, e.g.
    [(img0, tgt0), (img1, tgt1)] -> ((img0, img1), (tgt0, tgt1)).
    """
    transposed = zip(*batch)
    return tuple(transposed)
def warmup_lr_scheduler(optimizer, warmup_iters, warmup_factor):
    """Return a ``LambdaLR`` scheduler implementing linear LR warmup.

    The multiplicative factor starts at ``warmup_factor`` at iteration 0,
    interpolates linearly towards 1 over ``warmup_iters`` iterations, and
    stays at 1 afterwards.
    """

    def lr_lambda(step):
        if step >= warmup_iters:
            return 1
        progress = float(step) / warmup_iters
        # Linear interpolation: warmup_factor at progress=0, 1 at progress=1.
        return warmup_factor * (1 - progress) + progress

    return torch.optim.lr_scheduler.LambdaLR(optimizer, lr_lambda)
def mkdir(path):
    """Create ``path`` (including missing parents) if it does not exist.

    Replaces the Python-2-era ``errno`` dance with ``exist_ok=True``. Note
    this also fixes a latent defect: the old code suppressed EEXIST even when
    ``path`` existed as a *regular file*; now that case raises
    ``FileExistsError`` instead of silently continuing.
    """
    # exist_ok=True: no-op when the directory already exists.
    os.makedirs(path, exist_ok=True)
def setup_for_distributed(is_master):
    """
    This function disables printing when not in master process
    """
    import builtins as __builtin__

    original_print = __builtin__.print

    def masked_print(*args, **kwargs):
        # Callers may pass force=True to print even on non-master ranks.
        force = kwargs.pop("force", False)
        if force or is_master:
            original_print(*args, **kwargs)

    __builtin__.print = masked_print
def is_dist_avail_and_initialized():
    """Return True when torch.distributed is usable and has been set up."""
    return dist.is_available() and dist.is_initialized()


def get_world_size():
    """Number of participating processes (1 when not running distributed)."""
    return dist.get_world_size() if is_dist_avail_and_initialized() else 1


def get_rank():
    """Rank of this process (0 when not running distributed)."""
    return dist.get_rank() if is_dist_avail_and_initialized() else 0


def is_main_process():
    """True only on rank 0, which owns logging and checkpointing."""
    return get_rank() == 0


def save_on_master(*args, **kwargs):
    """``torch.save`` that is a no-op on every rank except the main one."""
    if is_main_process():
        torch.save(*args, **kwargs)
def init_distributed_mode(args):
    """Initialise torch.distributed from launcher environment variables.

    Mutates ``args`` in place: sets ``rank``, ``world_size`` (torchrun path
    only), ``gpu``, ``distributed`` and ``dist_backend``, then joins the NCCL
    process group and silences ``print`` on non-master ranks. When no launcher
    environment is detected the run is marked non-distributed and nothing
    else happens.
    """
    if "RANK" in os.environ and "WORLD_SIZE" in os.environ:
        # torchrun / torch.distributed.launch export these variables.
        args.rank = int(os.environ["RANK"])
        args.world_size = int(os.environ["WORLD_SIZE"])
        args.gpu = int(os.environ["LOCAL_RANK"])
    elif "SLURM_PROCID" in os.environ:
        # SLURM launch: derive the local GPU index from the global rank.
        args.rank = int(os.environ["SLURM_PROCID"])
        args.gpu = args.rank % torch.cuda.device_count()
        # NOTE(review): this branch never sets args.world_size — presumably it
        # is already supplied (e.g. via the CLI parser); confirm with callers.
    else:
        print("Not using distributed mode")
        args.distributed = False
        return
    args.distributed = True
    torch.cuda.set_device(args.gpu)
    args.dist_backend = "nccl"  # NCCL is the standard multi-GPU backend
    print(
        "| distributed init (rank {}): {}".format(args.rank, args.dist_url), flush=True
    )
    # NOTE(review): args.dist_url is read but never set here — assumed to come
    # from the argument parser; verify.
    torch.distributed.init_process_group(
        backend=args.dist_backend,
        init_method=args.dist_url,
        world_size=args.world_size,
        rank=args.rank,
    )
    # Block until every rank has joined the group.
    torch.distributed.barrier()
    # Disable print() on all ranks except rank 0.
    setup_for_distributed(args.rank == 0)
| [
"[email protected]"
] | |
960540a6f9a5e5fdc7c3bb222cfbfd59bf548e8d | bf2d010229aece071359662f4fef44e48ba57951 | /dynamic_range_time_step_plot.py | ce67b22b30e65b9b6c4f46b93df1df6ec14a9916 | [] | no_license | Osrip/CriticalEvolution | b97398f74e2fc5b54c9ab92765b08ce3bf97257e | f77cae8acc626cb4c6d64d5a44fdf00310309c2e | refs/heads/master | 2021-06-24T03:44:03.283017 | 2021-04-03T13:09:42 | 2021-04-03T13:09:42 | 215,332,038 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,728 | py | import os
import numpy as np
from automatic_plot_helper import load_isings_specific_path
from automatic_plot_helper import attribute_from_isings
from automatic_plot_helper import all_folders_in_dir_with
import copy
import pandas as pd
import glob
import pickle
from run_combi import RunCombi
import matplotlib.pylab as plt
from matplotlib.lines import Line2D
import seaborn as sns
import re
from isolated_population_helper import seperate_isolated_populations
def plot_dynamic_range(sim_name, plot_settings):
    """Load per-time-step attribute data for ``sim_name`` and plot the
    critical vs. sub-critical averages."""
    loaded = load_data(plot_settings['attr'], sim_name)
    attrs_all, attrs_critical, attrs_sub_critical, food_num_list = loaded
    plot_seperated_averages(attrs_critical, attrs_sub_critical, food_num_list,
                            sim_name, plot_settings)
def plot_averages(attrs_list_each_food_num, food_num_list, sim_name, plot_settings):
    """Scatter the mean attribute value per time-step count and save the figure."""
    mean_attrs = [np.mean(attr_values) for attr_values in attrs_list_each_food_num]
    plt.scatter(food_num_list, mean_attrs)
    save_dir = 'save/{}/figs/dynamic_range_plots{}/'.format(sim_name, plot_settings['add_save_name'])
    save_name = 'plot_averages.png'
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    plt.savefig(save_dir + save_name, bbox_inches='tight')
    plt.show()
def plot_seperated_averages(attrs_list_each_food_num_critical, attrs_list_each_food_num_sub_critical, food_num_list,
sim_name, plot_settings):
avg_attr_list_critical = [np.mean(attrs) for attrs in attrs_list_each_food_num_critical]
avg_attr_list_sub_critical = [np.mean(attrs) for attrs in attrs_list_each_food_num_sub_critical]
plt.figure(figsize=(12, 8))
# make list of list with similar food_num entries for plotting
food_num_list_extended_critical = [[food_num for i in range(len(attrs))]
for food_num, attrs in zip(food_num_list, attrs_list_each_food_num_critical)]
food_num_list_extended_sub_critical = [[food_num for i in range(len(attrs))]
for food_num, attrs in zip(food_num_list, attrs_list_each_food_num_sub_critical)]
# food_num_list_extended = np.array(food_num_list_extended)
# attrs_list_each_food_num_critical = np.array(attrs_list_each_food_num_critical)
# attrs_list_each_food_num_sub_critical = np.array(attrs_list_each_food_num_sub_critical)
# for food_num_critical, food_num_sub_critical, attr_critical, attr_sub_critical in
# zip(food_num_list_extended_critical, food_num_list_extended_critical,
# attrs_list_each_food_num_critical, attrs_list_each_food_num_sub_critical)
plt.scatter(food_num_list_extended_critical, attrs_list_each_food_num_critical,
c=plot_settings['color']['critical'], s=2, alpha=0.4)
plt.scatter(food_num_list_extended_sub_critical, attrs_list_each_food_num_sub_critical, c=plot_settings['color']['sub_critical'],
s=2, alpha=0.4)
plt.scatter(food_num_list, avg_attr_list_critical, c=plot_settings['color']['critical'], label='critical')
plt.scatter(food_num_list, avg_attr_list_sub_critical, c=plot_settings['color']['sub_critical'],
label='sub-critical')
plt.ylabel(plot_settings['attr'])
plt.xlabel('number of time steps in simulation')
plt.legend()
save_dir = 'save/{}/figs/dynamic_range_plots_time_steps{}/'.format(sim_name, plot_settings['add_save_name'])
save_name = 'plot_averages_seperated.png'
if not os.path.exists(save_dir):
os.makedirs(save_dir)
plt.savefig(save_dir+save_name, bbox_inches='tight')
plt.show()
# TODO: debug and continue here!!
def load_data(attr, sim_name):
    """Gather attribute ``attr`` from every 'dynamic_range_run_time_step*'
    sub-run of one simulation.

    Scans ``save/<sim_name>/repeated_generations`` for matching folders,
    loads all ising objects from each, separates them into the isolated
    critical / sub-critical populations, and extracts ``attr`` per agent.

    Returns:
        tuple: (attrs_all, attrs_critical, attrs_sub_critical, food_num_list)
        where each attrs_* is one per-agent value list per folder, and
        food_num_list holds the trailing integer of each folder name
        (here: the number of time steps of that run).
    """
    sim_dir = 'save/{}'.format(sim_name)
    attrs_list_each_food_num_all = []
    attrs_list_each_food_num_critical = []
    attrs_list_each_food_num_sub_critical = []
    food_num_list = []
    dir_list = all_folders_in_dir_with('{}/repeated_generations'.format(sim_dir), 'dynamic_range_run_time_step')
    for dir in dir_list:
        isings_list = load_isings_specific_path(dir)
        # Flatten the per-generation lists into one population.
        isings = make_2d_list_1d(isings_list)
        isings_populations_seperated = seperate_isolated_populations([isings])
        # Presumably index [0][0] is the critical and [1][0] the sub-critical
        # population (matches the variable names) — TODO confirm against
        # seperate_isolated_populations.
        isings_critical = isings_populations_seperated[0][0]
        isings_sub_critical = isings_populations_seperated[1][0]
        attrs_list_each_food_num_all.append(attribute_from_isings(isings, attr))
        attrs_list_each_food_num_critical.append(attribute_from_isings(isings_critical, attr))
        attrs_list_each_food_num_sub_critical.append(attribute_from_isings(isings_sub_critical, attr))
        food_num_list.append(get_int_end_of_str(dir))
    return attrs_list_each_food_num_all, attrs_list_each_food_num_critical, attrs_list_each_food_num_sub_critical, food_num_list
def get_int_end_of_str(s):
    """Return the integer at the very end of ``s``, or None if ``s`` does not
    end in digits."""
    match = re.search(r'\d+$', s)
    if match is None:
        return None
    return int(match.group())
def make_2d_list_1d(in_list):
    """Flatten one nesting level: [[a, b], [c]] -> [a, b, c]."""
    return [element for sub_list in in_list for element in sub_list]
if __name__ == '__main__':
    # Plot configuration for the dynamic-range figures.
    plot_settings = {}
    plot_settings['add_save_name'] = ''  # suffix appended to the output folder name
    plot_settings['attr'] = 'avg_energy'  # ising attribute plotted on the y-axis
    plot_settings['color'] = {'critical': 'darkorange', 'sub_critical': 'royalblue', 'super_critical': 'maroon'}
    # Hard-coded simulation folder (under save/) to analyse.
    sim_name = 'sim-20201007-230728-g_4000_-t_8000_-iso_-ref_500_-rec_c_1000_-a_200_500_1000_2000_3000_3999_-c_3_-n_different_betas_DO_LONG_TIME_STEPS_WEAKEN_SUB_CRITICAL_and_DYNAMIC_RANGE_FOOD'
    plot_dynamic_range(sim_name, plot_settings)
| [
"[email protected]"
] | |
a69e06de247ad3631563edfd5c4b3257cf2749ed | 7c8bff784568691c516833ac81afc967857d24e2 | /jacc/migrations/0019_entrytype_identifier.py | effb3d0f203ab8c4e4ea27554b71aa4fcc456877 | [
"MIT"
] | permissive | kajala/django-jacc | b71f2c3df1321b9bb31e1e648895931b735949a6 | 4acb8ca2d32b11fd5afa3b5316b13be223b20ec6 | refs/heads/develop | 2023-08-18T14:12:38.196880 | 2023-08-11T15:18:57 | 2023-08-11T15:18:57 | 121,229,896 | 11 | 5 | MIT | 2021-07-12T15:02:36 | 2018-02-12T10:02:20 | Python | UTF-8 | Python | false | false | 746 | py | # Generated by Django 2.1.2 on 2018-10-18 15:36
from django.db import migrations, models
from django.db.models import F
def migr_code_to_identifier_0019_entrytype_identifier(apps, schema):
    """Data migration: copy every EntryType row's ``code`` into ``identifier``.

    Uses the historical model from ``apps`` (not the live model) and a single
    bulk UPDATE via ``F("code")`` so no rows are loaded into Python.
    """
    EntryType = apps.get_model("jacc", "EntryType")
    EntryType.objects.all().update(identifier=F("code"))
class Migration(migrations.Migration):
    """Add ``EntryType.identifier`` and backfill it from ``code``."""

    # Requires the previous jacc migration so the EntryType table exists.
    dependencies = [
        ("jacc", "0018_auto_20181008_2322"),
    ]
    operations = [
        # 1) Add the new indexed CharField (blank allowed, empty default).
        migrations.AddField(
            model_name="entrytype",
            name="identifier",
            field=models.CharField(blank=True, db_index=True, default="", max_length=40, verbose_name="identifier"),
        ),
        # 2) Copy each existing row's code into the new column.
        migrations.RunPython(migr_code_to_identifier_0019_entrytype_identifier),
    ]
| [
"[email protected]"
] | |
e01a13130ccc128e63bdb0486285772b63f84edf | a155780658a6d2c9b4e4adfaf822ba465f8f6be8 | /controller/jellyfish-mods/jf_phoneme.py | 9a4988ea5646229664603c154277d4e59983d701 | [] | no_license | stcybrdgs/NLP-Matching | e77ab6c63281d6d859f9a68be31c8913be20d9e6 | 6b4725e68eb4233844273d3a96b0f36b14ce8e80 | refs/heads/master | 2020-05-25T18:21:55.009741 | 2019-06-13T07:15:47 | 2019-06-13T07:15:47 | 187,928,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,168 | py | # -*- coding: utf-8 -*-
"""
Created on Fri May 31 19:15:42 2019
@author: Stacy
jellyfish modules for use with the controller program
"""
import jellyfish
def soundex():
    """Demo: print the Soundex phonetic code for each sample token."""
    tokens = ['Ball Bearing',
              'bll brng',
              'Centrifugal',
              'centrifigal',
              'PUmp',
              'pmp']
    print('Running SOUNDEX...')
    # row of raw tokens
    print('Tokens: ', end='')
    for token in tokens:
        print(token, ' | ', end='')
    # row of phonetic codes
    print('\n', end="")
    print('Codes: ', end='')
    for token in tokens:
        print(jellyfish.soundex(token), ' | ', end='')
def nysiis():
    """Demo: print the NYSIIS phonetic code for each sample token."""
    tokens = ['Ball Bearing',
              'bll brng',
              'Centrifugal',
              'centrifigal',
              'PUmp',
              'pmp']
    print('Running NYSIIS...')
    # row of raw tokens
    print('Tokens: ', end='')
    for token in tokens:
        print(token, ' | ', end='')
    # row of phonetic codes
    print('\n', end="")
    print('Codes: ', end='')
    for token in tokens:
        print(jellyfish.nysiis(token), ' | ', end='')
| [
"[email protected]"
] | |
632789f2b0dcf3c03c1d6fd2e945bda51a359db3 | c071eb46184635818e8349ce9c2a78d6c6e460fc | /system/python_stubs/-745935208/cx_Oracle/MessageProperties.py | 641ade26f658f4b7e5bbfa26034ba4823d3e2d0f | [] | no_license | sidbmw/PyCharm-Settings | a71bc594c83829a1522e215155686381b8ac5c6e | 083f9fe945ee5358346e5d86b17130d521d1b954 | refs/heads/master | 2020-04-05T14:24:03.216082 | 2018-12-28T02:29:29 | 2018-12-28T02:29:29 | 156,927,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,375 | py | # encoding: utf-8
# module cx_Oracle
# from C:\Users\siddh\AppData\Local\Programs\Python\Python37\lib\site-packages\cx_Oracle.cp37-win_amd64.pyd
# by generator 1.146
# no doc
# imports
import datetime as __datetime
from .object import object
class MessageProperties(object):
    # Auto-generated stub for cx_Oracle's Advanced Queuing message-properties
    # object; the real implementation lives in the compiled C extension.
    # no doc
    def __init__(self, *args, **kwargs): # real signature unknown
        pass
    # Each attribute below is a placeholder property emitted by the stub
    # generator; real getter/setter semantics are in the binary module.
    attempts = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    correlation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    delay = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    deliverymode = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    enqtime = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    exceptionq = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    expiration = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    msgid = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    priority = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    state = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"[email protected]"
] | |
c8eaaea75b6e51740f05c80b5ee3c3dfc6fa2213 | 11ef4bbb8086ba3b9678a2037d0c28baaf8c010e | /Source Code/server/binaries/chromium/pyproto/components/data_reduction_proxy/proto/client_config_pb2.py | d1f0ee35068d28902cf5e4a9b884c2deee8f29be | [] | no_license | lineCode/wasmview.github.io | 8f845ec6ba8a1ec85272d734efc80d2416a6e15b | eac4c69ea1cf0e9af9da5a500219236470541f9b | refs/heads/master | 2020-09-22T21:05:53.766548 | 2019-08-24T05:34:04 | 2019-08-24T05:34:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 23,912 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: client_config.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='client_config.proto',
package='data_reduction_proxy',
syntax='proto2',
serialized_options=_b('H\003'),
serialized_pb=_b('\n\x13\x63lient_config.proto\x12\x14\x64\x61ta_reduction_proxy\"\xd5\x02\n\x0c\x43lientConfig\x12\x13\n\x0bsession_key\x18\x01 \x01(\t\x12\x44\n\x17\x44\x45PRECATED_refresh_time\x18\x02 \x01(\x0b\x32\x1f.data_reduction_proxy.TimestampB\x02\x18\x01\x12\x37\n\x0cproxy_config\x18\x03 \x01(\x0b\x32!.data_reduction_proxy.ProxyConfig\x12\x38\n\x10refresh_duration\x18\x04 \x01(\x0b\x32\x1e.data_reduction_proxy.Duration\x12L\n\x17pageload_metrics_config\x18\x05 \x01(\x0b\x32+.data_reduction_proxy.PageloadMetricsConfig\x12)\n!ignore_long_term_black_list_rules\x18\x07 \x01(\x08\"3\n\x15PageloadMetricsConfig\x12\x1a\n\x12reporting_fraction\x18\x01 \x01(\x02\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\"L\n\x0bProxyConfig\x12=\n\x12http_proxy_servers\x18\x01 \x03(\x0b\x32!.data_reduction_proxy.ProxyServer\"\xc1\x02\n\x0bProxyServer\x12=\n\x06scheme\x18\x01 \x01(\x0e\x32-.data_reduction_proxy.ProxyServer.ProxyScheme\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\x05\x12R\n\x0f\x64\x65precated_type\x18\x04 \x01(\x0e\x32\x35.data_reduction_proxy.ProxyServer.DeprecatedProxyTypeB\x02\x18\x01\"L\n\x0bProxyScheme\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x08\n\x04HTTP\x10\x01\x12\t\n\x05HTTPS\x10\x02\x12\x17\n\x0f\x44\x45PRECATED_QUIC\x10\x03\x1a\x02\x08\x01\"5\n\x13\x44\x65precatedProxyType\x12\x14\n\x10UNSPECIFIED_TYPE\x10\x00\x12\x08\n\x04\x43ORE\x10\x01\"2\n\x10\x43onfigDeviceInfo\x12\x1e\n\x16total_device_memory_kb\x18\x01 \x01(\x03\"\xdd\x02\n\x19\x43reateClientConfigRequest\x12\x13\n\x0bsession_key\x18\x01 \x01(\t\x12\x37\n\x0cversion_info\x18\x02 \x01(\x0b\x32!.data_reduction_proxy.VersionInfo\x12\"\n\x1atelephony_network_operator\x18\x03 \x01(\t\x12S\n\rdogfood_group\x18\x04 \x01(\x0e\x32<.data_reduction_proxy.CreateClientConfigRequest.DogfoodGroup\x12;\n\x0b\x64\x65vice_info\x18\x05 
\x01(\x0b\x32&.data_reduction_proxy.ConfigDeviceInfo\"<\n\x0c\x44ogfoodGroup\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0e\n\nNONDOGFOOD\x10\x01\x12\x0b\n\x07\x44OGFOOD\x10\x02\"L\n\x0bVersionInfo\x12\x0e\n\x06\x63lient\x18\x01 \x01(\t\x12\r\n\x05\x62uild\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0f\n\x07\x63hannel\x18\x04 \x01(\tB\x02H\x03')
)
_PROXYSERVER_PROXYSCHEME = _descriptor.EnumDescriptor(
name='ProxyScheme',
full_name='data_reduction_proxy.ProxyServer.ProxyScheme',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HTTP', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='HTTPS', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_QUIC', index=3, number=3,
serialized_options=_b('\010\001'),
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=800,
serialized_end=876,
)
_sym_db.RegisterEnumDescriptor(_PROXYSERVER_PROXYSCHEME)
_PROXYSERVER_DEPRECATEDPROXYTYPE = _descriptor.EnumDescriptor(
name='DeprecatedProxyType',
full_name='data_reduction_proxy.ProxyServer.DeprecatedProxyType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED_TYPE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CORE', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=878,
serialized_end=931,
)
_sym_db.RegisterEnumDescriptor(_PROXYSERVER_DEPRECATEDPROXYTYPE)
_CREATECLIENTCONFIGREQUEST_DOGFOODGROUP = _descriptor.EnumDescriptor(
name='DogfoodGroup',
full_name='data_reduction_proxy.CreateClientConfigRequest.DogfoodGroup',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NONDOGFOOD', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOGFOOD', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1275,
serialized_end=1335,
)
_sym_db.RegisterEnumDescriptor(_CREATECLIENTCONFIGREQUEST_DOGFOODGROUP)
_CLIENTCONFIG = _descriptor.Descriptor(
name='ClientConfig',
full_name='data_reduction_proxy.ClientConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='session_key', full_name='data_reduction_proxy.ClientConfig.session_key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='DEPRECATED_refresh_time', full_name='data_reduction_proxy.ClientConfig.DEPRECATED_refresh_time', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='proxy_config', full_name='data_reduction_proxy.ClientConfig.proxy_config', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='refresh_duration', full_name='data_reduction_proxy.ClientConfig.refresh_duration', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pageload_metrics_config', full_name='data_reduction_proxy.ClientConfig.pageload_metrics_config', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ignore_long_term_black_list_rules', full_name='data_reduction_proxy.ClientConfig.ignore_long_term_black_list_rules', index=5,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=46,
serialized_end=387,
)
_PAGELOADMETRICSCONFIG = _descriptor.Descriptor(
name='PageloadMetricsConfig',
full_name='data_reduction_proxy.PageloadMetricsConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='reporting_fraction', full_name='data_reduction_proxy.PageloadMetricsConfig.reporting_fraction', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=389,
serialized_end=440,
)
_TIMESTAMP = _descriptor.Descriptor(
name='Timestamp',
full_name='data_reduction_proxy.Timestamp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='seconds', full_name='data_reduction_proxy.Timestamp.seconds', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nanos', full_name='data_reduction_proxy.Timestamp.nanos', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=442,
serialized_end=485,
)
_DURATION = _descriptor.Descriptor(
name='Duration',
full_name='data_reduction_proxy.Duration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='seconds', full_name='data_reduction_proxy.Duration.seconds', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nanos', full_name='data_reduction_proxy.Duration.nanos', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=487,
serialized_end=529,
)
_PROXYCONFIG = _descriptor.Descriptor(
name='ProxyConfig',
full_name='data_reduction_proxy.ProxyConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='http_proxy_servers', full_name='data_reduction_proxy.ProxyConfig.http_proxy_servers', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=531,
serialized_end=607,
)
_PROXYSERVER = _descriptor.Descriptor(
name='ProxyServer',
full_name='data_reduction_proxy.ProxyServer',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='scheme', full_name='data_reduction_proxy.ProxyServer.scheme', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='host', full_name='data_reduction_proxy.ProxyServer.host', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='port', full_name='data_reduction_proxy.ProxyServer.port', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='deprecated_type', full_name='data_reduction_proxy.ProxyServer.deprecated_type', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\030\001'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_PROXYSERVER_PROXYSCHEME,
_PROXYSERVER_DEPRECATEDPROXYTYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=610,
serialized_end=931,
)
_CONFIGDEVICEINFO = _descriptor.Descriptor(
name='ConfigDeviceInfo',
full_name='data_reduction_proxy.ConfigDeviceInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='total_device_memory_kb', full_name='data_reduction_proxy.ConfigDeviceInfo.total_device_memory_kb', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=933,
serialized_end=983,
)
_CREATECLIENTCONFIGREQUEST = _descriptor.Descriptor(
name='CreateClientConfigRequest',
full_name='data_reduction_proxy.CreateClientConfigRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='session_key', full_name='data_reduction_proxy.CreateClientConfigRequest.session_key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version_info', full_name='data_reduction_proxy.CreateClientConfigRequest.version_info', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='telephony_network_operator', full_name='data_reduction_proxy.CreateClientConfigRequest.telephony_network_operator', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dogfood_group', full_name='data_reduction_proxy.CreateClientConfigRequest.dogfood_group', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='device_info', full_name='data_reduction_proxy.CreateClientConfigRequest.device_info', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_CREATECLIENTCONFIGREQUEST_DOGFOODGROUP,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=986,
serialized_end=1335,
)
_VERSIONINFO = _descriptor.Descriptor(
name='VersionInfo',
full_name='data_reduction_proxy.VersionInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='client', full_name='data_reduction_proxy.VersionInfo.client', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='build', full_name='data_reduction_proxy.VersionInfo.build', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='patch', full_name='data_reduction_proxy.VersionInfo.patch', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='channel', full_name='data_reduction_proxy.VersionInfo.channel', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1337,
serialized_end=1413,
)
_CLIENTCONFIG.fields_by_name['DEPRECATED_refresh_time'].message_type = _TIMESTAMP
_CLIENTCONFIG.fields_by_name['proxy_config'].message_type = _PROXYCONFIG
_CLIENTCONFIG.fields_by_name['refresh_duration'].message_type = _DURATION
_CLIENTCONFIG.fields_by_name['pageload_metrics_config'].message_type = _PAGELOADMETRICSCONFIG
_PROXYCONFIG.fields_by_name['http_proxy_servers'].message_type = _PROXYSERVER
_PROXYSERVER.fields_by_name['scheme'].enum_type = _PROXYSERVER_PROXYSCHEME
_PROXYSERVER.fields_by_name['deprecated_type'].enum_type = _PROXYSERVER_DEPRECATEDPROXYTYPE
_PROXYSERVER_PROXYSCHEME.containing_type = _PROXYSERVER
_PROXYSERVER_DEPRECATEDPROXYTYPE.containing_type = _PROXYSERVER
_CREATECLIENTCONFIGREQUEST.fields_by_name['version_info'].message_type = _VERSIONINFO
_CREATECLIENTCONFIGREQUEST.fields_by_name['dogfood_group'].enum_type = _CREATECLIENTCONFIGREQUEST_DOGFOODGROUP
_CREATECLIENTCONFIGREQUEST.fields_by_name['device_info'].message_type = _CONFIGDEVICEINFO
_CREATECLIENTCONFIGREQUEST_DOGFOODGROUP.containing_type = _CREATECLIENTCONFIGREQUEST
DESCRIPTOR.message_types_by_name['ClientConfig'] = _CLIENTCONFIG
DESCRIPTOR.message_types_by_name['PageloadMetricsConfig'] = _PAGELOADMETRICSCONFIG
DESCRIPTOR.message_types_by_name['Timestamp'] = _TIMESTAMP
DESCRIPTOR.message_types_by_name['Duration'] = _DURATION
DESCRIPTOR.message_types_by_name['ProxyConfig'] = _PROXYCONFIG
DESCRIPTOR.message_types_by_name['ProxyServer'] = _PROXYSERVER
DESCRIPTOR.message_types_by_name['ConfigDeviceInfo'] = _CONFIGDEVICEINFO
DESCRIPTOR.message_types_by_name['CreateClientConfigRequest'] = _CREATECLIENTCONFIGREQUEST
DESCRIPTOR.message_types_by_name['VersionInfo'] = _VERSIONINFO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ClientConfig = _reflection.GeneratedProtocolMessageType('ClientConfig', (_message.Message,), dict(
DESCRIPTOR = _CLIENTCONFIG,
__module__ = 'client_config_pb2'
# @@protoc_insertion_point(class_scope:data_reduction_proxy.ClientConfig)
))
_sym_db.RegisterMessage(ClientConfig)
PageloadMetricsConfig = _reflection.GeneratedProtocolMessageType('PageloadMetricsConfig', (_message.Message,), dict(
DESCRIPTOR = _PAGELOADMETRICSCONFIG,
__module__ = 'client_config_pb2'
# @@protoc_insertion_point(class_scope:data_reduction_proxy.PageloadMetricsConfig)
))
_sym_db.RegisterMessage(PageloadMetricsConfig)
Timestamp = _reflection.GeneratedProtocolMessageType('Timestamp', (_message.Message,), dict(
DESCRIPTOR = _TIMESTAMP,
__module__ = 'client_config_pb2'
# @@protoc_insertion_point(class_scope:data_reduction_proxy.Timestamp)
))
_sym_db.RegisterMessage(Timestamp)
Duration = _reflection.GeneratedProtocolMessageType('Duration', (_message.Message,), dict(
DESCRIPTOR = _DURATION,
__module__ = 'client_config_pb2'
# @@protoc_insertion_point(class_scope:data_reduction_proxy.Duration)
))
_sym_db.RegisterMessage(Duration)
ProxyConfig = _reflection.GeneratedProtocolMessageType('ProxyConfig', (_message.Message,), dict(
DESCRIPTOR = _PROXYCONFIG,
__module__ = 'client_config_pb2'
# @@protoc_insertion_point(class_scope:data_reduction_proxy.ProxyConfig)
))
_sym_db.RegisterMessage(ProxyConfig)
ProxyServer = _reflection.GeneratedProtocolMessageType('ProxyServer', (_message.Message,), dict(
DESCRIPTOR = _PROXYSERVER,
__module__ = 'client_config_pb2'
# @@protoc_insertion_point(class_scope:data_reduction_proxy.ProxyServer)
))
_sym_db.RegisterMessage(ProxyServer)
ConfigDeviceInfo = _reflection.GeneratedProtocolMessageType('ConfigDeviceInfo', (_message.Message,), dict(
DESCRIPTOR = _CONFIGDEVICEINFO,
__module__ = 'client_config_pb2'
# @@protoc_insertion_point(class_scope:data_reduction_proxy.ConfigDeviceInfo)
))
_sym_db.RegisterMessage(ConfigDeviceInfo)
CreateClientConfigRequest = _reflection.GeneratedProtocolMessageType('CreateClientConfigRequest', (_message.Message,), dict(
DESCRIPTOR = _CREATECLIENTCONFIGREQUEST,
__module__ = 'client_config_pb2'
# @@protoc_insertion_point(class_scope:data_reduction_proxy.CreateClientConfigRequest)
))
_sym_db.RegisterMessage(CreateClientConfigRequest)
VersionInfo = _reflection.GeneratedProtocolMessageType('VersionInfo', (_message.Message,), dict(
DESCRIPTOR = _VERSIONINFO,
__module__ = 'client_config_pb2'
# @@protoc_insertion_point(class_scope:data_reduction_proxy.VersionInfo)
))
_sym_db.RegisterMessage(VersionInfo)
DESCRIPTOR._options = None
_CLIENTCONFIG.fields_by_name['DEPRECATED_refresh_time']._options = None
_PROXYSERVER_PROXYSCHEME.values_by_name["DEPRECATED_QUIC"]._options = None
_PROXYSERVER.fields_by_name['deprecated_type']._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
12358f25a48a53f1851f8ac5027fdd19a6973bab | 59de7788673ade984b9c9fbc33664a7cbdba67d3 | /res/scripts/client/gui/scaleform/daapi/view/lobby/crewoperations/__init__.py | bd54d7b315bf7025de933b9384553c691e7e1edd | [] | no_license | webiumsk/WOT-0.9.15-CT | 3fa24ab37a6c91b7073034afb2f355efa5b7fe36 | fbd194fbaa6bdece51c7a68fc35bbb5257948341 | refs/heads/master | 2020-12-24T21:27:23.175774 | 2016-05-01T13:47:44 | 2016-05-01T13:47:44 | 57,600,180 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,734 | py | # 2016.05.01 15:21:39 Střední Evropa (letní čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/crewOperations/__init__.py
from gui.app_loader.settings import APP_NAME_SPACE
from gui.shared import EVENT_BUS_SCOPE
from gui.Scaleform.daapi.settings.views import VIEW_ALIAS
from gui.Scaleform.framework import GroupedViewSettings, ViewTypes, ScopeTemplates
from gui.Scaleform.framework.package_layout import PackageBusinessHandler
def getViewSettings():
from gui.Scaleform.daapi.view.lobby.crewOperations.CrewOperationsPopOver import CrewOperationsPopOver
from gui.Scaleform.daapi.view.lobby.crewOperations.RetrainCrewWindow import RetrainCrewWindow
return (GroupedViewSettings(VIEW_ALIAS.CREW_OPERATIONS_POPOVER, CrewOperationsPopOver, 'crewOperationsPopOver.swf', ViewTypes.WINDOW, 'crewOperationsPopOver', VIEW_ALIAS.CREW_OPERATIONS_POPOVER, ScopeTemplates.WINDOW_VIEWED_MULTISCOPE), GroupedViewSettings(VIEW_ALIAS.RETRAIN_CREW, RetrainCrewWindow, 'retrainCrewWindow.swf', ViewTypes.TOP_WINDOW, 'retrainCrewWindow', None, ScopeTemplates.DEFAULT_SCOPE))
def getBusinessHandlers():
return (CrewOpsBusinessHandler(),)
class CrewOpsBusinessHandler(PackageBusinessHandler):
def __init__(self):
listeners = ((VIEW_ALIAS.CREW_OPERATIONS_POPOVER, self.loadViewByCtxEvent), (VIEW_ALIAS.RETRAIN_CREW, self.loadViewByCtxEvent))
super(CrewOpsBusinessHandler, self).__init__(listeners, APP_NAME_SPACE.SF_LOBBY, EVENT_BUS_SCOPE.LOBBY)
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\scaleform\daapi\view\lobby\crewoperations\__init__.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.05.01 15:21:39 Střední Evropa (letní čas)
| [
"[email protected]"
] | |
0e536a419c8eaf8064d4388c6bd6fbf237af1039 | ae7884af1ec3965b7c0eec22edad6b74f78b7ba6 | /client/full/src/UDSWindow.py | 86e5b3b9b59538fda5013a6802deb4d95ceee0e4 | [] | no_license | glyptodon/openuds | f4eefa319a3ead827dad999d24e5ee3854d1345d | 3908c875d30ec332490fc8c049bb537e10f10d08 | refs/heads/master | 2021-07-12T20:58:49.281242 | 2021-03-05T22:42:55 | 2021-03-05T22:42:55 | 62,921,174 | 0 | 1 | null | 2016-07-08T22:33:44 | 2016-07-08T22:33:44 | null | UTF-8 | Python | false | false | 4,671 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'UDSWindow.ui'
#
# Created: Mon Apr 27 21:41:43 2015
# by: PyQt4 UI code generator 4.11.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.setWindowModality(QtCore.Qt.NonModal)
MainWindow.resize(259, 185)
MainWindow.setCursor(QtGui.QCursor(QtCore.Qt.BusyCursor))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/images/logo-uds-small")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
MainWindow.setWindowOpacity(1.0)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setAutoFillBackground(True)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout_2.setSpacing(4)
self.verticalLayout_2.setMargin(4)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.frame = QtGui.QFrame(self.centralwidget)
self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame.setFrameShadow(QtGui.QFrame.Raised)
self.frame.setObjectName(_fromUtf8("frame"))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.frame)
self.verticalLayout_3.setSpacing(4)
self.verticalLayout_3.setMargin(4)
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.image = QtGui.QLabel(self.frame)
self.image.setMinimumSize(QtCore.QSize(0, 24))
self.image.setAutoFillBackground(True)
self.image.setText(_fromUtf8(""))
self.image.setPixmap(QtGui.QPixmap(_fromUtf8(":/images/logo-uds-small")))
self.image.setScaledContents(False)
self.image.setAlignment(QtCore.Qt.AlignCenter)
self.image.setObjectName(_fromUtf8("image"))
self.verticalLayout.addWidget(self.image)
self.info = QtGui.QLabel(self.frame)
self.info.setMaximumSize(QtCore.QSize(16777215, 16))
self.info.setObjectName(_fromUtf8("info"))
self.verticalLayout.addWidget(self.info)
self.progressBar = QtGui.QProgressBar(self.frame)
self.progressBar.setProperty("value", 24)
self.progressBar.setTextVisible(False)
self.progressBar.setObjectName(_fromUtf8("progressBar"))
self.verticalLayout.addWidget(self.progressBar)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.cancelButton = QtGui.QPushButton(self.frame)
self.cancelButton.setDefault(True)
self.cancelButton.setFlat(False)
self.cancelButton.setObjectName(_fromUtf8("cancelButton"))
self.horizontalLayout.addWidget(self.cancelButton)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem1)
self.verticalLayout.addLayout(self.horizontalLayout)
self.verticalLayout_3.addLayout(self.verticalLayout)
self.verticalLayout_2.addWidget(self.frame)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "UDS Connection", None))
self.info.setText(_translate("MainWindow", "TextLabel", None))
self.cancelButton.setText(_translate("MainWindow", "Cancel", None))
import UDSResources_rc
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
MainWindow = QtGui.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
| [
"[email protected]"
] | |
7b9bd540581484c8680a1ae63384c40888b2f12c | 7b4820948845f55274b211d676ab8a6253a6298b | /addons/plugin.video.icefilms/resources/lib/resolvers.py | 36e1264a0b0330c1387add5a845b2d5bbb7ed891 | [] | no_license | bopopescu/mw | 524c57d4b859751e298b907a12e44e9711ef72a6 | 5ef2acea0fb4150578e53201463c6bc5da37be20 | refs/heads/master | 2021-05-30T19:33:11.750160 | 2016-01-11T05:28:46 | 2016-01-11T05:28:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 38,586 | py | import xbmc,xbmcgui
import os
import urllib, urllib2
import cookielib
import re
import jsunpack
''' Use addon.common library for http calls '''
from addon.common.net import Net
from addon.common.addon import Addon
net = Net()
addon = Addon('plugin.video.icefilms')
datapath = addon.get_profile()
cookie_path = os.path.join(datapath, 'cookies')
USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.99 Safari/537.36'
ACCEPT = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
def handle_captchas(url, html, data, dialog):
    """Detect and solve whichever captcha type appears on a hoster page.

    Args:
        url: page URL; used as the Referer header and mentioned in errors.
        html: page HTML already fetched by the caller.
        data: dict of POST fields collected so far; captcha answer fields
            are added to it in place.
        dialog: the caller's xbmcgui.DialogProgress; closed here before an
            image-based captcha is shown so the two dialogs do not overlap.

    Returns:
        The (updated) data dict, ready to be POSTed back to the hoster.

    Raises:
        Exception: if the user cancels the keyboard or enters no text.
    """
    headers = {'Referer': url}
    # Temp file used to display puzzle images that must be downloaded first.
    puzzle_img = os.path.join(datapath, "solve_puzzle.png")
    #Check for type of captcha used
    solvemedia = re.search('<iframe src="(http://api.solvemedia.com.+?)"', html)
    recaptcha = re.search('<script type="text/javascript" src="(http://www.google.com.+?)">', html)
    # Numeric captcha renders each digit as an HTML entity positioned with a
    # CSS 'left' offset; captured as (left_px, entity_code) pairs.
    numeric_captcha = re.compile("left:(\d+)px;padding-top:\d+px;'>&#(.+?);<").findall(html)
    #SolveMedia captcha
    if solvemedia:
        dialog.close()
        html = net.http_GET(solvemedia.group(1), headers=headers).content
        # Carry over every hidden form field from the SolveMedia frame.
        for match in re.finditer(r'type=hidden.*?name="([^"]+)".*?value="([^"]+)', html):
            name, value = match.groups()
            data[name] = value
        #Check for alternate puzzle type - stored in a div
        alt_frame = re.search('<div><iframe src="(/papi/media[^"]+)', html)
        if alt_frame:
            html = net.http_GET("http://api.solvemedia.com%s" % alt_frame.group(1)).content
            alt_puzzle = re.search('<div\s+id="typein">\s*<img\s+src="data:image/png;base64,([^"]+)', html, re.DOTALL)
            if alt_puzzle:
                # Puzzle image is embedded inline as base64-encoded PNG data.
                open(puzzle_img, 'wb').write(alt_puzzle.group(1).decode('base64'))
        else:
            # Standard puzzle: download the image from the media URL.
            open(puzzle_img, 'wb').write(net.http_GET("http://api.solvemedia.com%s" % re.search('<img src="(/papi/media[^"]+)"', html).group(1)).content)
        # Show the puzzle and ask the user to type what they see.
        img = xbmcgui.ControlImage(450,15,400,130, puzzle_img)
        wdlg = xbmcgui.WindowDialog()
        wdlg.addControl(img)
        wdlg.show()
        xbmc.sleep(3000)
        kb = xbmc.Keyboard('', 'Type the letters in the image', False)
        kb.doModal()
        capcode = kb.getText()
        if (kb.isConfirmed()):
            userInput = kb.getText()
            if userInput != '':
                solution = kb.getText()
            elif userInput == '':
                raise Exception ('You must enter text in the image to access video')
            wdlg.close()
        else:
            wdlg.close()
            raise Exception ('Captcha Error')
        wdlg.close()
        data['adcopy_response'] = solution
        # NOTE(review): the verify.noscript response is never inspected; the
        # 'manual_challenge' marker below lets the hoster re-verify instead.
        html = net.http_POST('http://api.solvemedia.com/papi/verify.noscript', data)
        data.update({'adcopy_challenge': data['adcopy_challenge'],'adcopy_response': 'manual_challenge'})
    #Google Recaptcha
    elif recaptcha:
        dialog.close()
        html = net.http_GET(recaptcha.group(1), headers=headers).content
        # Challenge token is needed both to build the image URL and for the
        # form fields posted back to the hoster.
        part = re.search("challenge \: \\'(.+?)\\'", html)
        captchaimg = 'http://www.google.com/recaptcha/api/image?c='+part.group(1)
        img = xbmcgui.ControlImage(450,15,400,130,captchaimg)
        wdlg = xbmcgui.WindowDialog()
        wdlg.addControl(img)
        wdlg.show()
        xbmc.sleep(3000)
        kb = xbmc.Keyboard('', 'Type the letters in the image', False)
        kb.doModal()
        capcode = kb.getText()
        if (kb.isConfirmed()):
            userInput = kb.getText()
            if userInput != '':
                solution = kb.getText()
            elif userInput == '':
                raise Exception ('You must enter text in the image to access video')
            wdlg.close()
        else:
            wdlg.close()
            raise Exception ('Captcha Error')
        wdlg.close()
        data.update({'recaptcha_challenge_field':part.group(1),'recaptcha_response_field':solution})
    #Numeric captcha - we can programmatically figure this out
    elif numeric_captcha:
        # Sort digits by their CSS left offset to recover display order, then
        # subtract 48 to map each decimal entity code ('0' == &#48;) to a digit.
        result = sorted(numeric_captcha, key=lambda ltr: int(ltr[0]))
        solution = ''.join(str(int(num[1])-48) for num in result)
        data.update({'code':solution})
    return data
def resolve_180upload(url):
    """Resolve a 180Upload link to a direct media URL.

    First tries the embed page (often captcha-free); if no packed player code
    is found, falls back to the regular download page, solving captchas via
    handle_captchas() and retrying until the captcha is accepted.

    Returns a playable URL with '|Referer=...&User-Agent=...' appended.
    Raises Exception when the link cannot be resolved.
    """
    try:
        dialog = xbmcgui.DialogProgress()
        dialog.create('Resolving', 'Resolving 180Upload Link...')
        dialog.update(0)
        headers = {'Referer': url}
        # Media id is the path component after the host; used to build the
        # embed-page URL.
        media_id = re.search('//.+?/([\w]+)', url).group(1)
        web_url = 'http://180upload.com/embed-%s.html' % media_id
        addon.log_debug( '180Upload - Requesting GET URL: %s' % web_url)
        html = net.http_GET(web_url).content
        dialog.update(50)
        wrong_captcha = True
        while wrong_captcha:
            # Echo back the embed form's hidden fields.
            data = {}
            r = re.findall(r'type="hidden" name="(.+?)" value="(.+?)"', html)
            if r:
                for name, value in r:
                    data[name] = value
            else:
                raise Exception('Unable to resolve 180Upload Link')
            # 1st attempt, probably no captcha
            addon.log('180Upload - Requesting POST URL: %s Data values: %s' % (web_url, data))
            html = net.http_POST(web_url, data, headers=headers).content
            # Player config is packed JS; unpack and look for the stream URL
            # in either of the two known formats.
            packed = re.search('id="player_code".*?(eval.*?\)\)\))', html,re.DOTALL)
            if packed:
                js = jsunpack.unpack(packed.group(1))
                link = re.search('name="src"0="([^"]+)"/>', js.replace('\\',''))
                if link:
                    addon.log('180Upload Link Found: %s' % link.group(1))
                    dialog.update(100)
                    return link.group(1) + '|Referer=%s&User-Agent=%s' % (url, USER_AGENT)
                else:
                    link = re.search("'file','(.+?)'", js.replace('\\',''))
                    if link:
                        addon.log('180Upload Link Found: %s' % link.group(1))
                        return link.group(1) + '|Referer=%s&User-Agent=%s' % (url, USER_AGENT)
            #Cannot get video without captcha, so try regular url
            html = net.http_GET(url).content
            data = {}
            r = re.findall(r'type="hidden" name="(.+?)" value="(.+?)">', html)
            if r:
                for name, value in r:
                    data[name] = value
            else:
                raise Exception('Unable to resolve 180Upload Link')
            #Check for captcha
            data = handle_captchas(url, html, data, dialog)
            # handle_captchas may have closed the dialog; recreate it.
            dialog.create('Resolving', 'Resolving 180Uploads Link...')
            dialog.update(50)
            addon.log_debug( '180Upload - Requesting POST URL: %s Data: %s' % (url, data))
            html = net.http_POST(url, data, headers=headers).content
            # Loop again if the hoster rejected the captcha answer.
            wrong_captcha = re.search('<div class="err">Wrong captcha</div>', html)
            if wrong_captcha:
                addon.show_ok_dialog(['Wrong captcha entered, try again'], title='Wrong Captcha', is_error=False)
        dialog.update(100)
        link = re.search('id="lnk_download[^"]*" href="([^"]+)', html)
        if link:
            addon.log_debug( '180Upload Link Found: %s' % link.group(1))
            return link.group(1) + '|Referer=%s&User-Agent=%s' % (url, USER_AGENT)
        else:
            raise Exception('Unable to resolve 180Upload Link')
    except Exception, e:
        addon.log_error('**** 180Upload Error occured: %s' % e)
        raise
    finally:
        # Always dismiss the progress dialog, even on failure.
        dialog.close()
def resolve_24uploading(url):
    """Resolve a 24Uploading link to a direct download URL.

    Submits the download form twice (first the free-download form, then the
    captcha-protected one), retrying via handle_captchas() until the captcha
    is accepted. Returns the raw download URL (no extra headers appended).
    Raises Exception when the page layout is unrecognised or resolution fails.
    """
    try:
        dialog = xbmcgui.DialogProgress()
        dialog.create('Resolving', 'Resolving 24Uploading Link...')
        dialog.update(0)
        addon.log_debug('24Uploading - Requesting GET URL: %s' % url)
        html = net.http_GET(url).content
        dialog.update(33)
        wrong_captcha = True
        while wrong_captcha:
            # Collect hidden/submit fields for the first form submission.
            data = {}
            r = re.findall('type="(hidden|submit)" name="(.+?)" value="(.*?)">', html)
            if r:
                for none, name, value in r:
                    data[name] = value
            else:
                raise Exception('Unable to resolve 24Uploading Link')
            addon.log('24Uploading - Requesting POST URL: %s DATA: %s' % (url, data))
            html = net.http_POST(url, data).content
            dialog.update(66)
            # Second form: collect fields again from the response page.
            data = {}
            r = re.findall('type="(hidden|submit)" name="(.+?)" value="(.*?)">', html)
            if r:
                for none, name, value in r:
                    data[name] = value
            else:
                raise Exception('Unable to resolve 24Uploading Link')
            #Handle captcha
            data = handle_captchas(url, html, data, dialog)
            # handle_captchas may have closed the dialog; recreate it.
            dialog.create('Resolving', 'Resolving 24Uploading Link...')
            dialog.update(66)
            addon.log('24Uploading - Requesting POST URL: %s DATA: %s' % (url, data))
            html = net.http_POST(url, data).content
            # Loop again if the hoster rejected the captcha answer.
            wrong_captcha = re.search('<div class="err">Wrong captcha</div>', html)
            if wrong_captcha:
                addon.show_ok_dialog(['Wrong captcha entered, try again'], title='Wrong Captcha', is_error=False)
        dialog.update(100)
        link = re.search('<div class="btn_down">.+<a href="(.+?)" style="display:block;">', html, re.DOTALL)
        if link:
            addon.log_debug('24Uploading Link Found: %s' % link.group(1))
            return link.group(1)
        else:
            raise Exception('Unable to resolve 24Uploading Link')
    except Exception, e:
        addon.log_error('**** 24Uploading Error occured: %s' % e)
        raise
    finally:
        # Always dismiss the progress dialog, even on failure.
        dialog.close()
def resolve_clicknupload(url):
try:
media_id = re.search('//.+?/([\w]+)', url).group(1)
url = 'http://clicknupload.me/%s' % media_id
headers = {'Referer': url}
#Show dialog box so user knows something is happening
dialog = xbmcgui.DialogProgress()
dialog.create('Resolving', 'Resolving ClicknUpload Link...')
dialog.update(0)
addon.log('ClicknUpload - Requesting GET URL: %s' % url)
html = net.http_GET(url).content
dialog.update(33)
#Check page for any error msgs
if re.search('<b>File Not Found</b>', html):
addon.log_error('***** ClicknUpload - File is deleted')
raise Exception('File has been deleted from the host')
#Set POST data values
data = {}
r = re.findall('type="(hidden|submit)" name="(.+?)" value="(.*?)">', html)
if r:
for none, name, value in r:
data[name] = value
addon.log('ClicknUpload - Requesting POST URL: %s DATA: %s' % (url, data))
html = net.http_POST(url, data, headers=headers).content
dialog.update(66)
data = {}
r = re.findall('type="(hidden|submit)" name="(.+?)" value="(.*?)">', html)
if r:
for none, name, value in r:
data[name] = value
#Check for captcha
data = handle_captchas(url, html, data, dialog)
wait_string = re.search('<span id="countdown_str">Please wait <span id=".+?" style=".+?">([0-9]+)</span>', html)
if wait_string:
xbmc.sleep(int(wait_string.group(1)) * 1000)
addon.log('ClicknUpload - Requesting POST URL: %s DATA: %s' % (url, data))
html = net.http_POST(url, data, headers=headers).content
#Get download link
dialog.update(100)
link = re.search("onClick\s*=\s*\"window\.open\('([^']+)", html)
if link:
return link.group(1) + '|User-Agent=%s' % USER_AGENT
else:
raise Exception("Unable to find final link")
except Exception, e:
addon.log_error('**** ClicknUpload Error occured: %s' % e)
raise
finally:
dialog.close()
def resolve_upload_af(url):
try:
headers = {'Referer': url}
#Show dialog box so user knows something is happening
dialog = xbmcgui.DialogProgress()
dialog.create('Resolving', 'Resolving Upload.af Link...')
dialog.update(0)
addon.log('Upload.af - Requesting GET URL: %s' % url)
html = net.http_GET(url).content
dialog.update(33)
#Check page for any error msgs
if re.search('<b>File Not Found</b>', html):
addon.log_error('***** Upload.af - File is deleted')
raise Exception('File has been deleted from the host')
#Set POST data values
data = {}
r = re.findall('type="(hidden|submit)" name="(.+?)" value="(.*?)">', html)
if r:
for none, name, value in r:
data[name] = value
data['method_free'] = 'Free Download >>'
addon.log('Upload.af - Requesting POST URL: %s DATA: %s' % (url, data))
html = net.http_POST(url, data, headers=headers).content
dialog.update(66)
data = {}
r = re.findall('type="(hidden|submit)" name="(.+?)" value="(.*?)">', html)
if r:
for none, name, value in r:
data[name] = value
#Check for captcha
data = handle_captchas(url, html, data, dialog)
wait_string = re.search('<div class="btn btn-danger" id="countdown">Wait <b class="seconds">([0-9]+)</b> seconds</div>', html)
if wait_string:
xbmc.sleep(int(wait_string.group(1)) * 1000)
addon.log('Upload.af - Requesting POST URL: %s DATA: %s' % (url, data))
html = net.http_POST(url, data, headers=headers).content
#Get download link
dialog.update(100)
link = re.search('<a href="(.+?)".+?>Download</a>', html)
if link:
return link.group(1) + '|User-Agent=%s' % USER_AGENT
else:
raise Exception("Unable to find final link")
except Exception, e:
addon.log_error('**** Upload.af Error occured: %s' % e)
raise
finally:
dialog.close()
def resolve_uploadx(url):
try:
headers = {'Referer': url}
#Show dialog box so user knows something is happening
dialog = xbmcgui.DialogProgress()
dialog.create('Resolving', 'Resolving Uploadx Link...')
dialog.update(0)
addon.log('Uploadx - Requesting GET URL: %s' % url)
html = net.http_GET(url).content
dialog.update(33)
#Check page for any error msgs
if re.search('<b>File Not Found</b>', html):
addon.log_error('***** Uploadx - File is deleted')
raise Exception('File has been deleted from the host')
#Set POST data values
data = {}
r = re.findall('type="(hidden|submit)" name="(.+?)" value="(.*?)">', html)
if r:
for none, name, value in r:
data[name] = value
data['method_free'] = 'Free Download >>'
addon.log('Uploadx - Requesting POST URL: %s DATA: %s' % (url, data))
html = net.http_POST(url, data, headers=headers).content
dialog.update(66)
data = {}
r = re.findall('type="(hidden|submit)" name="(.+?)" value="(.*?)">', html)
if r:
for none, name, value in r:
data[name] = value
#Check for captcha
data = handle_captchas(url, html, data, dialog)
# wait_string = re.search('<div class="btn btn-danger" id="countdown">Wait <b class="seconds">([0-9]+)</b> seconds</div>', html)
# if wait_string:
# xbmc.sleep(int(wait_string.group(1)) * 1000)
addon.log('Uploadx - Requesting POST URL: %s DATA: %s' % (url, data))
html = net.http_POST(url, data, headers=headers).content
#Get download link
dialog.update(100)
link = re.search('<a href="(.+?)".+?>Download</a>', html)
if link:
return link.group(1) + '|User-Agent=%s' % USER_AGENT
else:
raise Exception("Unable to find final link")
except Exception, e:
addon.log_error('**** Uploadx Error occured: %s' % e)
raise
finally:
dialog.close()
def resolve_vidhog(url):
try:
#Show dialog box so user knows something is happening
dialog = xbmcgui.DialogProgress()
dialog.create('Resolving', 'Resolving VidHog Link...')
dialog.update(0)
addon.log_debug('VidHog - Requesting GET URL: %s' % url)
html = net.http_GET(url).content
dialog.update(50)
#Check page for any error msgs
if re.search('This server is in maintenance mode', html):
raise Exception('File is currently unavailable on the host')
if re.search('<b>File Not Found</b>', html):
raise Exception('File has been deleted')
filename = re.search('<strong>\(<font color="red">(.+?)</font>\)</strong><br><br>', html).group(1)
extension = re.search('(\.[^\.]*$)', filename).group(1)
guid = re.search('http://vidhog.com/(.+)$', url).group(1)
vid_embed_url = 'http://vidhog.com/vidembed-%s%s' % (guid, extension)
request = urllib2.Request(vid_embed_url)
request.add_header('User-Agent', USER_AGENT)
request.add_header('Accept', ACCEPT)
request.add_header('Referer', url)
response = urllib2.urlopen(request)
redirect_url = re.search('(http://.+?)video', response.geturl()).group(1)
download_link = redirect_url + filename
dialog.update(100)
return download_link
except Exception, e:
addon.log_error('**** VidHog Error occured: %s' % e)
raise
finally:
dialog.close()
def resolve_vidplay(url):
try:
#Show dialog box so user knows something is happening
dialog = xbmcgui.DialogProgress()
dialog.create('Resolving', 'Resolving VidPlay Link...')
dialog.update(0)
addon.log_debug('VidPlay - Requesting GET URL: %s' % url)
html = net.http_GET(url).content
dialog.update(50)
#Check page for any error msgs
if re.search('This server is in maintenance mode', html):
raise Exception('File is currently unavailable on the host')
if re.search('<b>File Not Found</b>', html):
raise Exception('File has been deleted')
filename = re.search('<h4>(.+?)</h4>', html).group(1)
extension = re.search('(\.[^\.]*$)', filename).group(1)
guid = re.search('http://vidplay.net/(.+)$', url).group(1)
vid_embed_url = 'http://vidplay.net/vidembed-%s%s' % (guid, extension)
request = urllib2.Request(vid_embed_url)
request.add_header('User-Agent', USER_AGENT)
request.add_header('Accept', ACCEPT)
request.add_header('Referer', url)
response = urllib2.urlopen(request)
redirect_url = re.search('(http://.+?)video', response.geturl()).group(1)
download_link = redirect_url + filename + '|Referer=%s&User-Agent=%s' % (url, USER_AGENT)
dialog.update(100)
return download_link
except Exception, e:
addon.log_error('**** VidPlay Error occured: %s' % e)
raise
finally:
dialog.close()
def resolve_epicshare(url):
try:
puzzle_img = os.path.join(datapath, "epicshare_puzzle.png")
#Show dialog box so user knows something is happening
dialog = xbmcgui.DialogProgress()
dialog.create('Resolving', 'Resolving EpicShare Link...')
dialog.update(0)
addon.log('EpicShare - Requesting GET URL: %s' % url)
html = net.http_GET(url).content
dialog.update(50)
#Check page for any error msgs
if re.search('This server is in maintenance mode', html):
addon.log_error('***** EpicShare - Site reported maintenance mode')
raise Exception('File is currently unavailable on the host')
if re.search('<b>File Not Found</b>', html):
addon.log_error('***** EpicShare - File not found')
raise Exception('File has been deleted')
wrong_captcha = True
while wrong_captcha:
data = {}
r = re.findall(r'type="hidden" name="(.+?)" value="(.+?)">', html)
if r:
for name, value in r:
data[name] = value
else:
addon.log_error('***** EpicShare - Cannot find data values')
raise Exception('Unable to resolve EpicShare Link')
#Handle captcha
data = handle_captchas(url, html, data, dialog)
dialog.create('Resolving', 'Resolving EpicShare Link...')
dialog.update(50)
addon.log('EpicShare - Requesting POST URL: %s' % url)
html = net.http_POST(url, data).content
wrong_captcha = re.search('<div class="err">Wrong captcha</div>', html)
if wrong_captcha:
addon.show_ok_dialog(['Wrong captcha entered, try again'], title='Wrong Captcha', is_error=False)
dialog.update(100)
link = re.search('product_download_url=(.+?)"', html)
if link:
addon.log('EpicShare Link Found: %s' % link.group(1))
return link.group(1)
else:
addon.log_error('***** EpicShare - Cannot find final link')
raise Exception('Unable to resolve EpicShare Link')
except Exception, e:
addon.log_error('**** EpicShare Error occured: %s' % e)
raise
finally:
dialog.close()
def resolve_hugefiles(url):
    """Resolve a HugeFiles link to a direct media URL.

    Posts the embed-page form (with captcha answers from handle_captchas),
    retrying while the response still shows a captcha, then extracts the
    stream URL from the packed player JavaScript.

    Returns the URL with '|Referer=...&User-Agent=...' appended, or None if
    no packed player code is found (legacy fallback is commented out below).
    Raises Exception when the file is missing or the form cannot be parsed.
    """
    try:
        headers = {'Referer': 'http://www.icefilms.info/', 'host': 'hugefiles.net'}
        # NOTE(review): puzzle_img appears unused here; captcha images are
        # written to a separate path inside handle_captchas().
        puzzle_img = os.path.join(datapath, "hugefiles_puzzle.png")
        #Show dialog box so user knows something is happening
        dialog = xbmcgui.DialogProgress()
        dialog.create('Resolving', 'Resolving HugeFiles Link...')
        dialog.update(0)
        # Media id is the path component after the host; used to build the
        # embed-page URL.
        media_id = re.search('//.+?/([\w]+)', url).group(1)
        web_url = 'http://hugefiles.net/embed-%s.html' % media_id
        addon.log_debug('HugeFiles - Requesting GET URL: %s' % web_url)
        html = net.http_GET(web_url, headers=headers).content
        dialog.update(50)
        #Check page for any error msgs
        if re.search('<h3>File Not found</h3>', html):
            addon.log_error('***** HugeFiles - File Not Found')
            raise Exception('File Not Found')
        wrong_captcha = True
        # Subsequent POSTs use the embed page itself as the Referer.
        headers = {'Referer': web_url, 'host': 'hugefiles.net'}
        while wrong_captcha:
            #Set POST data values
            data = {}
            r = re.findall(r'type="hidden"\s+name="([^"]+)"\s+value="([^"]+)', html)
            if r:
                for name, value in r:
                    data[name] = value
            else:
                addon.log_error('***** HugeFiles - Cannot find data values')
                raise Exception('Unable to resolve HugeFiles Link')
            data['method_free'] = 'Free Download'
            data['w'] = ""
            data['h'] = ""
            #Handle captcha
            data.update(handle_captchas(web_url, html, data, dialog))
            # handle_captchas may have closed the dialog; recreate it.
            dialog.create('Resolving', 'Resolving HugeFiles Link...')
            dialog.update(50)
            addon.log('HugeFiles - Requesting POST URL: %s DATA: %s' % (web_url, data))
            html = net.http_POST(web_url, data, headers=headers).content
            # If the response still contains any captcha widget, the answer
            # was rejected - prompt the user and loop again.
            solvemedia = re.search('<iframe src="((?:http:)?//api.solvemedia.com[^"]+)', html)
            recaptcha = re.search('<script type="text/javascript" src="(http://www.google.com[^"]+)', html)
            numeric_captcha = re.compile("left:(\d+)px;padding-top:\d+px;'>&#(.+?);<").findall(html)
            if solvemedia or recaptcha or numeric_captcha:
                addon.show_ok_dialog(['Wrong captcha entered, try again'], title='Wrong Captcha', is_error=False)
            else:
                wrong_captcha = False
        #Get download link
        dialog.update(100)
        # Player config is packed JS; unpack and look for the stream URL in
        # either of the two known formats.
        packed = re.search('id="player_code".*?(eval.*?\)\)\))', html,re.DOTALL)
        if packed:
            js = jsunpack.unpack(packed.group(1))
            link = re.search('name="src"0="([^"]+)"/>', js.replace('\\',''))
            if link:
                addon.log('HugeFiles Link Found: %s' % link.group(1))
                return link.group(1) + '|Referer=%s&User-Agent=%s' % (url, USER_AGENT)
            else:
                link = re.search("'file','(.+?)'", js.replace('\\',''))
                if link:
                    addon.log('HugeFiles Link Found: %s' % link.group(1))
                    return link.group(1) + '|Referer=%s&User-Agent=%s' % (url, USER_AGENT)
        # Legacy fallback, kept for reference:
        #r = re.search('fileUrl\s*=\s*"([^"]+)', html)
        #if r:
        #    return r.group(1)
    except Exception, e:
        addon.log_error('**** HugeFiles Error occured: %s' % e)
        raise
    finally:
        # Always dismiss the progress dialog, even on failure.
        dialog.close()
def resolve_entroupload(url):
    """Resolve an EntroUpload page URL to a direct media link.

    url is the EntroUpload page URL.

    Returns the direct link, or raises an Exception when the file is
    missing or the page cannot be parsed.
    """
    try:
        # Progress dialog so the user can see that work is happening.
        dialog = xbmcgui.DialogProgress()
        dialog.create('Resolving', 'Resolving EntroUpload Link...')
        dialog.update(0)
        addon.log('EntroUpload - Requesting GET URL: %s' % url)
        html = net.http_GET(url).content
        dialog.update(50)
        # Bail out early if the host reports a missing file.
        if re.search('<b>File Not Found</b>', html):
            addon.log_error('***** EntroUpload - File Not Found')
            raise Exception('File Not Found')
        # Collect the hidden form fields needed for the free-download POST.
        fields = re.findall(r'type="hidden" name="(.+?)" value="(.+?)">', html)
        if not fields:
            addon.log_error('***** EntroUpload - Cannot find data values')
            raise Exception('Unable to resolve EntroUpload Link')
        data = dict(fields)
        data['method_free'] = 'Free Download'
        # A missing 'fname' raises KeyError, which is mapped to the
        # generic failure by the handler below.
        file_name = data['fname']
        addon.log('EntroUpload - Requesting POST URL: %s DATA: %s' % (url, data))
        html = net.http_POST(url, data).content
        dialog.update(100)
        # The player setup is packed javascript; unpack it to expose src.
        packed = re.search(
            '<script type=(?:"|\')text/javascript(?:"|\')>(eval\('
            'function\(p,a,c,k,e,d\)(?!.+player_ads.+).+np_vid.+?)'
            '\s+?</script>', html, re.DOTALL + re.IGNORECASE)
        if not packed:
            addon.log_error('***** EntroUpload - Cannot find final link')
            raise Exception('Unable to resolve EntroUpload Link')
        unpacked = jsunpack.unpack(packed.group(1))
        link = re.search(
            '<embed id="np_vid"type="video/divx"src="(.+?)'
            '"custommode=', unpacked)
        if not link:
            addon.log_error('***** EntroUpload - Cannot find final link')
            raise Exception('Unable to resolve EntroUpload Link')
        return link.group(1)
    except Exception as e:
        addon.log_error('**** EntroUpload Error occured: %s' % e)
        raise
    finally:
        dialog.close()
def resolve_donevideo(url):
    """Resolve a DoneVideo page URL to a direct media link.

    url is the DoneVideo page URL.

    Returns the direct link.  Raises an Exception on any failure,
    consistent with the other resolvers in this module (previously some
    failure paths silently returned None or continued with stale data).
    """
    try:
        #Show dialog box so user knows something is happening
        dialog = xbmcgui.DialogProgress()
        dialog.create('Resolving', 'Resolving DoneVideo Link...')
        dialog.update(0)
        addon.log('DoneVideo - Requesting GET URL: %s' % url)
        html = net.http_GET(url).content
        # First form: collect the hidden fields for the initial POST.
        data = {}
        r = re.findall(r'type="hidden" name="(.+?)" value="(.+?)">', html)
        if r:
            for name, value in r:
                data[name] = value
        else:
            addon.log_error('***** DoneVideo - Cannot find data values')
            raise Exception('Unable to resolve DoneVideo Link')
        data['method_free'] = 'Continue to Video'
        addon.log('DoneVideo - Requesting POST URL: %s' % url)
        html = net.http_POST(url, data).content
        dialog.update(50)
        # Second form: the confirmation page repeats the hidden fields
        # (merged over the first form's values, as before).
        r = re.findall(r'type="hidden" name="(.+?)" value="(.+?)">', html)
        if r:
            for name, value in r:
                data[name] = value
        else:
            # Bug fix: previously this only logged the error and then
            # re-POSTed the stale data from the first form.
            addon.log_error('Could not resolve link')
            raise Exception('Unable to resolve DoneVideo Link')
        data['method_free'] = 'Continue to Video'
        addon.log('DoneVideo - Requesting POST URL: %s' % url)
        html = net.http_POST(url, data).content
        #Get download link
        dialog.update(100)
        sPattern = '''<div id="player_code">.*?<script type='text/javascript'>(eval.+?)</script>'''
        r = re.search(sPattern, html, re.DOTALL + re.IGNORECASE)
        if not r:
            # Bug fix: previously fell off the end and returned None.
            addon.log_error('***** DoneVideo - Cannot find final link')
            raise Exception('Unable to resolve DoneVideo Link')
        # The player setup is packed javascript; unpack and de-escape it.
        sUnpacked = jsunpack.unpack(r.group(1)).replace("\\", "")
        r = re.search("addVariable.+?'file','(.+?)'", sUnpacked)
        if r:
            return r.group(1)
        # Fall back to the divx embed tag form.
        r = re.search('<embed id="np_vid"type="video/divx"src="(.+?)'
                      '"custommode=', sUnpacked)
        if r:
            return r.group(1)
        addon.log_error('***** DoneVideo - Cannot find final link')
        raise Exception('Unable to resolve DoneVideo Link')
    except Exception as e:
        addon.log_error('**** DoneVideo Error occured: %s' % e)
        raise
    finally:
        dialog.close()
def SHARED2_HANDLER(url):
    """Resolve a 2Shared page URL to a direct download link.

    url is the 2Shared page URL.

    Returns the direct link, or None when the host's free download limit
    has been reached or the page cannot be parsed.
    """
    html = net.http_GET(url).content
    #Check if a download limit msg is showing
    if re.search('Your free download limit is over.', html):
        # Bug fix: guard the wait-time scrape so a page-layout change
        # cannot crash with an AttributeError on a failed match.
        wait_match = re.search('<span id="timeToWait">(.+?)</span>', html)
        wait_time = wait_match.group(1) if wait_match else 'a while'
        Notify('big', '2Shared Download Limit Exceeded', 'You have reached your download limit', '', '', 'You must wait ' + wait_time + ' to try again')
        return None
    #If no download limit msg lets grab link, must post to it first for download to activate
    d3fid_match = re.search('<input type="hidden" name="d3fid" value="(.+?)">', html)
    d3link_match = re.search('<input type="hidden" name="d3link" value="(.+?)">', html)
    if d3fid_match is None or d3link_match is None:
        # Bug fix: previously .group(1) was called unconditionally and
        # raised an AttributeError when the hidden inputs were missing.
        return None
    d3link = d3link_match.group(1)
    data = {'d3fid': d3fid_match.group(1), 'd3link': d3link}
    html = net.http_POST(url, data).content
    return d3link
def resolve_tusfiles(url):
    """Resolve a TusFiles page URL to a direct download link.

    url is the TusFiles page URL (http://tusfiles.net/<guid>).

    Returns the direct link, or raises an Exception on failure.  Note
    that the chained .group(1) calls below raise AttributeError (caught
    and re-raised by the handler) if any scrape pattern fails to match.
    """
    try:
        #Show dialog box so user knows something is happening
        dialog = xbmcgui.DialogProgress()
        dialog.create('Resolving', 'Resolving TusFiles Link...')
        dialog.update(0)
        addon.log('TusFiles - Requesting GET URL: %s' % url)
        html = net.http_GET(url).content
        dialog.update(50)
        #Check page for any error msgs
        if re.search('This server is in maintenance mode', html):
            addon.log_error('***** TusFiles - Site reported maintenance mode')
            raise Exception('File is currently unavailable on the host')
        if re.search('<b>File Not Found</b>', html):
            addon.log_error('***** TusFiles - File not found')
            raise Exception('File has been deleted')
        # Scrape the display filename, keep only the last path component
        # and its extension so the vidembed URL can be reconstructed.
        filename = re.search('Start download<h1><span class="label label-default"><FONT COLOR="#ffffff">(.+?)</FONT>', html).group(1)
        filename = filename.split('/')[-1]
        extension = re.search('(\.[^\.]*$)', filename).group(1)
        guid = re.search('http://tusfiles.net/(.+)$', url).group(1)
        # The host's embed endpoint redirects to the real media server.
        vid_embed_url = 'http://tusfiles.net/vidembed-%s%s' % (guid, extension)
        request = urllib2.Request(vid_embed_url)
        request.add_header('User-Agent', USER_AGENT)
        request.add_header('Accept', ACCEPT)
        request.add_header('Referer', url)
        # urllib2 follows the redirect; geturl() then holds the final
        # media-server URL, whose prefix (up to "video") is reused.
        response = urllib2.urlopen(request)
        redirect_url = re.search('(http[s]*://.+?)video', response.geturl()).group(1)
        download_link = redirect_url + filename
        dialog.update(100)
        return download_link
    except Exception, e:
        addon.log_error('**** TusFiles Error occured: %s' % e)
        raise
    finally:
        dialog.close()
def resolve_xfileload(url):
    """Resolve an XfileLoad page URL to a direct download link.

    url is the XfileLoad page URL.

    Returns the direct link, or raises an Exception on failure.
    """
    try:
        # Progress dialog so the user can see that work is happening.
        dialog = xbmcgui.DialogProgress()
        dialog.create('Resolving', 'Resolving XfileLoad Link...')
        dialog.update(0)
        addon.log('XfileLoad - Requesting GET URL: %s' % url)
        html = net.http_GET(url).content
        dialog.update(50)
        # The host shows an explicit notice for deleted files.
        if re.search('<li>The file was deleted by its owner', html):
            addon.log_error('***** XfileLoad - File is deleted')
            raise Exception('File has been deleted from the host')
        # Gather every hidden/submit form field for the download POST
        # (an empty scrape still POSTs an empty form, as before).
        fields = re.findall('type="(hidden|submit)" name="(.+?)" value="(.*?)">', html)
        data = dict((name, value) for _kind, name, value in fields)
        addon.log('XfileLoad - Requesting POST URL: %s DATA: %s' % (url, data))
        html = net.http_POST(url, data).content
        dialog.update(100)
        # The download anchor wraps the host's "down" button image.
        link = re.search('<a href="(.+?)" target=""><img src="http://xfileload.com/3ghdes/images/downdown.png" /></a>', html)
        if link is None:
            raise Exception("Unable to find final link")
        return link.group(1)
    except Exception as e:
        addon.log_error('**** XfileLoad Error occured: %s' % e)
        raise
    finally:
        dialog.close()
def resolve_mightyupload(url):
    """Resolve a MightyUpload page URL to a direct media link.

    url is the MightyUpload page or embed URL.

    Returns the direct link suffixed with a User-Agent header hint, or
    raises on failure.  NOTE(review): if no packed eval() block matches,
    findall(...)[-1] raises IndexError (re-raised by the handler); if a
    block matches but is falsy the function falls through and returns
    None — presumably unintended, confirm against callers.
    """
    try:
        #Show dialog box so user knows something is happening
        dialog = xbmcgui.DialogProgress()
        dialog.create('Resolving', 'Resolving MightyUpload Link...')
        dialog.update(0)
        # Normalise any embed URL to the canonical embed form by
        # extracting the media id and rebuilding the URL.
        url = url.replace('/embed-', '/')
        url = re.compile('//.+?/([\w]+)').findall(url)[0]
        url = 'http://www.mightyupload.com/embed-%s.html' % url
        addon.log('MightyUpload - Requesting GET URL: %s' % url)
        html = net.http_GET(url).content
        dialog.update(100)
        # Easy case: the file URL appears in plain player config.
        link = re.compile("file *: *'(.+?)'").findall(html)
        if len(link) > 0:
            return link[0] + '|User-Agent=%s' % (USER_AGENT)
        # Otherwise unpack the last packed eval() javascript block and
        # try the known player source patterns in turn.
        result = re.compile('(eval.*?\)\)\))').findall(html)[-1]
        if result:
            sJavascript = result
            sUnpacked = jsunpack.unpack(sJavascript)
            r = re.search("'file','([^']+)'", sUnpacked.replace('\\', ''))
            if not r:
                r = re.search('"src"value="([^"]+)', sUnpacked.replace('\\', ''))
            if not r:
                r = re.search('"src"[0-9]="(.+?)"/>', sUnpacked.replace('\\', ''))
            if r:
                return r.group(1) + '|User-Agent=%s' % (USER_AGENT)
            else:
                raise Exception("Unable to find final link")
    except Exception, e:
        addon.log_error('**** MightyUpload Error occured: %s' % e)
        raise
    finally:
        dialog.close()
def resolve_xvidstage(url):
    """Resolve an XvidStage page URL to a direct media link.

    url is the XvidStage page or embed URL.

    Returns the direct link, or raises an Exception on failure.
    """
    try:
        # Progress dialog so the user can see that work is happening.
        dialog = xbmcgui.DialogProgress()
        dialog.create('Resolving', 'Resolving XvidStage Link...')
        dialog.update(0)
        # Normalise any embed URL to the canonical embed form.
        media_id = re.compile('//.+?/([\w]+)').findall(url.replace('/embed-', '/'))[0]
        url = 'http://xvidstage.com/embed-%s.html' % media_id
        addon.log('XvidStage - Requesting GET URL: %s' % url)
        html = net.http_GET(url).content
        dialog.update(100)
        # The last packed eval() block on the page holds the player
        # setup; an IndexError here is re-raised by the handler below.
        packed = re.compile('(eval.*?\)\)\))').findall(html)[-1]
        if not packed:
            raise Exception("Unable to find final link")
        unpacked = jsunpack.unpack(packed)
        match = re.search("'file','(.+?)'", unpacked)
        if match is None:
            raise Exception("Unable to find final link")
        return match.group(1)
    except Exception as e:
        addon.log_error('**** XvidStage Error occured: %s' % e)
        raise
    finally:
        dialog.close()
"[email protected]"
] | |
14c05659bfcf17e4fd5989ae12e8a8272b62a798 | dfe0798a322dca6b90e10743936c500e618ff078 | /Sample_Project/env/lib/python3.8/site-packages/sipconfig.py | 1dd5c9bc7e2d6bcd641e27dea9ab88bf6031ba90 | [
"Python-2.0"
] | permissive | SenthilKumar009/Udemy-MLandDS-CompleteMastery | 7d0ff9d2ffa688ba35de5667441eafc443f9792a | ca5e867134ad2bbf03d158d78b34905390ab58b2 | refs/heads/master | 2022-11-21T16:01:51.416001 | 2020-06-16T03:15:48 | 2020-06-16T03:15:48 | 270,206,193 | 1 | 1 | null | 2022-11-16T05:57:51 | 2020-06-07T05:44:20 | Jupyter Notebook | UTF-8 | Python | false | false | 97,321 | py | # This module is intended to be used by the build/installation scripts of
# extension modules created with SIP. It provides information about file
# locations, version numbers etc., and provides some classes and functions.
#
# Copyright (c) 2018 Riverbank Computing Limited <[email protected]>
#
# This file is part of SIP.
#
# This copy of SIP is licensed for use under the terms of the SIP License
# Agreement. See the file LICENSE for more details.
#
# This copy of SIP may also used under the terms of the GNU General Public
# License v2 or v3 as published by the Free Software Foundation which can be
# found in the files LICENSE-GPL2 and LICENSE-GPL3 included in this package.
#
# SIP is supplied WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
import sys
import os
import stat
import string
import re
# These are installation specific values created when SIP was configured.
# They are generated by SIP's configure script; do not edit by hand.
_pkg_config = {
    'arch': '',
    'default_bin_dir': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/bin',
    'default_mod_dir': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/lib/python3.8/site-packages',
    'default_sip_dir': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/share/sip',
    'deployment_target': '',
    'platform': 'linux-g++',
    'py_conf_inc_dir': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/include/python3.8',
    'py_inc_dir': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/include/python3.8',
    'py_lib_dir': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/lib/python3.8/config',
    'py_version': 0x030800,
    'qt_framework': 0,
    'sip_bin': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/bin/sip',
    'sip_config_args': '--sysroot=/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env',
    'sip_inc_dir': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/include/python3.8',
    'sip_module_dir': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/lib/python3.8/site-packages',
    'sip_root_dir': '/home/skk_thenotorious/Documents/Udemy-MachineLearning/Sample_Project/env/lib/python3.8/site-packages',
    'sip_version': 0x04130d,
    'sip_version_str': '4.19.13',
    'universal': ''
}
# The default platform build macros (qmake-style names) for the platform
# SIP was configured on.  Configuration.build_macros() returns this table
# unless a sub-class module supplies its own _default_macros.
_default_macros = {
    'AIX_SHLIB': '',
    'AR': 'ar cqs',
    'CC': 'gcc',
    'CFLAGS': '-pipe',
    'CFLAGS_APP': '',
    'CFLAGS_CONSOLE': '',
    'CFLAGS_DEBUG': '-g',
    'CFLAGS_EXCEPTIONS_OFF': '',
    'CFLAGS_EXCEPTIONS_ON': '',
    'CFLAGS_MT': '',
    'CFLAGS_MT_DBG': '',
    'CFLAGS_MT_DLL': '',
    'CFLAGS_MT_DLLDBG': '',
    'CFLAGS_RELEASE': '-O2',
    'CFLAGS_RTTI_OFF': '',
    'CFLAGS_RTTI_ON': '',
    'CFLAGS_SHLIB': '-fPIC',
    'CFLAGS_STL_OFF': '',
    'CFLAGS_STL_ON': '',
    'CFLAGS_THREAD': '-D_REENTRANT',
    'CFLAGS_WARN_OFF': '-w',
    'CFLAGS_WARN_ON': '-Wall -W',
    'CHK_DIR_EXISTS': 'test -d',
    'CONFIG': 'qt warn_on release incremental link_prl',
    'COPY': 'cp -f',
    'CXX': 'g++',
    'CXXFLAGS': '-pipe',
    'CXXFLAGS_APP': '',
    'CXXFLAGS_CONSOLE': '',
    'CXXFLAGS_DEBUG': '-g',
    'CXXFLAGS_EXCEPTIONS_OFF': '',
    'CXXFLAGS_EXCEPTIONS_ON': '',
    'CXXFLAGS_MT': '',
    'CXXFLAGS_MT_DBG': '',
    'CXXFLAGS_MT_DLL': '',
    'CXXFLAGS_MT_DLLDBG': '',
    'CXXFLAGS_RELEASE': '-O2',
    'CXXFLAGS_RTTI_OFF': '',
    'CXXFLAGS_RTTI_ON': '',
    'CXXFLAGS_SHLIB': '-fPIC',
    'CXXFLAGS_STL_OFF': '',
    'CXXFLAGS_STL_ON': '',
    'CXXFLAGS_THREAD': '-D_REENTRANT',
    'CXXFLAGS_WARN_OFF': '-w',
    'CXXFLAGS_WARN_ON': '-Wall -W',
    'DEFINES': '',
    'DEL_FILE': 'rm -f',
    'EXTENSION_PLUGIN': '',
    'EXTENSION_SHLIB': '',
    'INCDIR': '',
    'INCDIR_OPENGL': '/usr/X11R6/include',
    'INCDIR_X11': '/usr/X11R6/include',
    'LFLAGS': '',
    'LFLAGS_CONSOLE': '',
    'LFLAGS_CONSOLE_DLL': '',
    'LFLAGS_DEBUG': '',
    'LFLAGS_DLL': '',
    'LFLAGS_OPENGL': '',
    'LFLAGS_PLUGIN': '-shared',
    'LFLAGS_RELEASE': '',
    'LFLAGS_RPATH': '',
    'LFLAGS_SHLIB': '-shared',
    'LFLAGS_SONAME': '-Wl,-soname,',
    'LFLAGS_THREAD': '',
    'LFLAGS_WINDOWS': '',
    'LFLAGS_WINDOWS_DLL': '',
    'LIB': '',
    'LIBDIR': '',
    'LIBDIR_OPENGL': '/usr/X11R6/lib',
    'LIBDIR_X11': '/usr/X11R6/lib',
    'LIBS': '',
    'LIBS_CONSOLE': '',
    'LIBS_CORE': '',
    'LIBS_GUI': '',
    'LIBS_NETWORK': '',
    'LIBS_OPENGL': '-lGLU -lGL',
    'LIBS_RT': '',
    'LIBS_RTMT': '',
    'LIBS_THREAD': '-lpthread',
    'LIBS_WEBKIT': '',
    'LIBS_WINDOWS': '',
    'LIBS_X11': '-lXext -lX11 -lm',
    'LINK': 'g++',
    'LINK_SHLIB': 'g++',
    'LINK_SHLIB_CMD': '',
    'MAKEFILE_GENERATOR': 'UNIX',
    'MKDIR': 'mkdir -p',
    'RANLIB': '',
    'RPATH': '-Wl,-rpath,',
    'STRIP': 'strip'
}
# The stack of configuration dictionaries searched (in order) by
# Configuration.__getattr__; rebuilt by Configuration.__init__.
_config_stack = []
class Configuration(object):
    """Represents the SIP configuration values.

    Configuration values and user options are exposed as instance
    attributes (via __getattr__), resolved against a stack of
    configuration dictionaries.
    """

    def __init__(self, sub_cfg=None):
        """Initialise an instance of the class.

        sub_cfg is the list of sub-class configurations.  It should be
        None when called normally.
        """
        # Locate the platform build macros in the closest imported module
        # along the MRO from where this (possibly sub-classed)
        # configuration was originally defined.
        self._macros = None

        for klass in self.__class__.__mro__:
            if klass is object:
                continue

            module = sys.modules[klass.__module__]

            if hasattr(module, "_default_macros"):
                self._macros = module._default_macros
                break

        # The package configuration always sits at the bottom of the
        # stack, below any sub-class configurations.
        cfg = sub_cfg if sub_cfg else []
        cfg.append(_pkg_config)

        global _config_stack
        _config_stack = cfg

    def __getattr__(self, name):
        """Resolve a configuration value or user option by name.

        name is the name of the configuration value or user option.
        """
        for cfg in _config_stack:
            if name in cfg:
                return cfg[name]

        raise AttributeError("\"%s\" is not a valid configuration value or user option" % name)

    def build_macros(self):
        """Return the dictionary of platform specific build macros."""
        return self._macros

    def set_build_macros(self, macros):
        """Set the dictionary of build macros to be use when generating
        Makefiles.

        macros is the dictionary of platform specific build macros.
        """
        self._macros = macros
class _UniqueList:
"""A limited list that ensures all its elements are unique.
"""
def __init__(self, value=None):
"""Initialise the instance.
value is the initial value of the list.
"""
if value is None:
self._list = []
else:
self._list = value
def append(self, value):
"""Append a value to the list if it isn't already present.
value is the value to append.
"""
if value not in self._list:
self._list.append(value)
def lextend(self, value):
"""A normal list extend ignoring the uniqueness.
value is the list of elements to append.
"""
self._list.extend(value)
def extend(self, value):
"""Append each element of a value to a list if it isn't already
present.
value is the list of elements to append.
"""
for el in value:
self.append(el)
def as_list(self):
"""Return the list as a raw list.
"""
return self._list
class _Macro:
    """A Makefile macro that can be manipulated as a list of values."""

    def __init__(self, name, value):
        """Initialise the instance.

        name is the name of the macro.
        value is the initial value of the macro.
        """
        self._name = name
        self.set(value)

    def set(self, value):
        """Explicitly set the value of the macro.

        value may be a string, a list of strings or a _UniqueList
        instance.
        """
        self._macro = []

        # Normalise a _UniqueList to its raw list form.
        if isinstance(value, _UniqueList):
            value = value.as_list()

        if type(value) == list:
            self.extend(value)
        else:
            self.append(value)

    def append(self, value):
        """Append a value to the macro, silently dropping empty values."""
        if value:
            self._macro.append(value)

    def extend(self, value):
        """Append each element of value to the macro.

        value is the list of elements to append.
        """
        for item in value:
            self.append(item)

    def remove(self, value):
        """Remove a value from the macro; absence is not an error."""
        try:
            self._macro.remove(value)
        except:
            pass

    def as_list(self):
        """Return the macro as a raw list."""
        return self._macro
class Makefile:
"""The base class for the different types of Makefiles.
"""
    def __init__(self, configuration, console=0, qt=0, opengl=0, python=0,
                 threaded=0, warnings=1, debug=0, dir=None,
                 makefile="Makefile", installs=None, universal=None,
                 arch=None, deployment_target=None):
        """Initialise an instance of the target.  All the macros are left
        unchanged allowing scripts to manipulate them at will.

        configuration is the current configuration.
        console is set if the target is a console (rather than windows) target.
        qt is set if the target uses Qt.  For Qt v4 a list of Qt libraries may
        be specified and a simple non-zero value implies QtCore and QtGui.
        opengl is set if the target uses OpenGL.
        python is set if the target #includes Python.h.
        threaded is set if the target requires thread support.  It is
        automatically set if the target uses Qt and Qt has thread support
        enabled.
        warnings is set if compiler warning messages are required.
        debug is set if debugging symbols should be generated.
        dir is the directory for build files and Makefiles.
        makefile is the name of the Makefile.
        installs is a list of extra install targets.  Each element is a two
        part list, the first of which is the source and the second is the
        destination.  If the source is another list then it is a set of source
        files and the destination is a directory.  If the destination is None
        then the source is a command to run.
        universal is the name of the SDK if the target is a MacOS/X universal
        binary.  If it is None then the value is taken from the configuration.
        arch is the space separated MacOS/X architectures to build.  If it is
        None then it is taken from the configuration.
        deployment_target is the MacOS/X deployment target.  If it is None
        then it is taken from the configuration.
        """
        if qt:
            if not hasattr(configuration, "qt_version"):
                error("The target uses Qt but pyqtconfig has not been imported.")
            # For Qt v4 interpret Qt support as meaning link against the core
            # and GUI libraries (which corresponds to the default qmake
            # configuration).  Also allow a list of Qt v4 modules to be
            # specified.
            if configuration.qt_version >= 0x040000:
                if type(qt) != list:
                    qt = ["QtCore", "QtGui"]
            self._threaded = configuration.qt_threaded
        else:
            self._threaded = threaded
        self.config = configuration
        self.console = console
        self._qt = qt
        self._opengl = opengl
        self._python = python
        self._warnings = warnings
        self._debug = debug
        self._makefile = makefile
        self._installs = installs
        # QT_LIBINFIX; filled in later by finalise() from qconfig.pri.
        self._infix = ""
        # Make sure the destination directory is an absolute path.
        if dir:
            self.dir = os.path.abspath(dir)
        else:
            self.dir = os.getcwd()
        # Assume we are building in the source tree.
        self._src_dir = self.dir
        if universal is None:
            self._universal = configuration.universal
        else:
            self._universal = universal
        if arch is None:
            self._arch = configuration.arch
        else:
            self._arch = arch
        if deployment_target is None:
            self._deployment_target = configuration.deployment_target
        else:
            self._deployment_target = deployment_target
        self._finalised = 0
        # Copy the macros and convert them all to instance lists.
        macros = configuration.build_macros()
        for m in list(macros.keys()):
            # Allow the user to override the default.
            try:
                val = getattr(configuration, m)
            except AttributeError:
                val = macros[m]
            # These require special handling as they are (potentially) a set of
            # space separated values rather than a single value that might
            # contain spaces.
            if m in ("DEFINES", "CONFIG") or m[:6] in ("INCDIR", "LIBDIR"):
                val = val.split()
            # We also want to treat lists of libraries in the same way so that
            # duplicates get eliminated.
            if m[:4] == "LIBS":
                val = val.split()
            self.__dict__[m] = _Macro(m, val)
        # This is used to alter the configuration more significantly than can
        # be done with just configuration files.
        self.generator = self.optional_string("MAKEFILE_GENERATOR", "UNIX")
        # These are what configuration scripts normally only need to change.
        self.extra_cflags = []
        self.extra_cxxflags = []
        self.extra_defines = []
        self.extra_include_dirs = []
        self.extra_lflags = []
        self.extra_lib_dirs = []
        self.extra_libs = []
        self.extra_source_dirs = []
        # Get these once and make them available to sub-classes.
        if sys.platform == "win32":
            def_copy = "copy"
            def_rm = "del"
            def_mkdir = "mkdir"
            def_chk_dir_exists = "if not exist"
        else:
            def_copy = "cp -f"
            def_rm = "rm -f"
            def_mkdir = "mkdir -p"
            def_chk_dir_exists = "test -d"
        self.copy = self.optional_string("COPY", def_copy)
        self.rm = self.optional_string("DEL_FILE", def_rm)
        self.mkdir = self.optional_string("MKDIR", def_mkdir)
        self.chkdir = self.optional_string("CHK_DIR_EXISTS", def_chk_dir_exists)
    def finalise(self):
        """Finalise the macros by doing any consolidation that isn't specific
        to a Makefile.

        This merges the per-target extra_* lists, the platform build macros
        and the optional Python/Qt/OpenGL specific values into the CFLAGS,
        CXXFLAGS, DEFINES, INCDIR, LFLAGS, LIBDIR and LIBS macros.  It must
        only be called once (it has side effects); _finalised records this.
        """
        # Extract the things we might need from the Windows Qt configuration.
        # Note that we used to think that if Qt was built with exceptions, RTTI
        # and STL support enabled then anything that linked against it also
        # needed the same flags.  However, detecting this was broken for some
        # time and nobody complained.  For the moment we'll leave the code in
        # but it will never be used.
        if self._qt:
            wcfg = self.config.qt_winconfig.split()
            win_shared = ("shared" in wcfg)
            win_exceptions = ("exceptions" in wcfg)
            win_rtti = ("rtti" in wcfg)
            win_stl = ("stl" in wcfg)
            qt_version = self.config.qt_version
        else:
            win_shared = 1
            win_exceptions = 0
            win_rtti = 0
            win_stl = 0
            qt_version = 0
        # Get what we are going to transform.  Each value starts from the
        # target's extra_* list followed by the corresponding platform macro.
        cflags = _UniqueList()
        cflags.extend(self.extra_cflags)
        cflags.extend(self.optional_list("CFLAGS"))
        cxxflags = _UniqueList()
        cxxflags.extend(self.extra_cxxflags)
        cxxflags.extend(self.optional_list("CXXFLAGS"))
        defines = _UniqueList()
        defines.extend(self.extra_defines)
        defines.extend(self.optional_list("DEFINES"))
        incdir = _UniqueList(["."])
        incdir.extend(self.extra_include_dirs)
        incdir.extend(self.optional_list("INCDIR"))
        lflags = _UniqueList()
        lflags.extend(self.extra_lflags)
        lflags.extend(self.optional_list("LFLAGS"))
        libdir = _UniqueList()
        libdir.extend(self.extra_lib_dirs)
        libdir.extend(self.optional_list("LIBDIR"))
        # Handle MacOS/X specific configuration (per-architecture flags and
        # the universal binary sysroot).
        if sys.platform == 'darwin':
            mac_cflags = []
            mac_lflags = []
            for a in self._arch.split():
                aflag = '-arch ' + a
                mac_cflags.append(aflag)
                mac_lflags.append(aflag)
            if self._universal:
                mac_cflags.append('-isysroot %s' % self._universal)
                mac_lflags.append('-Wl,-syslibroot,%s' % self._universal)
            cflags.lextend(mac_cflags)
            cxxflags.lextend(mac_cflags)
            lflags.lextend(mac_lflags)
        # Don't use a unique list as libraries may need to be searched more
        # than once.  Also MacOS/X uses the form "-framework lib" so we don't
        # want to lose the multiple "-framework".
        libs = []
        for l in self.extra_libs:
            libs.append(self.platform_lib(l))
            if self._qt:
                libs.extend(self._dependent_libs(l))
        libs.extend(self.optional_list("LIBS"))
        rpaths = _UniqueList()
        for l in self.extra_lib_dirs:
            l_dir = os.path.dirname(l)
            # This is a hack to ignore PyQt's internal support libraries.
            if '/qpy/' in l_dir:
                continue
            # Ignore relative directories.  This is really a hack to handle
            # SIP v3 inter-module linking.
            if l_dir in ("", ".", ".."):
                continue
            rpaths.append(l)
        if self._python:
            incdir.append(self.config.py_inc_dir)
            incdir.append(self.config.py_conf_inc_dir)
            if sys.platform == "cygwin":
                libdir.append(self.config.py_lib_dir)
                py_lib = "python%u.%u" % ((self.config.py_version >> 16), ((self.config.py_version >> 8) & 0xff))
                libs.append(self.platform_lib(py_lib))
            elif sys.platform == "win32":
                libdir.append(self.config.py_lib_dir)
                py_lib = "python%u%u" % ((self.config.py_version >> 16), ((self.config.py_version >> 8) & 0xff))
                # For Borland use the OMF version of the Python library if it
                # exists, otherwise assume that Python was built with Borland
                # and use the normal library.
                if self.generator == "BMAKE":
                    bpy_lib = py_lib + "_bcpp"
                    bpy_lib_path = os.path.join(self.config.py_lib_dir, self.platform_lib(bpy_lib))
                    if os.access(bpy_lib_path, os.F_OK):
                        py_lib = bpy_lib
                if self._debug:
                    py_lib = py_lib + "_d"
                    if self.generator != "MINGW":
                        cflags.append("/D_DEBUG")
                        cxxflags.append("/D_DEBUG")
                libs.append(self.platform_lib(py_lib))
        # Exception/RTTI/STL flags only apply to the Windows generators (and,
        # per the note above, are effectively dormant).
        if self.generator in ("MSVC", "MSVC.NET", "MSBUILD", "BMAKE"):
            if win_exceptions:
                cflags_exceptions = "CFLAGS_EXCEPTIONS_ON"
                cxxflags_exceptions = "CXXFLAGS_EXCEPTIONS_ON"
            else:
                cflags_exceptions = "CFLAGS_EXCEPTIONS_OFF"
                cxxflags_exceptions = "CXXFLAGS_EXCEPTIONS_OFF"
            cflags.extend(self.optional_list(cflags_exceptions))
            cxxflags.extend(self.optional_list(cxxflags_exceptions))
            if win_rtti:
                cflags_rtti = "CFLAGS_RTTI_ON"
                cxxflags_rtti = "CXXFLAGS_RTTI_ON"
            else:
                cflags_rtti = "CFLAGS_RTTI_OFF"
                cxxflags_rtti = "CXXFLAGS_RTTI_OFF"
            cflags.extend(self.optional_list(cflags_rtti))
            cxxflags.extend(self.optional_list(cxxflags_rtti))
            if win_stl:
                cflags_stl = "CFLAGS_STL_ON"
                cxxflags_stl = "CXXFLAGS_STL_ON"
            else:
                cflags_stl = "CFLAGS_STL_OFF"
                cxxflags_stl = "CXXFLAGS_STL_OFF"
            cflags.extend(self.optional_list(cflags_stl))
            cxxflags.extend(self.optional_list(cxxflags_stl))
        # Select the debug/release and (Windows) runtime-library macro names.
        if self._debug:
            if win_shared:
                cflags_mt = "CFLAGS_MT_DLLDBG"
                cxxflags_mt = "CXXFLAGS_MT_DLLDBG"
            else:
                cflags_mt = "CFLAGS_MT_DBG"
                cxxflags_mt = "CXXFLAGS_MT_DBG"
            cflags_debug = "CFLAGS_DEBUG"
            cxxflags_debug = "CXXFLAGS_DEBUG"
            lflags_debug = "LFLAGS_DEBUG"
        else:
            if win_shared:
                cflags_mt = "CFLAGS_MT_DLL"
                cxxflags_mt = "CXXFLAGS_MT_DLL"
            else:
                cflags_mt = "CFLAGS_MT"
                cxxflags_mt = "CXXFLAGS_MT"
            cflags_debug = "CFLAGS_RELEASE"
            cxxflags_debug = "CXXFLAGS_RELEASE"
            lflags_debug = "LFLAGS_RELEASE"
        if self.generator in ("MSVC", "MSVC.NET", "MSBUILD", "BMAKE"):
            if self._threaded:
                cflags.extend(self.optional_list(cflags_mt))
                cxxflags.extend(self.optional_list(cxxflags_mt))
            if self.console:
                cflags.extend(self.optional_list("CFLAGS_CONSOLE"))
                cxxflags.extend(self.optional_list("CXXFLAGS_CONSOLE"))
        cflags.extend(self.optional_list(cflags_debug))
        cxxflags.extend(self.optional_list(cxxflags_debug))
        lflags.extend(self.optional_list(lflags_debug))
        if self._warnings:
            cflags_warn = "CFLAGS_WARN_ON"
            cxxflags_warn = "CXXFLAGS_WARN_ON"
        else:
            cflags_warn = "CFLAGS_WARN_OFF"
            cxxflags_warn = "CXXFLAGS_WARN_OFF"
        cflags.extend(self.optional_list(cflags_warn))
        cxxflags.extend(self.optional_list(cxxflags_warn))
        if self._threaded:
            cflags.extend(self.optional_list("CFLAGS_THREAD"))
            cxxflags.extend(self.optional_list("CXXFLAGS_THREAD"))
            lflags.extend(self.optional_list("LFLAGS_THREAD"))
        if self._qt:
            # Get the name of the mkspecs directory.
            try:
                specd_base = self.config.qt_data_dir
            except AttributeError:
                specd_base = self.config.qt_dir
            mkspecs = os.path.join(specd_base, "mkspecs")
            if self.generator != "UNIX" and win_shared:
                defines.append("QT_DLL")
            if not self._debug:
                defines.append("QT_NO_DEBUG")
            if qt_version >= 0x040000:
                for mod in self._qt:
                    # Note that qmake doesn't define anything for QtHelp.
                    if mod == "QtCore":
                        defines.append("QT_CORE_LIB")
                    elif mod == "QtDeclarative":
                        defines.append("QT_DECLARATIVE_LIB")
                    elif mod == "QtGui":
                        defines.append("QT_GUI_LIB")
                    elif mod == "QtMultimedia":
                        defines.append("QT_MULTIMEDIA_LIB")
                    elif mod == "QtNetwork":
                        defines.append("QT_NETWORK_LIB")
                    elif mod == "QtOpenGL":
                        defines.append("QT_OPENGL_LIB")
                    elif mod == "QtScript":
                        defines.append("QT_SCRIPT_LIB")
                    elif mod == "QtScriptTools":
                        defines.append("QT_SCRIPTTOOLS_LIB")
                    elif mod == "QtSql":
                        defines.append("QT_SQL_LIB")
                    elif mod == "QtTest":
                        defines.append("QT_TEST_LIB")
                    elif mod == "QtWebKit":
                        defines.append("QT_WEBKIT_LIB")
                    elif mod == "QtXml":
                        defines.append("QT_XML_LIB")
                    elif mod == "QtXmlPatterns":
                        defines.append("QT_XMLPATTERNS_LIB")
                    elif mod == "phonon":
                        defines.append("QT_PHONON_LIB")
                    if qt_version >= 0x050000:
                        if mod == "QtTest":
                            defines.append("QT_GUI_LIB")
                        if mod in ("QtSql", "QtTest"):
                            defines.append("QT_WIDGETS_LIB")
            elif self._threaded:
                defines.append("QT_THREAD_SUPPORT")
            # Handle library directories.
            libdir_qt = self.optional_list("LIBDIR_QT")
            libdir.extend(libdir_qt)
            rpaths.extend(libdir_qt)
            if qt_version >= 0x040000:
                # Try and read QT_LIBINFIX from qconfig.pri.
                qconfig = os.path.join(mkspecs, "qconfig.pri")
                self._infix = self._extract_value(qconfig, "QT_LIBINFIX")
                # For Windows: the macros that define the dependencies on
                # Windows libraries.
                wdepmap = {
                    "QtCore": "LIBS_CORE",
                    "QtGui": "LIBS_GUI",
                    "QtNetwork": "LIBS_NETWORK",
                    "QtOpenGL": "LIBS_OPENGL",
                    "QtWebKit": "LIBS_WEBKIT"
                }
                # For Windows: the dependencies between Qt libraries.
                qt5_depmap = {
                    "QtDeclarative": ("QtXmlPatterns", "QtNetwork", "QtSql", "QtScript", "QtWidgets", "QtGui", "QtCore"),
                    "QtGui": ("QtPrintSupport", "QtWidgets", "QtCore"),
                    "QtHelp": ("QtNetwork", "QtSql", "QtWidgets", "QtGui", "QtCore"),
                    "QtMultimedia": ("QtGui", "QtCore"),
                    "QtNetwork": ("QtCore", ),
                    "QtOpenGL": ("QtWidgets", "QtGui", "QtCore"),
                    "QtScript": ("QtCore", ),
                    "QtScriptTools": ("QtScript", "QtGui", "QtCore"),
                    "QtSql": ("QtCore", ),
                    "QtSvg": ("QtXml", "QtWidgets", "QtGui", "QtCore"),
                    "QtTest": ("QtGui", "QtCore"),
                    "QtWebKit": ("QtNetwork", "QtWebKitWidgets", "QtWidgets", "QtGui", "QtCore"),
                    "QtXml": ("QtCore", ),
                    "QtXmlPatterns": ("QtNetwork", "QtCore"),
                    "QtDesigner": ("QtGui", "QtCore"),
                    "QAxContainer": ("Qt5AxBase", "QtWidgets", "QtGui", "QtCore")
                }
                qt4_depmap = {
                    "QtAssistant": ("QtNetwork", "QtGui", "QtCore"),
                    "QtDeclarative": ("QtNetwork", "QtGui", "QtCore"),
                    "QtGui": ("QtCore", ),
                    "QtHelp": ("QtSql", "QtGui", "QtCore"),
                    "QtMultimedia": ("QtGui", "QtCore"),
                    "QtNetwork": ("QtCore", ),
                    "QtOpenGL": ("QtGui", "QtCore"),
                    "QtScript": ("QtCore", ),
                    "QtScriptTools": ("QtScript", "QtGui", "QtCore"),
                    "QtSql": ("QtCore", ),
                    "QtSvg": ("QtXml", "QtGui", "QtCore"),
                    "QtTest": ("QtGui", "QtCore"),
                    "QtWebKit": ("QtNetwork", "QtGui", "QtCore"),
                    "QtXml": ("QtCore", ),
                    "QtXmlPatterns": ("QtNetwork", "QtCore"),
                    "phonon": ("QtGui", "QtCore"),
                    "QtDesigner": ("QtGui", "QtCore"),
                    "QAxContainer": ("QtGui", "QtCore")
                }
                if qt_version >= 0x050000:
                    qt_depmap = qt5_depmap
                else:
                    qt_depmap = qt4_depmap
                # The QtSql .prl file doesn't include QtGui as a dependency (at
                # least on Linux) so we explcitly set the dependency here for
                # everything.
                if "QtSql" in self._qt:
                    if "QtGui" not in self._qt:
                        self._qt.append("QtGui")
                # With Qt v4.2.0, the QtAssistantClient library is now a shared
                # library on UNIX.  The QtAssistantClient .prl file doesn't
                # include QtGui and QtNetwork as a dependency any longer.  This
                # seems to be a bug in Qt v4.2.0.  We explicitly set the
                # dependencies here.
                if qt_version >= 0x040200 and "QtAssistant" in self._qt:
                    if "QtGui" not in self._qt:
                        self._qt.append("QtGui")
                    if "QtNetwork" not in self._qt:
                        self._qt.append("QtNetwork")
                for mod in self._qt:
                    lib = self._qt_module_to_lib(mod)
                    libs.append(self.platform_lib(lib, self._is_framework(mod)))
                    if sys.platform == "win32":
                        # On Windows the dependent libraries seem to be in
                        # qmake.conf rather than the .prl file and the
                        # inter-dependencies between Qt libraries don't seem to
                        # be anywhere.
                        deps = _UniqueList()
                        if mod in list(wdepmap.keys()):
                            deps.extend(self.optional_list(wdepmap[mod]))
                        if mod in list(qt_depmap.keys()):
                            for qdep in qt_depmap[mod]:
                                # Ignore the dependency if it is explicitly
                                # linked.
                                if qdep not in self._qt:
                                    libs.append(self.platform_lib(self._qt_module_to_lib(qdep)))
                                    if qdep in list(wdepmap.keys()):
                                        deps.extend(self.optional_list(wdepmap[qdep]))
                        libs.extend(deps.as_list())
                    else:
                        libs.extend(self._dependent_libs(lib, self._is_framework(mod)))
            else:
                # Windows needs the version number appended if Qt is a DLL.
                qt_lib = self.config.qt_lib
                if self.generator in ("MSVC", "MSVC.NET", "MSBUILD", "BMAKE") and win_shared:
                    qt_lib = qt_lib + version_to_string(qt_version).replace(".", "")
                    if self.config.qt_edition == "non-commercial":
                        qt_lib = qt_lib + "nc"
                libs.append(self.platform_lib(qt_lib, self.config.qt_framework))
                libs.extend(self._dependent_libs(self.config.qt_lib))
            # Handle header directories.
            specd = os.path.join(mkspecs, "default")
            if not os.access(specd, os.F_OK):
                specd = os.path.join(mkspecs, self.config.platform)
            incdir.append(specd)
            qtincdir = self.optional_list("INCDIR_QT")
            if qtincdir:
                if qt_version >= 0x040000:
                    for mod in self._qt:
                        if mod == "QAxContainer":
                            incdir.append(os.path.join(qtincdir[0], "ActiveQt"))
                        elif self._is_framework(mod):
                            idir = libdir_qt[0]
                            if mod == "QtAssistant" and qt_version < 0x040202:
                                mod = "QtAssistantClient"
                            incdir.append(os.path.join(idir,
                                    mod + ".framework", "Headers"))
                            if qt_version >= 0x050000:
                                if mod == "QtGui":
                                    incdir.append(os.path.join(idir,
                                            "QtWidgets.framework", "Headers"))
                                    incdir.append(os.path.join(idir,
                                            "QtPrintSupport.framework",
                                            "Headers"))
                                elif mod == "QtWebKit":
                                    incdir.append(os.path.join(idir,
                                            "QtWebKitWidgets.framework",
                                            "Headers"))
                        else:
                            idir = qtincdir[0]
                            incdir.append(os.path.join(idir, mod))
                            if qt_version >= 0x050000:
                                if mod == "QtGui":
                                    incdir.append(os.path.join(idir,
                                            "QtWidgets"))
                                    incdir.append(os.path.join(idir,
                                            "QtPrintSupport"))
                                elif mod == "QtWebKit":
                                    incdir.append(os.path.join(idir,
                                            "QtWebKitWidgets"))
                # This must go after the module include directories.
                incdir.extend(qtincdir)
        if self._opengl:
            incdir.extend(self.optional_list("INCDIR_OPENGL"))
            lflags.extend(self.optional_list("LFLAGS_OPENGL"))
            libdir.extend(self.optional_list("LIBDIR_OPENGL"))
            libs.extend(self.optional_list("LIBS_OPENGL"))
        if self._qt or self._opengl:
            if qt_version < 0x040000 or self._opengl or "QtGui" in self._qt:
                incdir.extend(self.optional_list("INCDIR_X11"))
                libdir.extend(self.optional_list("LIBDIR_X11"))
                libs.extend(self.optional_list("LIBS_X11"))
        if self._threaded:
            libs.extend(self.optional_list("LIBS_THREAD"))
            libs.extend(self.optional_list("LIBS_RTMT"))
        else:
            libs.extend(self.optional_list("LIBS_RT"))
        if self.console:
            libs.extend(self.optional_list("LIBS_CONSOLE"))
        libs.extend(self.optional_list("LIBS_WINDOWS"))
        lflags.extend(self._platform_rpaths(rpaths.as_list()))
        # Save the transformed values.
        self.CFLAGS.set(cflags)
        self.CXXFLAGS.set(cxxflags)
        self.DEFINES.set(defines)
        self.INCDIR.set(incdir)
        self.LFLAGS.set(lflags)
        self.LIBDIR.set(libdir)
        self.LIBS.set(libs)
        # Don't do it again because it has side effects.
        self._finalised = 1
def _add_manifest(self, target=None):
"""Add the link flags for creating a manifest file.
"""
if target is None:
target = "$(TARGET)"
self.LFLAGS.append("/MANIFEST")
self.LFLAGS.append("/MANIFESTFILE:%s.manifest" % target)
def _is_framework(self, mod):
"""Return true if the given Qt module is a framework.
"""
return (self.config.qt_framework and (self.config.qt_version >= 0x040200 or mod != "QtAssistant"))
def _qt_module_to_lib(self, mname):
    """Return the name of the Qt library corresponding to a module.

    mname is the name of the module.  The returned name reflects the
    platform's naming conventions (debug suffixes, DLL version suffixes,
    the Qt5 "Qt5Xxx" rename) as derived from the configuration.
    """
    qt_version = self.config.qt_version

    if mname == "QtAssistant":
        # From Qt v4.2.2 on MacOS the module keeps its own name; elsewhere
        # the client library is called QtAssistantClient.
        if qt_version >= 0x040202 and sys.platform == "darwin":
            lib = mname
        else:
            lib = "QtAssistantClient"
    else:
        lib = mname

    lib += self._infix

    if self._debug:
        # Debug naming conventions differ per platform: "d" suffix on
        # Windows, "_debug" on MacOS (non-framework) and pre-Qt v4.2.
        if sys.platform == "win32":
            lib = lib + "d"
        elif sys.platform == "darwin":
            if not self._is_framework(mname):
                lib = lib + "_debug"
        elif qt_version < 0x040200:
            lib = lib + "_debug"

    qt5_rename = False

    if sys.platform == "win32" and "shared" in self.config.qt_winconfig.split():
        if (mname in ("QtCore", "QtDeclarative", "QtDesigner", "QtGui",
                "QtHelp", "QtMultimedia", "QtNetwork", "QtOpenGL",
                "QtScript", "QtScriptTools", "QtSql", "QtSvg",
                "QtTest", "QtWebKit", "QtXml", "QtXmlPatterns",
                "phonon", "QAxContainer", "QtPrintSupport",
                "QtWebKitWidgets", "QtWidgets") or
            (qt_version >= 0x040200 and mname == "QtAssistant")):
            if mname == "QAxContainer":
                # "QAxContainer" -> "Qt5AxContainer" (drop the leading "Q").
                if qt_version >= 0x050000:
                    lib = "Qt5" + lib[1:]
            elif qt_version >= 0x050000:
                qt5_rename = True
            else:
                # Qt v4 shared libraries carry a "4" suffix on Windows.
                lib = lib + "4"
    elif sys.platform.startswith("linux") and qt_version >= 0x050000:
        qt5_rename = True

    if qt5_rename:
        # "QtXxx" -> "Qt5Xxx".
        lib = "Qt5" + lib[2:]

    return lib
def optional_list(self, name):
    """Return an optional Makefile macro as a (possibly empty) list.

    name is the name of the macro.
    """
    macro = self.__dict__[name]
    return macro.as_list()
def optional_string(self, name, default=""):
    """Return an optional Makefile macro joined into a single string.

    name is the name of the macro.
    default is the value returned when the macro is empty.
    """
    value = ' '.join(self.optional_list(name))
    return value if value else default
def required_string(self, name):
    """Return a required Makefile macro as a string.

    name is the name of the macro.  A ValueError is raised if the macro
    has no value.
    """
    value = self.optional_string(name)

    if value:
        return value

    raise ValueError("\"%s\" must have a non-empty value" % name)
def _platform_rpaths(self, rpaths):
    """Return a list of platform specific rpath linker flags.

    rpaths is the canonical list of rpaths.
    """
    prefix = self.optional_string("RPATH")

    if not prefix:
        # The macro was renamed in Qt v4.7.
        prefix = self.optional_string("LFLAGS_RPATH")

    if not prefix:
        return []

    return [_quote(prefix + r) for r in rpaths]
def platform_lib(self, clib, framework=0):
    """Return a library name in platform specific form.

    clib is the library name in canonical form.
    framework is set if the library is implemented as a MacOS framework.
    """
    if self.generator in ("MSVC", "MSVC.NET", "MSBUILD", "BMAKE"):
        return clib + ".lib"

    if sys.platform == "darwin" and framework:
        return "-framework " + clib

    return "-l" + clib
def _dependent_libs(self, clib, framework=0):
    """Return a list of additional libraries (in platform specific form)
    that must be linked with a library.

    clib is the library name in canonical form.
    framework is set if the library is implemented as a MacOS framework.

    The dependencies are read from the QMAKE_PRL_LIBS entry of the
    library's qmake .prl file (if any).
    """
    # The location of the .prl file depends on the platform's library
    # naming convention.
    if self.generator in ("MSVC", "MSVC.NET", "MSBUILD", "BMAKE"):
        prl_name = os.path.join(self.config.qt_lib_dir, clib + ".prl")
    elif sys.platform == "darwin" and framework:
        prl_name = os.path.join(self.config.qt_lib_dir, clib + ".framework", clib + ".prl")
    else:
        prl_name = os.path.join(self.config.qt_lib_dir, "lib" + clib + ".prl")

    libs = self._extract_value(prl_name, "QMAKE_PRL_LIBS").split()

    if self.config.qt_version >= 0x050000:
        # Qt v5 split QtGui and QtWebKit; the widget halves are implicit
        # dependencies here.
        xtra_libs = []

        if clib in ("QtGui", "Qt5Gui"):
            xtra_libs.append("QtWidgets")
            xtra_libs.append("QtPrintSupport")
        elif clib in ("QtWebKit", "Qt5WebKit"):
            xtra_libs.append("QtWebKitWidgets")

        for xtra in xtra_libs:
            libs.extend(
                    self.platform_lib(
                            self._qt_module_to_lib(xtra), framework).split())

    return libs
def _extract_value(self, fname, vname):
"""Return the stripped value from a name=value line in a file.
fname is the name of the file.
vname is the name of the value.
"""
value = ""
if os.access(fname, os.F_OK):
try:
f = open(fname, "r")
except IOError:
error("Unable to open \"%s\"" % fname)
line = f.readline()
while line:
line = line.strip()
if line and line[0] != "#":
eq = line.find("=")
if eq > 0 and line[:eq].strip() == vname:
value = line[eq + 1:].strip()
break
line = f.readline()
f.close()
return value
def parse_build_file(self, filename):
    """
    Parse a build file and return the corresponding dictionary.

    filename is the name of the build file.  If it is a dictionary instead
    then its contents are validated.

    The returned dictionary always has "target", "sources", "headers",
    "moc_headers" and a derived "objects" entry.
    """
    if type(filename) == dict:
        bfname = "dictionary"
        bdict = filename
    else:
        if os.path.isabs(filename):
            # We appear to be building out of the source tree.
            self._src_dir = os.path.dirname(filename)
            bfname = filename
        else:
            bfname = os.path.join(self.dir, filename)

        bdict = {}

        try:
            f = open(bfname, "r")
        except IOError:
            error("Unable to open \"%s\"" % bfname)

        line_nr = 1
        line = f.readline()

        while line:
            line = line.strip()

            # Skip blank lines and comments.
            if line and line[0] != "#":
                eq = line.find("=")

                if eq <= 0:
                    error("\"%s\" line %d: Line must be in the form 'name = value value...'." % (bfname, line_nr))

                bdict[line[:eq].strip()] = line[eq + 1:].strip()

            line_nr = line_nr + 1
            line = f.readline()

        f.close()

    # Check the compulsory values.
    for i in ("target", "sources"):
        try:
            bdict[i]
        except KeyError:
            error("\"%s\" is missing from \"%s\"." % (i, bfname))

    # Get the optional values.
    for i in ("headers", "moc_headers"):
        try:
            bdict[i]
        except KeyError:
            bdict[i] = ""

    # Generate the list of objects.
    if self.generator in ("MSVC", "MSVC.NET", "MSBUILD", "BMAKE"):
        ext = ".obj"
    else:
        ext = ".o"

    olist = []

    for f in bdict["sources"].split():
        root, discard = os.path.splitext(f)
        olist.append(root + ext)

    # moc generated sources get a "moc_" prefixed object file each.
    for f in bdict["moc_headers"].split():
        if not self._qt:
            error("\"%s\" defines \"moc_headers\" for a non-Qt module." % bfname)

        root, discard = os.path.splitext(f)
        olist.append("moc_" + root + ext)

    bdict["objects"] = ' '.join(olist)

    return bdict
def clean_build_file_objects(self, mfile, build):
    """Write the clean commands for a build file's generated artifacts.

    mfile is the file object.
    build is the dictionary created from the build file.
    """
    mfile.write("\t-%s $(TARGET)\n" % self.rm)

    for obj in build["objects"].split():
        mfile.write("\t-%s %s\n" % (self.rm, obj))

    for hdr in build["moc_headers"].split():
        base = os.path.splitext(hdr)[0]
        mfile.write("\t-%s moc_%s.cpp\n" % (self.rm, base))
def ready(self):
    """Finalise the Makefile if that hasn't already been done."""
    if self._finalised:
        return

    self.finalise()
def generate(self):
    """Generate the Makefile.

    The file is written as self._makefile in self.dir after making sure
    the Makefile has been finalised and the directory exists.
    """
    self.ready()

    # Make sure the destination directory exists.  Only narrow this to
    # filesystem errors - the original bare except also swallowed
    # KeyboardInterrupt and SystemExit.
    try:
        os.makedirs(self.dir)
    except OSError:
        # It probably already exists; if it is genuinely uncreatable the
        # open() below will report a sensible error.
        pass

    mfname = os.path.join(self.dir, self._makefile)

    try:
        mfile = open(mfname, "w")
    except IOError:
        error("Unable to create \"%s\"" % mfname)

    self.generate_macros_and_rules(mfile)
    self.generate_target_default(mfile)
    self.generate_target_install(mfile)

    if self._installs:
        # Accept a single (src, dst) pair as well as a list of them.
        if not isinstance(self._installs, list):
            self._installs = [self._installs]

        for src, dst in self._installs:
            if dst is None:
                # A bare command rather than a file to install.
                mfile.write("\t%s\n" % src)
            else:
                self.install_file(mfile, src, dst)

    self.generate_target_clean(mfile)

    mfile.close()
def generate_macros_and_rules(self, mfile):
    """The default implementation of the macros and rules generation.

    mfile is the file object.

    This writes the tool macros (CC, CXX, LINK, MOC), the flag macros
    (CPPFLAGS etc.), an optional VPATH and the implicit suffix rules for
    the configured generator.
    """
    if self._deployment_target:
        mfile.write("export MACOSX_DEPLOYMENT_TARGET = %s\n" % self._deployment_target)

    # Really we want to be testing for nmake here instead since it does not
    # grok '?='.
    maybe_env = '=' if sys.platform == "win32" else '?='
    mfile.write("CC %s %s\n" % (maybe_env, self.required_string("CC")))
    mfile.write("CXX %s %s\n" % (maybe_env, self.required_string("CXX")))
    mfile.write("LINK %s %s\n" % (maybe_env, self.required_string("LINK")))

    cppflags = []

    if not self._debug:
        cppflags.append("-DNDEBUG")

    for f in self.optional_list("DEFINES"):
        cppflags.append("-D" + f)

    for f in self.optional_list("INCDIR"):
        cppflags.append("-I" + _quote(f))

    libs = []

    if self.generator in ("MSVC", "MSVC.NET", "MSBUILD"):
        libdir_prefix = "/LIBPATH:"
    else:
        libdir_prefix = "-L"

    for ld in self.optional_list("LIBDIR"):
        if sys.platform == "darwin" and self.config.qt_framework:
            # Frameworks need -F on both the compile and link lines.
            fflag = "-F" + _quote(ld)
            libs.append(fflag)
            cppflags.append(fflag)

        libs.append(libdir_prefix + _quote(ld))

    libs.extend(self.optional_list("LIBS"))

    mfile.write("CPPFLAGS = %s\n" % ' '.join(cppflags))
    mfile.write("CFLAGS = %s\n" % self.optional_string("CFLAGS"))
    mfile.write("CXXFLAGS = %s\n" % self.optional_string("CXXFLAGS"))
    mfile.write("LFLAGS = %s\n" % self.optional_string("LFLAGS"))
    mfile.write("LIBS = %s\n" % ' '.join(libs))

    if self._qt:
        mfile.write("MOC = %s\n" % _quote(self.required_string("MOC")))

    # Let make find sources outside the build directory.
    vpath = _UniqueList(self.extra_source_dirs)
    if self._src_dir != self.dir:
        vpath.append(self._src_dir)
    if vpath.as_list():
        mfile.write("VPATH = %s\n\n" % " ".join(vpath.as_list()))

    # These probably don't matter.
    if self.generator == "MINGW":
        mfile.write(".SUFFIXES: .cpp .cxx .cc .C .c\n\n")
    elif self.generator == "UNIX":
        mfile.write(".SUFFIXES: .c .o .cpp .cc .cxx .C\n\n")
    else:
        mfile.write(".SUFFIXES: .c .cpp .cc .cxx .C\n\n")

    # The implicit rules; nmake, Borland make and POSIX make each use a
    # different batch/inference syntax.
    if self.generator in ("MSVC", "MSVC.NET", "MSBUILD"):
        mfile.write("""
{.}.cpp{}.obj::
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -Fo @<<
\t$<
<<

{.}.cc{}.obj::
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -Fo @<<
\t$<
<<

{.}.cxx{}.obj::
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -Fo @<<
\t$<
<<

{.}.C{}.obj::
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -Fo @<<
\t$<
<<

{.}.c{}.obj::
\t$(CC) -c $(CFLAGS) $(CPPFLAGS) -Fo @<<
\t$<
<<
""")
    elif self.generator == "BMAKE":
        mfile.write("""
.cpp.obj:
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -o$@ $<

.cc.obj:
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -o$@ $<

.cxx.obj:
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -o$@ $<

.C.obj:
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -o$@ $<

.c.obj:
\t$(CC) -c $(CFLAGS) $(CPPFLAGS) -o$@ $<
""")
    else:
        mfile.write("""
.cpp.o:
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -o $@ $<

.cc.o:
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -o $@ $<

.cxx.o:
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -o $@ $<

.C.o:
\t$(CXX) -c $(CXXFLAGS) $(CPPFLAGS) -o $@ $<

.c.o:
\t$(CC) -c $(CFLAGS) $(CPPFLAGS) -o $@ $<
""")
def generate_target_default(self, mfile):
    """Write the default (all) target; sub-classes extend this.

    mfile is the file object.
    """
    mfile.write("\nall:\n")
def generate_target_install(self, mfile):
    """Write the install target; sub-classes extend this.

    mfile is the file object.
    """
    mfile.write("\ninstall:\n")
def generate_target_clean(self, mfile):
    """Write the clean target; sub-classes extend this.

    mfile is the file object.
    """
    mfile.write("\nclean:\n")
def install_file(self, mfile, src, dst, strip=0):
    """Write commands that install one or more files in a directory.

    mfile is the file object.
    src is the name of a single file to install or a list of names.
    dst is the name of the destination directory.
    strip is set if the files should be stripped after being installed.
    """
    if self.generator == "UNIX":
        # Help package builders stage into an alternative root.
        dst = "$(DESTDIR)" + dst

    # Create the destination directory if needed.
    mfile.write("\t@%s %s " % (self.chkdir, _quote(dst)))
    if self.generator == "UNIX":
        mfile.write("|| ")
    mfile.write("%s %s\n" % (self.mkdir, _quote(dst)))

    sources = src if type(src) == list else [src]

    # Get the strip command if needed.
    strip_cmd = ""
    if strip:
        strip_cmd = self.optional_string("STRIP")

        if not strip_cmd:
            strip = 0

    for name in sources:
        target = _quote(os.path.join(dst, os.path.basename(name)))
        mfile.write("\t%s %s %s\n" % (self.copy, _quote(name), target))

        if strip:
            mfile.write("\t%s %s\n" % (strip_cmd, target))
class ParentMakefile(Makefile):
    """A Makefile that just recurses into a set of sub-directories."""

    def __init__(self, configuration, subdirs, dir=None, makefile="Makefile",
            installs=None):
        """Initialise an instance of a parent Makefile.

        subdirs is the sequence of subdirectories.
        """
        Makefile.__init__(self, configuration, dir=dir, makefile=makefile,
                installs=installs)

        self._subdirs = subdirs

    def generate_macros_and_rules(self, mfile):
        """A parent Makefile needs no macros or rules of its own."""
        pass

    def generate_target_default(self, mfile):
        """Write the default target, recursing into each sub-directory."""
        self._subdir_target(mfile)

    def generate_target_install(self, mfile):
        """Write the install target, recursing into each sub-directory."""
        self._subdir_target(mfile, "install")

    def generate_target_clean(self, mfile):
        """Write the clean target, recursing into each sub-directory."""
        self._subdir_target(mfile, "clean")

    def _subdir_target(self, mfile, target="all"):
        """Write *target* so that it invokes make in every sub-directory.

        mfile is the file object.
        target is the name of the target.
        """
        tname = "" if target == "all" else " " + target

        mfile.write("\n" + target + ":\n")

        for sub in self._subdirs:
            if self.generator == "MINGW":
                mfile.write("\t@$(MAKE) -C %s%s\n" % (sub, tname))
            elif self.generator == "UNIX":
                mfile.write("\t@(cd %s; $(MAKE)%s)\n" % (sub, tname))
            else:
                # Generators whose make has no -C equivalent.
                mfile.write("\tcd %s\n" % sub)
                mfile.write("\t$(MAKE)%s\n" % tname)
                mfile.write("\t@cd ..\n")
class PythonModuleMakefile(Makefile):
    """The class that represents a Python module Makefile.
    """
    def __init__(self, configuration, dstdir, srcdir=None, dir=None,
            makefile="Makefile", installs=None):
        """Initialise an instance of a Python module Makefile.

        dstdir is the name of the directory where the module's Python code
        will be installed.
        srcdir is the name of the directory (relative to the directory in
        which the Makefile will be created) containing the module's Python
        code.  It defaults to the same directory.
        """
        Makefile.__init__(self, configuration, dir=dir, makefile=makefile, installs=installs)

        if not srcdir:
            srcdir = "."

        if dir:
            self._moddir = os.path.join(dir, srcdir)
        else:
            self._moddir = srcdir

        self._srcdir = srcdir
        self._dstdir = dstdir

    def generate_macros_and_rules(self, mfile):
        """Generate the macros and rules.

        mfile is the file object.
        """
        # We don't want them.
        pass

    def generate_target_install(self, mfile):
        """Generate the install target.

        mfile is the file object.
        """
        Makefile.generate_target_install(self, mfile)

        for root, dirs, files in os.walk(self._moddir):
            # Do not recurse into certain directories.  Removing them from
            # dirs in-place is how os.walk() is told to prune.
            for skip in (".svn", "CVS"):
                if skip in dirs:
                    dirs.remove(skip)

            # The path of the current directory relative to the module root.
            tail = root[len(self._moddir):]

            flist = []
            for f in files:
                # The Makefile itself is never installed.
                if f == "Makefile":
                    continue

                if os.path.isfile(os.path.join(root, f)):
                    flist.append(os.path.join(self._srcdir + tail, f))

            self.install_file(mfile, flist, self._dstdir + tail)
class ModuleMakefile(Makefile):
    """The class that represents a Python extension module Makefile.
    """
    def __init__(self, configuration, build_file, install_dir=None, static=0,
            console=0, qt=0, opengl=0, threaded=0, warnings=1, debug=0,
            dir=None, makefile="Makefile", installs=None, strip=1,
            export_all=0, universal=None, arch=None,
            deployment_target=None):
        """Initialise an instance of a module Makefile.

        build_file is the file containing the target specific information.
        If it is a dictionary instead then its contents are validated.
        install_dir is the directory the target will be installed in.
        static is set if the module should be built as a static library.
        strip is set if the module should be stripped of unneeded symbols
        when installed.  The default is 1.
        export_all is set if all the module's symbols should be exported
        rather than just the module's initialisation function.  Exporting
        all symbols increases the size of the module and slows down module
        load times but may avoid problems with modules that use exceptions.
        The default is 0.
        """
        Makefile.__init__(self, configuration, console, qt, opengl, 1, threaded, warnings, debug, dir, makefile, installs, universal, arch, deployment_target)

        self._build = self.parse_build_file(build_file)
        self._install_dir = install_dir
        self.static = static

        self._manifest = ("embed_manifest_dll" in self.optional_list("CONFIG"))

        # Don't strip or restrict the exports if this is a debug or static
        # build.
        if debug or static:
            self._strip = 0
            self._limit_exports = 0
        else:
            self._strip = strip
            self._limit_exports = not export_all

        # Save the target name for later.
        self._target = self._build["target"]

        # The name of the module entry point is Python version specific.
        if self.config.py_version >= 0x030000:
            self._entry_point = "PyInit_%s" % self._target
        else:
            self._entry_point = "init%s" % self._target

        if sys.platform != "win32" and static:
            self._target = "lib" + self._target

        if sys.platform == "win32" and debug:
            self._target = self._target + "_d"

    def finalise(self):
        """Finalise the macros common to all module Makefiles.
        """
        if self.console:
            lflags_console = "LFLAGS_CONSOLE"
        else:
            lflags_console = "LFLAGS_WINDOWS"

        if self.static:
            self.DEFINES.append("SIP_STATIC_MODULE")
        else:
            self.CFLAGS.extend(self.optional_list("CFLAGS_SHLIB"))
            self.CXXFLAGS.extend(self.optional_list("CXXFLAGS_SHLIB"))

            lflags_dll = self.optional_list("LFLAGS_DLL")

            if lflags_dll:
                self.LFLAGS.extend(lflags_dll)
            elif self.console:
                lflags_console = "LFLAGS_CONSOLE_DLL"
            else:
                lflags_console = "LFLAGS_WINDOWS_DLL"

            if self._manifest:
                self._add_manifest()

            # We use this to explicitly create bundles on MacOS.  Apple's
            # Python can handle extension modules that are bundles or dynamic
            # libraries, but python.org versions need bundles (unless built
            # with DYNLOADFILE=dynload_shlib.o).
            if sys.platform == "darwin":
                lflags_plugin = ["-bundle"]
            else:
                lflags_plugin = self.optional_list("LFLAGS_PLUGIN")

            if not lflags_plugin:
                lflags_plugin = self.optional_list("LFLAGS_SHLIB")

            self.LFLAGS.extend(lflags_plugin)

        self.LFLAGS.extend(self.optional_list(lflags_console))

        if sys.platform == "darwin":
            # Let the interpreter resolve the Python symbols at load time.
            self.LFLAGS.append("-undefined dynamic_lookup")

        Makefile.finalise(self)

        if not self.static:
            if self.optional_string("AIX_SHLIB"):
                # AIX needs a lot of special handling.
                if self.required_string('LINK') == 'g++':
                    # g++ is used for linking.
                    # For SIP v4 and g++:
                    # 1.) Import the python symbols
                    aix_lflags = ['-Wl,-bI:%s/python.exp' % self.config.py_lib_dir]

                    if self._limit_exports:
                        aix_lflags.append('-Wl,-bnoexpall')
                        aix_lflags.append('-Wl,-bnoentry')
                        aix_lflags.append('-Wl,-bE:%s.exp' % self._target)
                else:
                    # IBM VisualAge C++ is used for linking.
                    # For SIP v4 and xlC:
                    # 1.) Create a shared object
                    # 2.) Import the python symbols
                    aix_lflags = ['-qmkshrobj',
                                  '-bI:%s/python.exp' % self.config.py_lib_dir]

                    if self._limit_exports:
                        aix_lflags.append('-bnoexpall')
                        aix_lflags.append('-bnoentry')
                        aix_lflags.append('-bE:%s.exp' % self._target)

                self.LFLAGS.extend(aix_lflags)
            else:
                if self._limit_exports:
                    if sys.platform[:5] == 'linux':
                        self.LFLAGS.extend(['-Wl,--version-script=%s.exp' % self._target])
                    elif sys.platform[:5] == 'sunos':
                        if self.required_string('LINK') == 'g++':
                            self.LFLAGS.extend(['-Wl,-z,noversion', '-Wl,-M,%s.exp' % self._target])
                        else:
                            # NOTE(review): '-z' 'noversion' are adjacent
                            # string literals and concatenate to
                            # '-znoversion' - confirm the Solaris linker
                            # accepts the attached form (a comma may be
                            # missing here).
                            self.LFLAGS.extend(['-z' 'noversion', '-M', '%s.exp' % self._target])
                    elif sys.platform[:5] == 'hp-ux':
                        self.LFLAGS.extend(['-Wl,+e,%s' % self._entry_point])
                    elif sys.platform[:5] == 'irix' and self.required_string('LINK') != 'g++':
                        # Doesn't work when g++ is used for linking on IRIX.
                        self.LFLAGS.extend(['-Wl,-exported_symbol,%s' % self._entry_point])

        # Force the shared linker if there is one.
        link_shlib = self.optional_list("LINK_SHLIB")

        if link_shlib:
            self.LINK.set(link_shlib)

        # This made an appearence in Qt v4.4rc1 and breaks extension modules
        # so remove it.  It was removed at my request but some stupid distros
        # may have kept it.
        self.LFLAGS.remove('-Wl,--no-undefined')

    def module_as_lib(self, mname):
        """Return the name of a SIP v3.x module when it is used as a library.
        This will raise an exception when used with SIP v4.x modules.

        mname is the name of the module.
        """
        raise ValueError("module_as_lib() can only be used with SIP v3.x")

    def generate_macros_and_rules(self, mfile):
        """Generate the macros and rules generation.

        mfile is the file object.
        """
        # Work out the platform specific extension of the target.
        if self.static:
            if sys.platform == "win32":
                ext = "lib"
            else:
                ext = "a"
        else:
            if sys.platform == "win32":
                ext = "pyd"
            elif sys.platform == "darwin":
                ext = "so"
            elif sys.platform == "cygwin":
                ext = "dll"
            else:
                ext = self.optional_string("EXTENSION_PLUGIN")
                if not ext:
                    ext = self.optional_string("EXTENSION_SHLIB", "so")

        mfile.write("TARGET = %s\n" % (self._target + "." + ext))
        mfile.write("OFILES = %s\n" % self._build["objects"])
        mfile.write("HFILES = %s %s\n" % (self._build["headers"], self._build["moc_headers"]))
        mfile.write("\n")

        if self.static:
            # Pick the archiver; only the non-MSVC, non-MinGW case has a
            # usable ranlib.
            if self.generator in ("MSVC", "MSVC.NET", "MSBUILD", "BMAKE"):
                mfile.write("LIB = %s\n" % self.required_string("LIB"))
            elif self.generator == "MINGW":
                mfile.write("AR = %s\n" % self.required_string("LIB"))
                self._ranlib = None
            else:
                mfile.write("AR = %s\n" % self.required_string("AR"))
                self._ranlib = self.optional_string("RANLIB")

                if self._ranlib:
                    mfile.write("RANLIB = %s\n" % self._ranlib)

        Makefile.generate_macros_and_rules(self, mfile)

    def generate_target_default(self, mfile):
        """Generate the default target.

        mfile is the file object.
        """
        # Do these first so that it's safe for a sub-class to append
        # additional commands to the real target, but make sure the default
        # is correct.
        mfile.write("\nall: $(TARGET)\n")
        mfile.write("\n$(OFILES): $(HFILES)\n")

        # One moc rule per moc'ed header.
        for mf in self._build["moc_headers"].split():
            root, discard = os.path.splitext(mf)
            cpp = "moc_" + root + ".cpp"

            mfile.write("\n%s: %s\n" % (cpp, mf))
            mfile.write("\t$(MOC) -o %s $<\n" % cpp)

        mfile.write("\n$(TARGET): $(OFILES)\n")

        if self.generator in ("MSVC", "MSVC.NET", "MSBUILD"):
            if self.static:
                mfile.write("\t$(LIB) /OUT:$(TARGET) @<<\n")
                mfile.write("\t $(OFILES)\n")
                mfile.write("<<\n")
            else:
                mfile.write("\t$(LINK) $(LFLAGS) /OUT:$(TARGET) @<<\n")
                mfile.write("\t $(OFILES) $(LIBS)\n")
                mfile.write("<<\n")

                if self._manifest:
                    mfile.write("\tmt -nologo -manifest $(TARGET).manifest -outputresource:$(TARGET);2\n")
        elif self.generator == "BMAKE":
            if self.static:
                mfile.write("\t-%s $(TARGET)\n" % (self.rm))
                mfile.write("\t$(LIB) $(TARGET) @&&|\n")

                for of in self._build["objects"].split():
                    mfile.write("+%s \\\n" % (of))

                mfile.write("|\n")
            else:
                mfile.write("\t$(LINK) @&&|\n")
                mfile.write("\t$(LFLAGS) $(OFILES) ,$(TARGET),,$(LIBS),%s\n" % (self._target))
                mfile.write("|\n")

                # Create the .def file that renames the entry point.
                defname = os.path.join(self.dir, self._target + ".def")

                try:
                    dfile = open(defname, "w")
                except IOError:
                    error("Unable to create \"%s\"" % defname)

                dfile.write("EXPORTS\n")
                dfile.write("%s=_%s\n" % (self._entry_point, self._entry_point))

                dfile.close()
        else:
            if self.static:
                mfile.write("\t-%s $(TARGET)\n" % self.rm)
                mfile.write("\t$(AR) $(TARGET) $(OFILES)\n")

                if self._ranlib:
                    mfile.write("\t$(RANLIB) $(TARGET)\n")
            else:
                if self._limit_exports:
                    # Create an export file for AIX, Linux and Solaris so
                    # that only the module entry point is exported.
                    if sys.platform[:5] == 'linux':
                        mfile.write("\t@echo '{ global: %s; local: *; };' > %s.exp\n" % (self._entry_point, self._target))
                    elif sys.platform[:5] == 'sunos':
                        mfile.write("\t@echo '{ global: %s; local: *; };' > %s.exp\n" % (self._entry_point, self._target))
                    elif sys.platform[:3] == 'aix':
                        mfile.write("\t@echo '#!' >%s.exp" % self._target)
                        mfile.write("; \\\n\t echo '%s' >>%s.exp\n" % (self._entry_point, self._target))

                mfile.write("\t$(LINK) $(LFLAGS) -o $(TARGET) $(OFILES) $(LIBS)\n")

    def generate_target_install(self, mfile):
        """Generate the install target.

        mfile is the file object.
        """
        if self._install_dir is None:
            self._install_dir = self.config.default_mod_dir

        mfile.write("\ninstall: $(TARGET)\n")
        self.install_file(mfile, "$(TARGET)", self._install_dir, self._strip)

    def generate_target_clean(self, mfile):
        """Generate the clean target.

        mfile is the file object.
        """
        mfile.write("\nclean:\n")
        self.clean_build_file_objects(mfile, self._build)

        if self._manifest and not self.static:
            mfile.write("\t-%s $(TARGET).manifest\n" % self.rm)

        # Remove any export file on AIX, Linux and Solaris.
        if self._limit_exports and (sys.platform[:5] == 'linux' or
                                    sys.platform[:5] == 'sunos' or
                                    sys.platform[:3] == 'aix'):
            mfile.write("\t-%s %s.exp\n" % (self.rm, self._target))
class SIPModuleMakefile(ModuleMakefile):
    """The Makefile for an extension module whose code is generated by SIP."""

    def __init__(self, configuration, build_file, install_dir=None, static=0,
            console=0, qt=0, opengl=0, threaded=0, warnings=1, debug=0,
            dir=None, makefile="Makefile", installs=None, strip=1,
            export_all=0, universal=None, arch=None, prot_is_public=0,
            deployment_target=None):
        """Initialise an instance of a SIP generated module Makefile.

        prot_is_public is set if "protected" is to be redefined as "public".
        If the platform's C++ ABI allows it this can significantly reduce the
        size of the generated code.

        For all other arguments see ModuleMakefile.
        """
        ModuleMakefile.__init__(self, configuration, build_file, install_dir,
                static, console, qt, opengl, threaded, warnings, debug, dir,
                makefile, installs, strip, export_all, universal, arch,
                deployment_target)

        self._prot_is_public = prot_is_public

    def finalise(self):
        """Finalise the macros for a SIP generated module Makefile."""
        if self._prot_is_public:
            self.DEFINES.append('SIP_PROTECTED_IS_PUBLIC')
            self.DEFINES.append('protected=public')

        # The generated code needs the sip.h header.
        self.INCDIR.append(self.config.sip_inc_dir)

        ModuleMakefile.finalise(self)
class ProgramMakefile(Makefile):
    """The class that represents a program Makefile.
    """
    def __init__(self, configuration, build_file=None, install_dir=None,
            console=0, qt=0, opengl=0, python=0, threaded=0, warnings=1,
            debug=0, dir=None, makefile="Makefile", installs=None,
            universal=None, arch=None, deployment_target=None):
        """Initialise an instance of a program Makefile.

        build_file is the file containing the target specific information.
        If it is a dictionary instead then its contents are validated.
        install_dir is the directory the target will be installed in.
        """
        Makefile.__init__(self, configuration, console, qt, opengl, python, threaded, warnings, debug, dir, makefile, installs, universal, arch, deployment_target)

        self._install_dir = install_dir

        self._manifest = ("embed_manifest_exe" in self.optional_list("CONFIG"))
        self._target = None

        # A build file is only needed when generating a Makefile (as
        # opposed to using build_command()).
        if build_file:
            self._build = self.parse_build_file(build_file)
        else:
            self._build = None

    def build_command(self, source):
        """Create a command line that will build an executable.  Returns a
        tuple of the name of the executable and the command line.

        source is the name of the source file.
        """
        # The name of the executable.
        self._target, _ = os.path.splitext(source)

        if sys.platform in ("win32", "cygwin"):
            exe = self._target + ".exe"
        else:
            exe = self._target

        self.ready()

        # The command line.
        build = []

        build.append(self.required_string("CXX"))

        for a in self._arch.split():
            build.append('-arch ' + a)

        for f in self.optional_list("DEFINES"):
            build.append("-D" + f)

        for f in self.optional_list("INCDIR"):
            build.append("-I" + _quote(f))

        build.extend(self.optional_list("CXXFLAGS"))

        # This is for Qt5.
        build.extend(self.optional_list("CXXFLAGS_APP"))

        # Borland requires all flags to precede all file names.
        if self.generator != "BMAKE":
            build.append(source)

        # Generator specific output/link syntax.
        if self.generator in ("MSVC", "MSVC.NET", "MSBUILD"):
            build.append("-Fe")
            build.append("/link")
            libdir_prefix = "/LIBPATH:"
        elif self.generator == "BMAKE":
            build.append("-e" + exe)
            libdir_prefix = "-L"
        else:
            build.append("-o")
            build.append(exe)
            libdir_prefix = "-L"

        for ld in self.optional_list("LIBDIR"):
            if sys.platform == "darwin" and self.config.qt_framework:
                build.append("-F" + _quote(ld))

            build.append(libdir_prefix + _quote(ld))

        lflags = self.optional_list("LFLAGS")

        # This is a huge hack demonstrating my lack of understanding of how
        # the Borland compiler works.
        if self.generator == "BMAKE":
            blflags = []

            for lf in lflags:
                for f in lf.split():
                    # Tell the compiler to pass the flags to the linker.
                    if f[-1] == "-":
                        f = "-l-" + f[1:-1]
                    elif f[0] == "-":
                        f = "-l" + f[1:]

                    # Remove any explicit object files otherwise the compiler
                    # will complain that they can't be found, but they don't
                    # seem to be needed.
                    if f[-4:].lower() != ".obj":
                        blflags.append(f)

            lflags = blflags

        build.extend(lflags)

        build.extend(self.optional_list("LIBS"))

        if self.generator == "BMAKE":
            build.append(source)

        return (exe, ' '.join(build))

    def finalise(self):
        """Finalise the macros for a program Makefile.
        """
        if self.generator in ("MSVC", "MSVC.NET", "MSBUILD"):
            self.LFLAGS.append("/INCREMENTAL:NO")

        if self._manifest:
            self._add_manifest(self._target)

        if self.console:
            lflags_console = "LFLAGS_CONSOLE"
        else:
            lflags_console = "LFLAGS_WINDOWS"

        self.LFLAGS.extend(self.optional_list(lflags_console))

        Makefile.finalise(self)

    def generate_macros_and_rules(self, mfile):
        """Generate the macros and rules generation.

        mfile is the file object.
        """
        # A build file is required for Makefile generation.
        if not self._build:
            raise ValueError("pass a filename as build_file when generating a Makefile")

        target = self._build["target"]

        if sys.platform in ("win32", "cygwin"):
            target = target + ".exe"

        mfile.write("TARGET = %s\n" % target)
        mfile.write("OFILES = %s\n" % self._build["objects"])
        mfile.write("HFILES = %s\n" % self._build["headers"])
        mfile.write("\n")

        Makefile.generate_macros_and_rules(self, mfile)

    def generate_target_default(self, mfile):
        """Generate the default target.

        mfile is the file object.
        """
        # Do these first so that it's safe for a sub-class to append
        # additional commands to the real target, but make sure the default
        # is correct.
        mfile.write("\nall: $(TARGET)\n")
        mfile.write("\n$(OFILES): $(HFILES)\n")

        for mf in self._build["moc_headers"].split():
            root, _ = os.path.splitext(mf)
            cpp = "moc_" + root + ".cpp"

            # When building out of the source tree refer to the header by
            # its full path.
            if self._src_dir != self.dir:
                mf = os.path.join(self._src_dir, mf)

            mfile.write("\n%s: %s\n" % (cpp, mf))
            mfile.write("\t$(MOC) -o %s $<\n" % cpp)

        mfile.write("\n$(TARGET): $(OFILES)\n")

        if self.generator in ("MSVC", "MSVC.NET", "MSBUILD"):
            mfile.write("\t$(LINK) $(LFLAGS) /OUT:$(TARGET) @<<\n")
            mfile.write("\t $(OFILES) $(LIBS)\n")
            mfile.write("<<\n")
        elif self.generator == "BMAKE":
            mfile.write("\t$(LINK) @&&|\n")
            mfile.write("\t$(LFLAGS) $(OFILES) ,$(TARGET),,$(LIBS),,\n")
            mfile.write("|\n")
        else:
            mfile.write("\t$(LINK) $(LFLAGS) -o $(TARGET) $(OFILES) $(LIBS)\n")

        if self._manifest:
            mfile.write("\tmt -nologo -manifest $(TARGET).manifest -outputresource:$(TARGET);1\n")

    def generate_target_install(self, mfile):
        """Generate the install target.

        mfile is the file object.
        """
        if self._install_dir is None:
            self._install_dir = self.config.default_bin_dir

        mfile.write("\ninstall: $(TARGET)\n")
        self.install_file(mfile, "$(TARGET)", self._install_dir)

    def generate_target_clean(self, mfile):
        """Generate the clean target.

        mfile is the file object.
        """
        mfile.write("\nclean:\n")
        self.clean_build_file_objects(mfile, self._build)

        if self._manifest:
            mfile.write("\t-%s $(TARGET).manifest\n" % self.rm)
def _quote(s):
"""Return a string surrounded by double quotes it if contains a space.
s is the string.
"""
# On Qt5 paths often includes forward slashes so convert them.
if sys.platform == "win32":
s = s.replace("/", "\\")
if s.find(" ") >= 0:
s = '"' + s + '"'
return s
def version_to_string(version, parts=3):
    """ Convert an n-part version number encoded as a hexadecimal value to a
    string.  version is the version number.  Returns the string.
    """
    # Each part occupies one byte, major part first.
    fields = [(version >> 16) & 0xff]

    if parts > 1:
        fields.append((version >> 8) & 0xff)

    if parts > 2:
        fields.append(version & 0xff)

    return '.'.join(str(f) for f in fields)
def version_from_string(version_str):
    """ Convert a version string of the form m.n or m.n.o to an encoded
    version number (or None if it was an invalid format).  version_str is the
    version string.
    """
    parts = version_str.split('.')

    # (The original also checked isinstance(parts, list) - dead code, as
    # str.split() always returns a list.)

    # Allow the micro version to be omitted.
    if len(parts) == 2:
        parts.append('0')

    if len(parts) != 3:
        return None

    version = 0

    for part in parts:
        try:
            v = int(part)
        except ValueError:
            return None

        # One byte per part, major part first.
        version = (version << 8) + v

    return version
def read_version(filename, description, numdefine=None, strdefine=None):
    """Read the version information for a package from a file.

    The information is specified as #defines of a numeric (hexadecimal or
    decimal) value and/or a string value.

    filename is the name of the file.
    description is the descriptive name of the package, used in the error
    message if the version cannot be found.
    numdefine is the name of the #define of the numeric version.  It is
    ignored if it is None.
    strdefine is the name of the #define of the string version.  It is
    ignored if it is None.

    Returns a tuple of the version as a number and as a string (either
    element is None if the corresponding #define name was not given).
    """
    need_num = numdefine is not None
    need_str = strdefine is not None

    vers = None
    versstr = None

    # Use a context manager so the file is closed even if parsing fails.
    with open(filename) as f:
        for l in f:
            if not (need_num or need_str):
                break

            wl = l.split()
            if len(wl) >= 3 and wl[0] == "#define":
                if need_num and wl[1] == numdefine:
                    v = wl[2]

                    if v[0:2] == "0x":
                        vers = int(v, 16)
                    else:
                        # A decimal value encodes the version as M*100+m*10+p.
                        # BUG FIX: use floor division -- under Python 3 the
                        # old "/" produced floats and the "<<" shifts below
                        # raised TypeError.
                        dec = int(v)
                        major = dec // 100
                        minor = (dec % 100) // 10
                        patch = dec % 10
                        vers = (major << 16) + (minor << 8) + patch

                    need_num = 0

                if need_str and wl[1] == strdefine:
                    # Take account of embedded spaces and strip the quotes.
                    versstr = ' '.join(wl[2:])[1:-1]
                    need_str = 0

    if need_num or need_str:
        error("The %s version number could not be determined by parsing %s." % (description, filename))

    return (vers, versstr)
def create_content(cdict, macros=None):
    """Convert a dictionary to a string (typically to use as the content to a
    call to create_config_module()).  Dictionary values that are strings are
    quoted.  Dictionary values that are lists are converted to quoted strings.

    dict is the dictionary.
    macros is the optional dictionary of platform specific build macros.

    Returns the generated Python source fragment defining _pkg_config and
    _default_macros.
    """
    content = "_pkg_config = {\n"

    # Emit the keys in a deterministic (sorted) order.
    keys = list(cdict.keys())
    keys.sort()

    # Format it nicely: find the widest key so the values line up.
    width = 0
    for k in keys:
        klen = len(k)
        if width < klen:
            width = klen

    for k in keys:
        val = cdict[k]
        vtype = type(val)
        delim = None

        if val is None:
            # None is emitted literally (unquoted).
            val = "None"
        elif vtype == list:
            # Lists become a single space-separated quoted string.
            val = ' '.join(val)
            delim = "'"
        elif vtype == int:
            if k.find("version") >= 0:
                # Assume it's a hexadecimal version number. It doesn't matter
                # if it isn't, we are just trying to make it look pretty.
                val = "0x%06x" % val
            else:
                val = str(val)
        else:
            # Everything else is stringified and quoted.
            val = str(val)
            delim = "'"

        if delim:
            # Fall back to triple quotes if the value itself contains one.
            if "'" in val:
                delim = "'''"

            val = delim + val + delim

        # Double any backslashes so the output is valid Python source.
        content = content + " '" + k + "':" + (" " * (width - len(k) + 2)) + val.replace("\\", "\\\\")

        # No trailing comma after the last entry.
        if k != keys[-1]:
            content = content + ","

        content = content + "\n"

    content = content + "}\n\n"

    # Format the optional macros.
    content = content + "_default_macros = "

    if macros:
        content = content + "{\n"

        names = list(macros.keys())
        names.sort()

        # Again find the widest key for alignment.
        width = 0
        for c in names:
            clen = len(c)
            if width < clen:
                width = clen

        for c in names:
            # No trailing comma after the last entry.
            if c == names[-1]:
                sep = ""
            else:
                sep = ","

            val = macros[c]

            # Use triple quotes if the value contains a single quote.
            if "'" in val:
                delim = "'''"
            else:
                delim = "'"

            k = "'" + c + "':"
            content = content + " %-*s %s%s%s%s\n" % (1 + width + 2, k, delim, val.replace("\\", "\\\\"), delim, sep)

        content = content + "}\n"
    else:
        content = content + "None\n"

    return content
def create_config_module(module, template, content, macros=None):
    """Create a configuration module by replacing "@" followed by
    "SIP_CONFIGURATION" followed by "@" in a template file with a content
    string.

    module is the name of the module file to create.
    template is the name of the template file.
    content is the content string.  If it is a dictionary it is first
    converted to a string using create_content().
    macros is an optional dictionary of platform specific build macros.  It
    is only used if create_content() is called to convert the content to a
    string.
    """
    # isinstance() also accepts dict subclasses (e.g. OrderedDict), which the
    # old `type(content) == dict` test rejected.
    if isinstance(content, dict):
        content = create_content(content, macros)

    # Allow this file to be used as a template.
    key = "@" + "SIP_CONFIGURATION" + "@"

    # BUG FIX: use context managers so neither file handle leaks if an I/O
    # error occurs part way through.
    with open(module, "w") as df, open(template, "r") as sf:
        for line in sf:
            # Any line containing the key is replaced wholesale by the
            # content string.
            if key in line:
                line = content

            df.write(line)
def version_to_sip_tag(version, tags, description):
    """Convert a version number to a SIP tag.

    version is the version number.  If it is negative then the latest
    version is assumed (typically useful when a development preview is
    indicated by a negative version number).
    tags is the dictionary of tags keyed by version number.  The tag used is
    the one with the smallest key (ie. earliest version) that is greater
    than the given version number.
    description is the descriptive name of the package used for error
    messages.

    Returns the corresponding tag.
    """
    ordered = sorted(tags.keys())

    # For a preview use the latest tag.
    if version < 0:
        return tags[ordered[-1]]

    for candidate in ordered:
        if version < candidate:
            return tags[candidate]

    # No tag covers this version; error() terminates the process.
    error("Unsupported %s version: 0x%06x." % (description, version))
def error(msg):
    """Display an error message on stderr and terminate with exit code 1.

    msg is the text of the error message.
    """
    message = format("Error: " + msg)
    sys.stderr.write("%s\n" % message)
    sys.exit(1)
def inform(msg):
    """Display an information message on stdout.

    msg is the text of the message.
    """
    message = format(msg)
    sys.stdout.write("%s\n" % message)
def format(msg, leftmargin=0, rightmargin=78):
    """Format a message by inserting line breaks at appropriate places.

    Note that this shadows the builtin format() within this module.

    msg is the text of the message.
    leftmargin is the position of the left margin.
    rightmargin is the position of the right margin.

    Return the formatted message.
    """
    indent = " " * leftmargin
    pieces = [indent]
    column = leftmargin

    for word in msg.split():
        width = len(word)

        # Break the line when the word would overflow, but never at the
        # start of a line (a single over-long word is emitted as-is).
        if column != leftmargin and column + width > rightmargin:
            pieces.append("\n" + indent)
            column = leftmargin

        # Separate words on the same line with a single space.
        if column > leftmargin:
            pieces.append(" ")
            column += 1

        pieces.append(word)
        column += width

    return "".join(pieces)
def parse_build_macros(filename, names, overrides=None, properties=None):
    """Parse a qmake compatible file of build system macros and convert it to a
    dictionary.  A macro is a name/value pair.  The dictionary is returned or
    None if any of the overrides was invalid.

    filename is the name of the file to parse.
    names is a list of the macro names to extract from the file.
    overrides is an optional list of macro names and values that modify those
    found in the file.  They are of the form "name=value" (in which case the
    value replaces the value found in the file) or "name+=value" (in which case
    the value is appended to the value found in the file).
    properties is an optional dictionary of property name and values that are
    used to resolve any expressions of the form "$[name]" in the file.
    """
    # Validate and convert the overrides to a dictionary.  An appending
    # override is stored with a leading "+" so it can be recognised later.
    orides = {}

    if overrides is not None:
        for oride in overrides:
            prefix = ""
            name_end = oride.find("+=")

            if name_end >= 0:
                prefix = "+"
                val_start = name_end + 2
            else:
                name_end = oride.find("=")

                if name_end >= 0:
                    val_start = name_end + 1
                else:
                    # Neither "=" nor "+=" -- the override is malformed.
                    return None

            name = oride[:name_end]

            # Only macros we were asked for may be overridden.
            if name not in names:
                return None

            orides[name] = prefix + oride[val_start:]

    # This class defines a file like object that handles the nested include()
    # directives in qmake files.
    class qmake_build_file_reader:
        def __init__(self, filename):
            self.filename = filename
            self.currentfile = None
            # Stacks of open files/paths so nested include()s can be popped
            # back to the including file at EOF.
            self.filestack = []
            self.pathstack = []
            # The filename of the currently open exists(...) condition, if
            # any; a failure to open that file is then not an error.
            self.cond_fname = None
            self._openfile(filename)

        def _openfile(self, filename):
            try:
                f = open(filename, 'r')
            except IOError:
                # If this file is conditional then don't raise an error.
                if self.cond_fname == filename:
                    return

                error("Unable to open %s" % filename)

            if self.currentfile:
                self.filestack.append(self.currentfile)
                self.pathstack.append(self.path)

            self.currentfile = f
            self.path = os.path.dirname(filename)

        def readline(self):
            # Return the next significant line, transparently following
            # include() directives and skipping condition delimiters.
            line = self.currentfile.readline()
            sline = line.strip()

            if self.cond_fname and sline == '}':
                # The current condition is closed.
                self.cond_fname = None
                line = self.currentfile.readline()
            elif sline.startswith('exists(') and sline.endswith('{'):
                # A new condition is opened so extract the filename.
                self.cond_fname = self._normalise(sline[:-1].strip()[7:-1].strip())
                line = self.currentfile.readline()
            elif sline.startswith('include('):
                nextfile = self._normalise(sline[8:-1].strip())
                self._openfile(nextfile)
                return self.readline()

            if not line:
                # EOF: pop back to the including file, if any.
                self.currentfile.close()

                if self.filestack:
                    self.currentfile = self.filestack.pop()
                    self.path = self.pathstack.pop()
                    return self.readline()

            return line

        # Normalise a filename by expanding any environment variables and
        # making sure it is absolute.
        def _normalise(self, fname):
            if "$(" in fname:
                fname = os.path.normpath(self._expandvars(fname))

            if not os.path.isabs(fname):
                fname = os.path.join(self.path, fname)

            return fname

        # Expand the environment variables in a filename.
        def _expandvars(self, fname):
            i = 0
            while True:
                # Look for the next $(NAME) reference.
                m = re.search(r'\$\((\w+)\)', fname[i:])
                if not m:
                    break

                i, j = m.span(0)
                name = m.group(1)
                if name in os.environ:
                    tail = fname[j:]
                    fname = fname[:i] + os.environ[name]
                    # Skip over the substituted value so it is not rescanned.
                    i = len(fname)
                    fname += tail
                else:
                    # Leave an unknown variable in place and move past it.
                    i = j

            return fname

    f = qmake_build_file_reader(filename)

    # Get everything into a dictionary.  These pseudo-macros are pre-seeded
    # with their platform values.
    raw = {
        "DIR_SEPARATOR": os.sep,
        "LITERAL_WHITESPACE": " ",
        "LITERAL_DOLLAR": "$",
        "LITERAL_HASH": "#"
    }

    line = f.readline()
    while line:
        # Handle line continuations.
        while len(line) > 1 and line[-2] == "\\":
            line = line[:-2]

            # NOTE: "next" shadows the builtin of the same name here.
            next = f.readline()

            if next:
                line = line + next
            else:
                break

        # Strip comments and surrounding whitespace.
        line = line.split('#', 1)[0].strip()

        if line:
            # Decide whether this is "name += value" or "name = value".
            assstart = line.find("+")
            if assstart > 0 and line[assstart + 1] == '=':
                adding = True
                assend = assstart + 1
            else:
                adding = False
                assstart = line.find("=")
                assend = assstart

            if assstart > 0:
                lhs = line[:assstart].strip()
                rhs = line[assend + 1:].strip()

                # Remove the escapes for any quotes.
                rhs = rhs.replace(r'\"', '"').replace(r"\'", "'")

                if adding and rhs != "":
                    # Append to any value seen so far.
                    orig_rhs = raw.get(lhs)
                    if orig_rhs is not None:
                        rhs = orig_rhs + " " + rhs

                raw[lhs] = _expand_macro_value(raw, rhs, properties)

        line = f.readline()

    # Go through the raw dictionary extracting the macros we need and
    # resolving any macro expansions.  First of all, make sure every macro has
    # a value.
    refined = {}

    for m in names:
        refined[m] = ""

    macro_prefix = "QMAKE_"

    for lhs in list(raw.keys()):
        # Strip any prefix.
        if lhs.startswith(macro_prefix):
            reflhs = lhs[len(macro_prefix):]
        else:
            reflhs = lhs

        # See if we are interested in this one.
        if reflhs not in names:
            continue

        rhs = raw[lhs]

        # Expand any POSIX style environment variables, i.e. $$(NAME) or
        # $(NAME).
        pleadin = ["$$(", "$("]

        for pl in pleadin:
            estart = rhs.find(pl)

            if estart >= 0:
                nstart = estart + len(pl)
                break
        else:
            estart = -1

        while estart >= 0:
            eend = rhs[nstart:].find(")")

            if eend < 0:
                break

            eend = nstart + eend

            name = rhs[nstart:eend]

            # A missing environment variable expands to an empty string.
            try:
                env = os.environ[name]
            except KeyError:
                env = ""

            rhs = rhs[:estart] + env + rhs[eend + 1:]

            # Look for the next reference.
            for pl in pleadin:
                estart = rhs.find(pl)

                if estart >= 0:
                    nstart = estart + len(pl)
                    break
            else:
                estart = -1

        # Expand any Windows style environment variables, i.e. %NAME%.
        estart = rhs.find("%")
        while estart >= 0:
            eend = rhs[estart + 1:].find("%")

            if eend < 0:
                break

            eend = estart + 1 + eend

            name = rhs[estart + 1:eend]

            try:
                env = os.environ[name]
            except KeyError:
                env = ""

            rhs = rhs[:estart] + env + rhs[eend + 1:]

            estart = rhs.find("%")

        refined[reflhs] = rhs

    # Handle the user overrides.
    for lhs in list(orides.keys()):
        rhs = refined[lhs]
        oride = orides[lhs]

        if oride.find("+") == 0:
            # An appending override ("name+=value").
            if rhs:
                rhs = rhs + " " + oride[1:]
            else:
                rhs = oride[1:]
        else:
            rhs = oride

        refined[lhs] = rhs

    return refined
def _expand_macro_value(macros, rhs, properties):
    """Expand the value of a macro based on ones seen so far.

    macros is the dictionary of macro values seen so far.
    rhs is the raw value to expand.
    properties is an optional dictionary used to resolve $$[name] property
    references.

    Returns the expanded value.
    """
    # $$( introduces an environment variable reference, which is expanded
    # elsewhere; only $$name, $${name} and $$[name] are handled here.
    estart = rhs.find("$$(")
    mstart = rhs.find("$$")

    while mstart >= 0 and mstart != estart:
        rstart = mstart + 2
        if rstart < len(rhs) and rhs[rstart] == "{":
            # $${name} form: terminated by the closing brace.
            rstart = rstart + 1
            term = "}"
        elif rstart < len(rhs) and rhs[rstart] == "[":
            # $$[name] form: a property reference.
            rstart = rstart + 1
            term = "]"
        else:
            # Bare $$name form: terminated by whitespace (or end of string).
            term = string.whitespace

        # Find the end of the referenced name.
        mend = rstart
        while mend < len(rhs) and rhs[mend] not in term:
            mend = mend + 1

        lhs = rhs[rstart:mend]

        # Step over the closing delimiter, if there was one.
        if term in "}]":
            mend = mend + 1

        if term == "]":
            # Assume a missing property expands to an empty string.
            if properties is None:
                value = ""
            else:
                value = properties.get(lhs, "")
        else:
            # We used to treat a missing value as an error, but Qt v4.3.0 has
            # at least one case that refers to an undefined macro.  If qmake
            # handles it then this must be the correct behaviour.
            value = macros.get(lhs, "")

        # Splice in the value and look for the next reference.
        rhs = rhs[:mstart] + value + rhs[mend:]
        estart = rhs.find("$$(")
        mstart = rhs.find("$$")

    return rhs
def create_wrapper(script, wrapper, gui=0, use_arch=''):
    """Create a platform dependent executable wrapper around a Python script.

    script is the full pathname of the script.
    wrapper is the name of the wrapper file to create.
    gui is non-zero if a GUI enabled version of the interpreter should be
    used.
    use_arch is the MacOS/X architectures to invoke python with.  Several
    space separated architectures may be specified.

    Returns the platform specific name of the wrapper (".bat" is appended on
    Windows).
    """
    if sys.platform == "win32":
        wrapper = wrapper + ".bat"

    # BUG FIX: use a context manager so the wrapper file is closed even if a
    # write fails part way through.
    with open(wrapper, "w") as wf:
        if sys.platform == "win32":
            exe = sys.executable

            if gui:
                # Use the console-less interpreter (pythonw.exe).
                exe = exe[:-4] + "w.exe"

            wf.write("@\"%s\" \"%s\" %%1 %%2 %%3 %%4 %%5 %%6 %%7 %%8 %%9\n" % (exe, script))
        elif sys.platform == "darwin":
            # The installation of MacOS's python is a mess that changes from
            # version to version and where sys.executable is useless.
            py_major, py_minor = sys.version_info[:2]

            if gui:
                # In Python v3.4 and later there is no pythonw.
                if (py_major == 3 and py_minor >= 4) or py_major >= 4:
                    exe = "python"
                else:
                    exe = "pythonw"
            else:
                exe = "python"

            exe = "%s%d.%d" % (exe, py_major, py_minor)

            if use_arch:
                # Note that this may not work with the "standard" interpreter
                # but should with the "pythonX.Y" version.
                arch_flags = ' '.join(["-%s" % a for a in use_arch.split()])
                exe = "arch %s %s" % (arch_flags, exe)

            wf.write("#!/bin/sh\n")
            wf.write("exec %s %s ${1+\"$@\"}\n" % (exe, script))
        else:
            wf.write("#!/bin/sh\n")
            wf.write("exec %s %s ${1+\"$@\"}\n" % (sys.executable, script))

    if sys.platform != "win32":
        # Make the wrapper executable for owner, group and others.
        sbuf = os.stat(wrapper)
        mode = sbuf.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        os.chmod(wrapper, mode)

    return wrapper
| [
"[email protected]"
] | |
eb7705bd6b9d8e6677c1899be7ba4d2bdc3f42a1 | 368be25e37bafa8cc795f7c9f34e4585e017091f | /.history/app_fav_books/models_20201114185225.py | 1f3117e89b2e0690542e5f302aea450246571448 | [] | no_license | steven-halla/fav_books_proj | ebcfbfda0e7f3cdc49d592c86c633b1d331da513 | 512005deb84ac906c9f24d4ab0939bd0db096716 | refs/heads/master | 2023-03-30T09:37:38.016063 | 2021-04-02T20:27:22 | 2021-04-02T20:27:22 | 354,125,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,606 | py | from django.db import models
import re
class UserManager(models.Manager):
    """Manager providing registration-form validation for User."""

    def basic_validator(self, post_data):
        """Return a dict mapping field name to error message for bad input.

        post_data must contain 'first_name', 'last_name', 'email',
        'password' and 'confirm_password' keys.  An empty dict means the
        data is valid.  Later checks on the same field overwrite earlier
        messages.
        """
        errors = {}
        email_regex = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')

        first = post_data['first_name']
        if len(first) < 3:
            errors['first_name'] = "First name must be 3 characters"
        if not first.isalpha():
            errors['first_name'] = "letters only"

        last = post_data['last_name']
        if len(last) < 3:
            errors['last_name'] = "Last name must be 3 characters"
        if not last.isalpha():
            errors['last_name'] = "letters only"

        email = post_data['email']
        if len(email) < 8:
            errors['email'] = "Email must contain 8 characters"
        if "@" not in email:
            errors['email'] = "email must contain @ and .com"
        if ".com" not in email:
            errors['email'] = "email must contain @ and .com"
        # The regex check runs last so its message wins for invalid emails.
        if not email_regex.match(email):
            errors['email'] = "Invalid email address!"

        if post_data['password'] != post_data['confirm_password']:
            errors['pass_match'] = "password must match confirm password"
        if len(post_data['password']) < 8:
            errors['pass_length'] = "password must be longer than 8 characters"

        return errors
# Create your models here.
class User(models.Model):
    """A registered site user.

    NOTE(review): the password appears to be stored as entered (no hashing
    is visible in this model) -- confirm hashing happens elsewhere before
    production use.
    """

    first_name = models.CharField(max_length=20)
    last_name = models.CharField(max_length=20)
    # NOTE(review): max_length=20 is tight for real email addresses -- TODO confirm.
    email = models.CharField(max_length=20)
    password = models.CharField(max_length=20)
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)  # updated on every save
    # Custom manager providing basic_validator().
    objects = UserManager()
class BooksManager(models.Manager):
    """Manager providing form validation for Books."""

    def basic_validator(self, post_data):
        """Return a dict mapping field name to error message for bad input.

        post_data must contain 'title' and 'desc' keys.  An empty dict
        means the data is valid.
        """
        errors = {}
        if len(post_data['title']) < 1:
            # BUG FIX: the message previously said "First name" -- a
            # copy/paste remnant from the user validator.
            errors['title'] = "Title must be at least 1 character"
        if len(post_data['desc']) < 5:
            # BUG FIX: this previously read post_data['last_name'], which is
            # not part of a book form (KeyError) and validated the wrong
            # field; the message always referred to the description.
            errors['desc'] = "Description must be 5 characters"
        return errors
class Books(models.Model):
    """A book that users can upload and mark as a favorite."""

    title = models.CharField(max_length=20)
    desc = models.CharField(max_length=40)
    # The user who added this book.
    uploaded_by = models.ForeignKey(User, related_name="books_uploaded", on_delete=models.CASCADE)
    # Users who have marked this book as a favorite.
    users_who_favorite = models.ManyToManyField(User, related_name="liked_books")
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)  # updated on every save
    # BUG FIX: the manager must be an *instance*.  The original assigned the
    # class itself (objects = BooksManager), which leaves Books without a
    # working manager and makes Books.objects.basic_validator() unusable.
    objects = BooksManager()
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.