Dataset schema (one row per source file):

| Column | Type | Range / values |
| --- | --- | --- |
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | sequence | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64, nullable | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us], nullable | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us], nullable | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | sequence | length 1 to 1 |
| author_id | string | length 1 to 132 |
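Each row below follows this schema, with the file itself stored in the `content` column. A minimal, hedged sketch of how such a split might be read; the dataset identifier `user/source-files` is a placeholder, not a real Hub path:

```python
# Hypothetical loading sketch -- the repo id below is a placeholder.
from datasets import load_dataset

ds = load_dataset("user/source-files", split="train", streaming=True)
for row in ds.take(1):
    print(row["repo_name"], row["path"], row["length_bytes"])
    print(row["content"][:200])  # first 200 characters of the stored source file
```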
d1cd60b8ac3a89b9dd0b4a456d9c166b93f4ffe5 | 67c5269fa4720cf728d4c1dd572c09d5e4e7a321 | /convert_mcnp71.py | db687aef0e14ec73a1479e0f9dc3959d89a76938 | [] | no_license | SamPUG/data | cff882327f5fe79ce2c2fca70d217173300c4f85 | 457755083bb8e05e58bbc3765f52bf8c756abb9c | refs/heads/master | 2020-12-19T14:57:12.806099 | 2020-03-06T08:30:47 | 2020-03-06T08:30:47 | 235,767,080 | 0 | 0 | null | 2020-02-25T14:43:04 | 2020-01-23T09:58:38 | Python | UTF-8 | Python | false | false | 4,330 | py | #!/usr/bin/env python3
import argparse
from collections import defaultdict
from pathlib import Path
import sys
import openmc.data
# Make sure Python version is sufficient
assert sys.version_info >= (3, 6), "Python 3.6+ is required"
description = """
Convert ENDF/B-VII.1 ACE data from the MCNP6 distribution into an HDF5 library
that can be used by OpenMC. This assumes that you have a directory containing
subdirectories 'endf71x' and 'ENDF71SaB'. Optionally, if a recent photoatomic
library (e.g., eprdata14) is available, it can also be converted using the
--photon argument.
"""
class CustomFormatter(argparse.ArgumentDefaultsHelpFormatter,
argparse.RawDescriptionHelpFormatter):
pass
parser = argparse.ArgumentParser(
description=description,
formatter_class=CustomFormatter
)
parser.add_argument('-d', '--destination', type=Path, default=Path('mcnp_endfb71'),
help='Directory to create new library in')
parser.add_argument('--libver', choices=['earliest', 'latest'],
default='earliest', help="Output HDF5 versioning. Use "
"'earliest' for backwards compatibility or 'latest' for "
"performance")
parser.add_argument('-p', '--photon', type=Path,
help='Path to photoatomic data library (eprdata12 or later)')
parser.add_argument('mcnpdata', type=Path,
help='Directory containing endf71x and ENDF71SaB')
args = parser.parse_args()
# Check arguments to make sure they're valid
assert args.mcnpdata.is_dir(), 'mcnpdata argument must be a directory'
if args.photon is not None:
assert args.photon.is_file(), 'photon argument must be an existing file'
# Get a list of all ACE files
endf71x = list(args.mcnpdata.glob('endf71x/*/*.7??nc'))
endf71sab = list(args.mcnpdata.glob('ENDF71SaB/*.??t'))
# Check for fixed H1 files and remove old ones if present
hydrogen = args.mcnpdata / 'endf71x' / 'H'
if (hydrogen / '1001.720nc').is_file():
for i in range(10, 17):
endf71x.remove(hydrogen / f'1001.7{i}nc')
# There's a bug in H-Zr at 1200 K
thermal = args.mcnpdata / 'ENDF71SaB'
endf71sab.remove(thermal / 'h-zr.27t')
# Check for updated TSL files and remove old ones if present
checks = [
('sio2', 10, range(20, 37)),
('u-o2', 30, range(20, 28)),
('zr-h', 30, range(20, 28))
]
for material, good, bad in checks:
if (thermal / f'{material}.{good}t').is_file():
for suffix in bad:
f = thermal / f'{material}.{suffix}t'
if f.is_file():
endf71sab.remove(f)
# Group together tables for the same nuclide
tables = defaultdict(list)
for p in sorted(endf71x + endf71sab):
tables[p.stem].append(p)
# Create output directory if it doesn't exist
(args.destination / 'photon').mkdir(parents=True, exist_ok=True)
library = openmc.data.DataLibrary()
for name, paths in sorted(tables.items()):
# Convert first temperature for the table
p = paths[0]
print(f'Converting: {p}')
if p.name.endswith('t'):
data = openmc.data.ThermalScattering.from_ace(p)
else:
data = openmc.data.IncidentNeutron.from_ace(p, 'mcnp')
# For each higher temperature, add cross sections to the existing table
for p in paths[1:]:
print(f'Adding: {p}')
if p.name.endswith('t'):
data.add_temperature_from_ace(p)
else:
data.add_temperature_from_ace(p, 'mcnp')
# Export HDF5 file
h5_file = args.destination / f'{data.name}.h5'
print(f'Writing {h5_file}...')
data.export_to_hdf5(h5_file, 'w', libver=args.libver)
# Register with library
library.register_file(h5_file)
# Handle photoatomic data
if args.photon is not None:
lib = openmc.data.ace.Library(args.photon)
for table in lib.tables:
# Convert first temperature for the table
print(f'Converting: {table.name}')
data = openmc.data.IncidentPhoton.from_ace(table)
# Export HDF5 file
h5_file = args.destination / 'photon' / f'{data.name}.h5'
print(f'Writing {h5_file}...')
data.export_to_hdf5(h5_file, 'w', libver=args.libver)
# Register with library
library.register_file(h5_file)
# Write cross_sections.xml
library.export_to_xml(args.destination / 'cross_sections.xml')
| [
"[email protected]"
] | |
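A hedged invocation sketch for the converter above; the flags mirror the script's own argparse definitions, but the MCNP data paths are assumptions, not verified install locations:

```python
# Assumed paths; only the flags come from the script's argparse setup.
import subprocess

subprocess.run([
    "python", "convert_mcnp71.py",
    "--destination", "mcnp_endfb71",               # the script's default, shown for clarity
    "--libver", "earliest",                        # favors backwards-compatible HDF5
    "--photon", "/opt/MCNP_DATA/xdata/eprdata14",  # hypothetical photoatomic library
    "/opt/MCNP_DATA",                              # must contain endf71x/ and ENDF71SaB/
], check=True)
```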
ba45777ebf476d635254faf1c942e070055b6fc5 | c463e77c3d76e6b4810e202541d3f3f7f91bcf60 | /build/PCL-ROS-cluster-Segmentation/cmake/sensor_stick-genmsg-context.py | 31a011a3d2b1087f74bbb8bde784bccea1893805 | [] | no_license | MGRNascimento/Tese | 18087ee59dfee96ee000c9f16c646d1750174285 | bf78d417849a74d9c5a520d40dcbebeadf084706 | refs/heads/master | 2020-06-23T13:57:01.699657 | 2019-10-23T21:47:19 | 2019-10-23T21:47:19 | 198,638,709 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 992 | py | # generated from genmsg/cmake/pkg-genmsg.context.in
messages_str = "/home/miguel/catkin_ws/src/PCL-ROS-cluster-Segmentation/msg/DetectedObjectsArray.msg;/home/miguel/catkin_ws/src/PCL-ROS-cluster-Segmentation/msg/DetectedObject.msg;/home/miguel/catkin_ws/src/PCL-ROS-cluster-Segmentation/msg/SegmentedClustersArray.msg"
services_str = "/home/miguel/catkin_ws/src/PCL-ROS-cluster-Segmentation/srv/GetNormals.srv"
pkg_name = "sensor_stick"
dependencies_str = "std_msgs;sensor_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = "sensor_stick;/home/miguel/catkin_ws/src/PCL-ROS-cluster-Segmentation/msg;std_msgs;/opt/ros/kinetic/share/std_msgs/cmake/../msg;sensor_msgs;/opt/ros/kinetic/share/sensor_msgs/cmake/../msg;geometry_msgs;/opt/ros/kinetic/share/geometry_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python"
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| [
"[email protected]"
] | |
7b6403c7efbad9fe1289f6a2236850d7a726f626 | eacff46eda2c6b509449979a16002b96d4645d8e | /Collections-a-installer/community-general-2.4.0/plugins/modules/awall.py | ca3979593c598ecae378543075eff676aa1be9d1 | [
"MIT",
"GPL-3.0-only",
"GPL-3.0-or-later"
] | permissive | d-amien-b/simple-getwordpress | 5e6d4d15d5f87124ab591e46b63fec552998fdc3 | da90d515a0aa837b633d50db4d91d22b031c04a2 | refs/heads/master | 2023-04-08T22:13:37.347545 | 2021-04-06T09:25:51 | 2021-04-06T09:25:51 | 351,698,069 | 0 | 0 | MIT | 2021-03-31T16:16:45 | 2021-03-26T07:30:00 | HTML | UTF-8 | Python | false | false | 15 | py | system/awall.py | [
"[email protected]"
] | |
c72ea0fdf63e7cab3cd12fac24e9a96fe75a01e2 | 50402cc4388dfee3a9dbe9e121ef217759ebdba8 | /etc/MOPSO-GP0/ZDT4.py | 1082e5005e8823de068729fbccebe4e6a539378f | [] | no_license | dqyi11/SVNBackup | bd46a69ec55e3a4f981a9bca4c8340944d8d5886 | 9ad38e38453ef8539011cf4d9a9c0a363e668759 | refs/heads/master | 2020-03-26T12:15:01.155873 | 2015-12-10T01:11:36 | 2015-12-10T01:11:36 | 144,883,382 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,609 | py | '''
Created on 2014-1-25
@author: Walter
'''
from SwarmND import *;
import numpy as np;
import sys;
if __name__ == '__main__':
def func1(x):
return x[0];
def func2(x):
sum = 0.0;
for i in range(2, 10):
sum += x[i]**2 - 10 * np.cos(4 * np.pi * x[i]);
g = 1 + 10 * 9 + sum;
h = 1 - np.sqrt(x[0]/g);
return g * h;
    figFolder = sys.path[0] + "\\zdt4";  # output directory for plots (dead duplicate assignment removed)
paretoX = np.arange(0.0,1.0,0.005);
paretoY = np.zeros(len(paretoX));
localParetoY = np.zeros(len(paretoX));
paretoPos = [];
for i in range(len(paretoX)):
paretoY[i] = 1 - np.sqrt(paretoX[i]);
localParetoY[i] = 1 - np.sqrt(paretoX[i]/1.25);
fitPos = np.matrix(np.zeros((1,2), np.float));
fitPos[0,0] = paretoX[i];
fitPos[0,1] = paretoY[i];
paretoPos.append(fitPos);
swarm = SwarmND(100, 10);
swarm.setDisplayParam(600, 600, 20, 0.1)
swarm.setParam(2.0, 2.0, 0.8, [func1, func2]);
ws = [];
ws.append([0.0, 1.0]);
for i in range(1,10):
ws.append([-5.0, 5.0])
swarm.initParticles(ws);
swarm.paretoX = paretoX;
swarm.paretoY = paretoY;
swarm.localParetoX = paretoX;
swarm.localParetoY = localParetoY;
swarm.paretoPos = paretoPos;
runPlan = [30, 60, 80, 100];
count = 0;
for r in runPlan:
for t in range(r):
swarm.update();
count += 1;
swarm.plot(count, figFolder);
| [
"walter@e224401c-0ce2-47f2-81f6-2da1fe30fd39"
] | walter@e224401c-0ce2-47f2-81f6-2da1fe30fd39 |
7e15d512ec3c87a9d4dc6de189623ab45646f041 | efb3194a583cd79cc03dc91b9a96dfc0bdd3a344 | /stm32f/json_pkt.py | 8fab02dbeb225a6406222a1a16911d147abec342 | [
"Apache-2.0"
] | permissive | andersy005/capstone | 9227b0c19b4e16ea5e67a529937652408d0a35f2 | b4301ebc7c1447f3ce2ff034add985c1f417f065 | refs/heads/master | 2021-09-13T07:42:52.359116 | 2018-04-26T17:58:05 | 2018-04-26T17:58:05 | 118,843,216 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,517 | py | # This code should run fine on MicroPython or CPython.
#
# It allows objects which can be represented as JSON objects to be sent
# between two python programs (running on the same or different computers).
import json
from dump_mem import dump_mem
SOH = 0x01
STX = 0x02
ETX = 0x03
EOT = 0x04
# <SOH><LenLow><LenHigh><STX><PAYLOAD><ETX><LRC><EOT>
def lrc(str):
sum = 0
for b in str:
sum = (sum + b) & 0xff
return ((sum ^ 0xff) + 1) & 0xff
class JSON_Packet:
STATE_SOH = 0
STATE_LEN_0 = 1
STATE_LEN_1 = 2
STATE_STX = 3
STATE_PAYLOAD = 4
STATE_ETX = 5
STATE_LRC = 6
STATE_EOT = 7
def __init__(self, serial_port, show_packets=False):
self.serial_port = serial_port
self.show_packets = show_packets
self.pkt_len = 0
self.pkt_idx = 0
self.pkt = None
self.lrc = 0
self.state = JSON_Packet.STATE_SOH
def send(self, obj):
"""Converts a python object into its json representation and then sends
it using the 'serial_port' passed in the constructor.
"""
j_str = json.dumps(obj).encode('ascii')
j_len = len(j_str)
j_lrc = lrc(j_str)
hdr = bytearray((SOH, j_len & 0xff, j_len >> 8, STX))
ftr = bytearray((ETX, j_lrc, EOT))
if self.show_packets:
data = hdr + j_str + ftr
dump_mem(data, 'Send')
self.serial_port.write(hdr)
self.serial_port.write(j_str)
self.serial_port.write(ftr)
def process_byte(self, byte):
"""Processes a single byte. Returns a json object when one is
successfully parsed, otherwise returns None.
"""
if self.show_packets:
if byte >= ord(' ') and byte <= ord('~'):
print('Rcvd 0x%02x \'%c\'' % (byte, byte))
else:
print('Rcvd 0x%02x' % byte)
if self.state == JSON_Packet.STATE_SOH:
if byte == SOH:
self.state = JSON_Packet.STATE_LEN_0
elif self.state == JSON_Packet.STATE_LEN_0:
self.pkt_len = byte
self.state = JSON_Packet.STATE_LEN_1
elif self.state == JSON_Packet.STATE_LEN_1:
self.pkt_len += (byte << 8)
self.state = JSON_Packet.STATE_STX
elif self.state == JSON_Packet.STATE_STX:
if byte == STX:
self.state = JSON_Packet.STATE_PAYLOAD
self.pkt_idx = 0
self.pkt = bytearray(self.pkt_len)
self.lrc = 0
else:
self.state = JSON_Packet.STATE_SOH
elif self.state == JSON_Packet.STATE_PAYLOAD:
self.pkt[self.pkt_idx] = byte
self.lrc = (self.lrc + byte) & 0xff
self.pkt_idx += 1
if self.pkt_idx >= self.pkt_len:
self.state = JSON_Packet.STATE_ETX
elif self.state == JSON_Packet.STATE_ETX:
if byte == ETX:
self.state = JSON_Packet.STATE_LRC
else:
self.state = JSON_Packet.STATE_SOH
elif self.state == JSON_Packet.STATE_LRC:
self.lrc = ((self.lrc ^ 0xff) + 1) & 0xff
if self.lrc == byte:
self.state = JSON_Packet.STATE_EOT
else:
self.state = JSON_Packet.STATE_SOH
elif self.state == JSON_Packet.STATE_EOT:
self.state = JSON_Packet.STATE_SOH
if byte == EOT:
return json.loads(str(self.pkt, 'ascii'))
| [
"[email protected]"
] | |
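A loopback sketch of the framing above (`<SOH><LenLow><LenHigh><STX><PAYLOAD><ETX><LRC><EOT>`), with `io.BytesIO` standing in for the serial port. This assumes the module is saved as `json_pkt.py` and that its `dump_mem` import is available or stubbed out:

```python
import io
from json_pkt import JSON_Packet  # assumption: the file above is on the import path

port = io.BytesIO()
JSON_Packet(port).send({'temp': 21.5})   # frame a small JSON object

receiver = JSON_Packet(io.BytesIO())
for b in port.getvalue():                # feed the frame back byte by byte
    obj = receiver.process_byte(b)
    if obj is not None:
        print(obj)                       # -> {'temp': 21.5}
```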
fefa4008d3c6a8622e01e84a315130f060863036 | 2a54e8d6ed124c64abb9e075cc5524bb859ba0fa | /.history/2-Python-Basics-part2/6-Logical-operators_20200414002000.py | 7a4ee8fd3c96f8e57b7e41dc522b12fb81613bec | [] | no_license | CaptainStorm21/Python-Foundation | 01b5fbaf7a913506518cf22e0339dd948e65cea1 | a385adeda74f43dd7fb2d99d326b0be23db25024 | refs/heads/master | 2021-05-23T01:29:18.885239 | 2020-04-23T19:18:06 | 2020-04-23T19:18:06 | 253,171,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | # Short Circuiting
is_Friend = True
is_User = True
if is_Friend or is_User:
print("both are true")
if is_Friend and is_User:
print("both are true")
age = 15
year = 2019
boy = "Vlad" | [
"[email protected]"
] | |
9afc659a83985ca5e7a34f87ceb3a5de075cc25b | 5a3b070f39715f604a8bfc38888b6ee4382e54ac | /TalkTalk-Server/app.py | aa21f179f70f37f987a80665e81a7a672d8cc074 | [] | no_license | aupaprunia/talktalk | 717245ec0378559abf2dba0793822d19613faf57 | 895418aa25ad154449f4036362a77b615092b00b | refs/heads/main | 2023-04-13T03:53:37.361677 | 2021-04-11T19:08:54 | 2021-04-11T19:08:54 | 356,480,824 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,022 | py | from flask import Flask, request
import pyrebase
import requests
choice_dict = {1:"Sad", 2: "Happy", 3: "Angry", 4: "Excited"}
config = {"apiKey": "AIzaSyBrey3ZZ5X74WrAQuj7HISWLl70PqP8dnA",
"authDomain": "trialproject-55deb.firebaseapp.com",
"databaseURL": "https://trialproject-55deb-default-rtdb.firebaseio.com",
"projectId": "trialproject-55deb",
"storageBucket": "trialproject-55deb.appspot.com",
"messagingSenderId": "930590452475",
"appId": "1:930590452475:web:d8857d9906874468fd5e5e"
}
firebase = pyrebase.initialize_app(config)
auth = firebase.auth()
db = firebase.database()
app = Flask(__name__)
# @app.route('/signup', methods =['GET'])
# def signup():
# register = request.get_json()
# email = register['email']
# password = register['password']
# auth.create_user_with_email_and_password(email, password)
# return {"status": " success", "email": email, "password": password}
@app.route('/signin/<string:email>/<string:password>', methods = ['GET'])
def signin(email, password):
try:
result = auth.sign_in_with_email_and_password(email, password)
global userId
userId = result['localId']
get_token = db.child("Users").child(userId).get()
global token
token = get_token.val()['token']
name = get_token.val()['name']
return{"token": token, "status": 1, "name": name}
except:
return {"status": 0}
@app.route('/speaker/<int:choice>', methods = ["GET"])
def speaker(choice):
try:
users = db.child("Online").child("Listener").child(choice_dict[choice]).get()
uid = ""
flag = True
for key in users.val():
if flag == True:
uid = key
flag = False
db.child("Online").child("Listener").child(choice_dict[choice]).child(uid).child("status").set("1")
db.child("Users").child(userId).child("token").set(token-1)
url = "https://fcm.googleapis.com/fcm/send"
payload="{\r\n \"to\":\"/topics/"+userId+",\r\n \"data\": {\r\n \"title\": \"Alert\",\r\n \"body\": \"You have an incoming call...\"\r\n }\r\n}"
headers = {'Authorization': 'key=AAAA2KuDavs:APA91bGCwqzJYQntRNVZU4WfjDh71D2kLvI4ei3iXr9BIlrz-lzp3HdzZWKAWghUwZK0i1rvC0RKFl2rdk1uyAf3RozvlPO1snRvwYpxJVz5qAH5keFgzygj8h16D0g-YDHrz6SoqJfh',
'Content-Type': 'application/json'}
response = requests.request("POST", url, headers=headers, data=payload)
print(response)
return {"channel_name": uid, "status":1}
except:
return {"message": "No Listner available. Try reconnecting later.", "status":0}
@app.route('/listner/<int:choice>', methods = ["GET"])
def push_listner(choice):
db.child("Online").child("Listener").child(choice_dict[choice]).child(userId).child("status").set("0")
db.child("Online").child("Listener").child(choice_dict[choice]).child(userId).child("uid").set(userId)
db.child("Users").child(userId).child("token").set(token+1)
return {"status" : 1, "message": "You will be connected to a speaker shortly."}
if __name__ == '__main__':
app.run(debug = True) | [
"="
] | = |
08dfeef07dc2184dd58ed15584e4a9d792be3383 | 3a8c2bd3b8df9054ed0c26f48616209859faa719 | /Challenges/Hackerrank-DynamicArray.py | c63264cadb5c93066503209dd51764b1eaa68ce0 | [] | no_license | AusCommsteam/Algorithm-and-Data-Structures-and-Coding-Challenges | 684f1ca2f9ee3c49d0b17ecb1e80707efe305c82 | 98fb752c574a6ec5961a274e41a44275b56da194 | refs/heads/master | 2023-09-01T23:58:15.514231 | 2021-09-10T12:42:03 | 2021-09-10T12:42:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | #!/bin/python3
import os
import sys
#
# Complete the dynamicArray function below.
#
def dynamicArray(n, queries):
    # n sequences; a type-1 query appends y, a type-2 query reads an element
    # and updates lastAnswer (standard HackerRank "Dynamic Array" semantics).
    arr = [[] for _ in range(n)]
    lastAnswer = 0
    answers = []
    for t, x, y in queries:
        idx = (x ^ lastAnswer) % n
        if t == 1:
            arr[idx].append(y)
        else:
            lastAnswer = arr[idx][y % len(arr[idx])]
            answers.append(lastAnswer)
    return answers
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
nq = input().split()
n = int(nq[0])
q = int(nq[1])
queries = []
for _ in range(q):
queries.append(list(map(int, input().rstrip().split())))
result = dynamicArray(n, queries)
fptr.write('\n'.join(map(str, result)))
fptr.write('\n')
fptr.close()
| [
"[email protected]"
] | |
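An illustrative run of the completed function, using the problem's usual sample (assumed here, not taken from the repository):

```python
queries = [[1, 0, 5], [1, 1, 7], [1, 0, 3], [2, 1, 0], [2, 1, 1]]
print(dynamicArray(2, queries))  # -> [7, 3]
```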
bd080db414250c7460293da72e2625c463127dcf | 55a4d7ed3ad3bdf89e995eef2705719ecd989f25 | /main/tensorflow_test/hmm_天气_活动理解.py | 1318a13a359255ef5e47ef393f656642d7456de5 | [] | no_license | ichoukou/Bigdata | 31c1169ca742de5ab8c5671d88198338b79ab901 | 537d90ad24eff4742689eeaeabe48c6ffd9fae16 | refs/heads/master | 2020-04-17T04:58:15.532811 | 2018-12-11T08:56:42 | 2018-12-11T08:56:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,842 | py | # coding:utf-8
states = ('Rainy', 'Sunny')
observations = ('walk', 'shop', 'clean')
start_probability = {'Rainy': 0.6, 'Sunny': 0.4}
transition_probability = {
'Rainy': {'Rainy': 0.7, 'Sunny': 0.3},
'Sunny': {'Rainy': 0.4, 'Sunny': 0.6},
}
emission_probability = {
'Rainy': {'walk': 0.1, 'shop': 0.4, 'clean': 0.5},
'Sunny': {'walk': 0.6, 'shop': 0.3, 'clean': 0.1},
}
# Print the path-probability table
def print_dptable(V):
print " ",
for i in range(len(V)): print "%7d" % i,
print
for y in V[0].keys():
print "%.10s: " % y,
for t in range(len(V)):
print "%.12s" % ("%f" % V[t][y]),
print
def viterbi(obs, states, start_p, trans_p, emit_p):
"""
    :param obs: observation sequence
    :param states: hidden states
    :param start_p: initial probabilities (of the hidden states)
    :param trans_p: transition probabilities (between hidden states)
    :param emit_p: emission probabilities (hidden state producing the visible state)
:return:
"""
    # path-probability table: V[time][hidden state] = probability
    V = [{}]
    # scratch variable: the best path ending in each hidden state
    path = {}
    # initialize the start states (t == 0)
for y in states:
V[0][y] = start_p[y] * emit_p[y][obs[0]]
path[y] = [y]
# 对 t > 0 跑一遍维特比算法
for t in range(1, len(obs)): # [1,2]
V.append({})
newpath = {}
for y in states:
            # P(hidden state y at t) = P(best predecessor y0) * P(y0 -> y) * P(y emits obs[t])
            # print [(V[t - 1][y0] * trans_p[y0][y] * emit_p[y][obs[t]], y0) for y0 in states]
            # The probability that step t is y can come from either previous state
            # (rainy or sunny yesterday); keep only the most probable chain.
            (prob, state) = max([(V[t - 1][y0] * trans_p[y0][y] * emit_p[y][obs[t]], y0) for y0 in states])
            # record the maximum probability
V[t][y] = prob
print V
            # update the sunny/rainy chains: keep the best path ending in each state at this step
newpath[y] = path[state] + [y]
# print newpath
        # no need to keep the old paths
path = newpath
    # Print the table. Each day, only the single best sunny and best rainy probabilities are kept and become the next day's inputs.
print_dptable(V)
(prob, state) = max([(V[len(obs) - 1][y], y) for y in states])
return (prob, path[state])
def example():
return viterbi(observations,
states,
start_probability,
transition_probability,
emission_probability)
# Note: max([(4,'hello'),(3,'hello'),(10,'hello')]) compares tuples by their first element.
print example()
| [
"[email protected]"
] | |
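A hand-worked check of the first two Viterbi steps, using the same tables as above (Python 3 syntax):

```python
V0_rainy = 0.6 * 0.1   # start('Rainy') * emit('walk'|'Rainy') = 0.06
V0_sunny = 0.4 * 0.6   # start('Sunny') * emit('walk'|'Sunny') = 0.24
# t == 1, observation 'shop': best path into each state
V1_rainy = max(V0_rainy * 0.7, V0_sunny * 0.4) * 0.4   # 0.096 * 0.4 = 0.0384, via Sunny
V1_sunny = max(V0_rainy * 0.3, V0_sunny * 0.6) * 0.3   # 0.144 * 0.3 = 0.0432, via Sunny
print(V0_rainy, V0_sunny, V1_rainy, V1_sunny)
```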
a25a9a45abf6afeb485d96f23c00c3d70ff087dc | b8f9d2cafb8958cdb417f05156acb6aadf90f4dd | /MachineLearning/NetworkAnalysis/PageRank.py | 5d5647d240c30f7abe41a25e7aa9ec6bbe87407e | [] | no_license | Anova07/Data-Science | 8d14f78236de0053e2d31cc8cd85b9c70dfa2c8a | 86dd24fb04a199536ae8f3f5f843aae3fc69c086 | refs/heads/master | 2021-12-08T10:35:35.512188 | 2016-03-06T19:08:58 | 2016-03-06T19:08:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,945 | py | import math, random, re
from collections import defaultdict, Counter, deque
from LinearUtils.Vectors import dotProduct, magnitude, scalarMultiply, shape, distance
from LinearUtils.Matrices import getRow, getCol, generateMatrix
from functools import partial
# Code from Data Science from Scratch - github
users = [
{ "id": 0, "name": "Hero" },
{ "id": 1, "name": "Dunn" },
{ "id": 2, "name": "Sue" },
{ "id": 3, "name": "Chi" },
{ "id": 4, "name": "Thor" },
{ "id": 5, "name": "Clive" },
{ "id": 6, "name": "Hicks" },
{ "id": 7, "name": "Devin" },
{ "id": 8, "name": "Kate" },
{ "id": 9, "name": "Klein" }
]
friendships = [(0, 1), (0, 2), (1, 2), (1, 3), (2, 3), (3, 4),
(4, 5), (5, 6), (5, 7), (6, 8), (7, 8), (8, 9)]
# give each user a friends list
for user in users:
user["friends"] = []
# and fill it
for i, j in friendships:
users[i]["friends"].append(users[j]) # add i as a friend of j
users[j]["friends"].append(users[i]) # add j as a friend of i
endorsements = [(0, 1), (1, 0), (0, 2), (2, 0), (1, 2), (2, 1), (1, 3),
(2, 3), (3, 4), (5, 4), (5, 6), (7, 5), (6, 8), (8, 7), (8, 9)]
def PageRank(users, damping = 0.85, num_iters = 100):
"""
A simplified version looks like this:
1. There is a total of 1.0 (or 100%) PageRank in the network.
2. Initially this PageRank is equally distributed among nodes.
3. At each step, a large fraction of each node’s PageRank is distributed evenly among its outgoing links.
4. At each step, the remainder of each node’s PageRank is distributed evenly among all nodes.
"""
# initially distribute PageRank evenly
num_users = len(users)
pr = { user["id"] : 1 / num_users for user in users }
# this is the small fraction of PageRank
# that each node gets each iteration
base_pr = (1 - damping) / num_users
for __ in range(num_iters):
next_pr = { user["id"] : base_pr for user in users }
for user in users:
# distribute PageRank to outgoing links
links_pr = pr[user["id"]] * damping
for endorsee in user["endorses"]:
next_pr[endorsee["id"]] += links_pr / len(user["endorses"])
pr = next_pr
return pr
if __name__ == "__main__":
for user in users:
user["endorses"] = [] # add one list to track outgoing endorsements
user["endorsed_by"] = [] # and another to track endorsements
for source_id, target_id in endorsements:
users[source_id]["endorses"].append(users[target_id])
users[target_id]["endorsed_by"].append(users[source_id])
endorsements_by_id = [(user["id"], len(user["endorsed_by"]))
for user in users]
sorted(endorsements_by_id, key=lambda pair: pair[1], reverse=True)
print("PageRank")
for user_id, pr in PageRank(users).items():
print(user_id, pr)
| [
"[email protected]"
] | |
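One behavioral note worth checking against the implementation above: rank mass leaks at sink nodes, because a node with no outgoing endorsements never redistributes its damped share. A small probe:

```python
ranks = PageRank(users, damping=0.85, num_iters=100)
print(sum(ranks.values()))        # < 1.0 here: users 4 and 9 endorse nobody
print(max(ranks, key=ranks.get))  # id of the highest-ranked user
```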
c7ef812fb6b1c0a1bcbf2e8e463e19da84748944 | 6b265b404d74b09e1b1e3710e8ea872cd50f4263 | /Python/Exercises/TreeChecker/check_tree_2.0.py | 857bec02ba2b491a4a9f7d5ad9e1b2461082a30e | [
"CC-BY-4.0"
] | permissive | gjbex/training-material | cdc189469ae2c7d43784ecdcb4bcca10ecbc21ae | e748466a2af9f3388a8b0ed091aa061dbfc752d6 | refs/heads/master | 2023-08-17T11:02:27.322865 | 2023-04-27T14:42:55 | 2023-04-27T14:42:55 | 18,587,808 | 130 | 60 | CC-BY-4.0 | 2023-08-03T07:07:25 | 2014-04-09T06:35:58 | Jupyter Notebook | UTF-8 | Python | false | false | 1,828 | py | #!/usr/bin/env python
import sys
class BaseError(Exception):
def __init__(self, position):
super().__init__()
self._position = position
@property
def position(self):
return self._position
def __str__(self):
return self.message
class MissingRBError(BaseError):
def __init__(self, position):
super().__init__(position)
msg = 'missing right bracket for bracket at {0}'
self.message = msg.format(position)
class MissingLBError(BaseError):
def __init__(self, position):
super().__init__(position)
msg = 'missing left bracket for bracket at {0}'
self.message = msg.format(position)
class TrailingCharsError(BaseError):
def __init__(self, position):
super().__init__(position)
self.message = 'trailing characters at position {0}'.format(position)
def check_tree(tree):
bracket_positions = []
position = 1
for character in tree:
if character == '(':
bracket_positions.append(position)
elif character == ')':
if bracket_positions:
bracket_positions.pop()
else:
raise MissingLBError(position)
if len(bracket_positions) == 0:
break
position += 1
if len(bracket_positions) == 0 and position < len(tree) - 1:
raise TrailingCharsError(position + 1)
elif len(bracket_positions) > 0:
raise MissingRBError(bracket_positions.pop())
def main():
tree = ''.join([line.strip() for line in sys.stdin.readlines()])
try:
check_tree(tree)
except BaseError as error:
sys.stderr.write('### error: {0}\n'.format(str(error)))
return 1
else:
return 0
if __name__ == '__main__':
status = main()
sys.exit(status)
| [
"[email protected]"
] | |
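Illustrative direct calls to `check_tree` (the script itself reads the tree from stdin):

```python
check_tree('(A,(B,C))')       # balanced: returns None, no exception
try:
    check_tree('(A,(B,C)')    # the outermost '(' at position 1 is never closed
except MissingRBError as err:
    print(err.position, err)  # -> 1 missing right bracket for bracket at 1
```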
35d3072fb03715d46ecb63d4005ca431e4838776 | b42850bc3e36bbd1683070393582617f2b3cd8e6 | /Inheritance/players_and_monsters/muse_elf.py | a9582d2cc187778ca11c8be953479c42fb935ab3 | [] | no_license | marianidchenko/Python_OOP | aecca18be6df3850c0efbf2fa6d25bf3ff53ae96 | 547c12cbdad5b8c16fa55bba6c03b71db181ad2b | refs/heads/main | 2023-07-09T05:42:43.863681 | 2021-08-14T14:55:51 | 2021-08-14T14:55:51 | 381,572,168 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84 | py | from Inheritance.players_and_monsters.elf import Elf
class MuseElf(Elf):
pass
| [
"[email protected]"
] | |
658b34c8593e518f6e856b6afb5c1d107b89f6bc | 98f1a0bfa5b20a0b81e9e555d76e706c62d949c9 | /examples/pytorch/stgcn_wave/model.py | 2463721f1b38ea34e09db1c8e3b064a7db69e439 | [
"Apache-2.0"
] | permissive | dmlc/dgl | 3a8fbca3a7f0e9adf6e69679ad62948df48dfc42 | bbc8ff6261f2e0d2b5982e992b6fbe545e2a4aa1 | refs/heads/master | 2023-08-31T16:33:21.139163 | 2023-08-31T07:49:22 | 2023-08-31T07:49:22 | 130,375,797 | 12,631 | 3,482 | Apache-2.0 | 2023-09-14T15:48:24 | 2018-04-20T14:49:09 | Python | UTF-8 | Python | false | false | 3,480 | py | import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.init as init
from dgl.nn.pytorch import GraphConv
from dgl.nn.pytorch.conv import ChebConv
class TemporalConvLayer(nn.Module):
"""Temporal convolution layer.
arguments
---------
c_in : int
The number of input channels (features)
c_out : int
The number of output channels (features)
dia : int
The dilation size
"""
def __init__(self, c_in, c_out, dia=1):
super(TemporalConvLayer, self).__init__()
self.c_out = c_out
self.c_in = c_in
self.conv = nn.Conv2d(
c_in, c_out, (2, 1), 1, dilation=dia, padding=(0, 0)
)
def forward(self, x):
return torch.relu(self.conv(x))
class SpatioConvLayer(nn.Module):
def __init__(self, c, Lk): # c : hidden dimension Lk: graph matrix
super(SpatioConvLayer, self).__init__()
self.g = Lk
self.gc = GraphConv(c, c, activation=F.relu)
# self.gc = ChebConv(c, c, 3)
def init(self):
stdv = 1.0 / math.sqrt(self.W.weight.size(1))
self.W.weight.data.uniform_(-stdv, stdv)
def forward(self, x):
x = x.transpose(0, 3)
x = x.transpose(1, 3)
output = self.gc(self.g, x)
output = output.transpose(1, 3)
output = output.transpose(0, 3)
return torch.relu(output)
class FullyConvLayer(nn.Module):
def __init__(self, c):
super(FullyConvLayer, self).__init__()
self.conv = nn.Conv2d(c, 1, 1)
def forward(self, x):
return self.conv(x)
class OutputLayer(nn.Module):
def __init__(self, c, T, n):
super(OutputLayer, self).__init__()
self.tconv1 = nn.Conv2d(c, c, (T, 1), 1, dilation=1, padding=(0, 0))
self.ln = nn.LayerNorm([n, c])
self.tconv2 = nn.Conv2d(c, c, (1, 1), 1, dilation=1, padding=(0, 0))
self.fc = FullyConvLayer(c)
def forward(self, x):
x_t1 = self.tconv1(x)
x_ln = self.ln(x_t1.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)
x_t2 = self.tconv2(x_ln)
return self.fc(x_t2)
class STGCN_WAVE(nn.Module):
def __init__(
self, c, T, n, Lk, p, num_layers, device, control_str="TNTSTNTST"
):
super(STGCN_WAVE, self).__init__()
self.control_str = control_str # model structure controller
self.num_layers = len(control_str)
self.layers = nn.ModuleList([])
cnt = 0
diapower = 0
for i in range(self.num_layers):
i_layer = control_str[i]
if i_layer == "T": # Temporal Layer
self.layers.append(
TemporalConvLayer(c[cnt], c[cnt + 1], dia=2**diapower)
)
diapower += 1
cnt += 1
if i_layer == "S": # Spatio Layer
self.layers.append(SpatioConvLayer(c[cnt], Lk))
if i_layer == "N": # Norm Layer
self.layers.append(nn.LayerNorm([n, c[cnt]]))
self.output = OutputLayer(c[cnt], T + 1 - 2 ** (diapower), n)
for layer in self.layers:
layer = layer.to(device)
def forward(self, x):
for i in range(self.num_layers):
i_layer = self.control_str[i]
if i_layer == "N":
x = self.layers[i](x.permute(0, 2, 3, 1)).permute(0, 3, 1, 2)
else:
x = self.layers[i](x)
return self.output(x)
| [
"[email protected]"
] | |
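A hedged construction sketch for `STGCN_WAVE`: the graph, channel widths, and window length are invented values sized to satisfy the default `control_str` (its four temporal layers use dilations 1, 2, 4, 8 and shrink the time axis by 15, so `T` must be at least 16):

```python
import dgl
import torch

g = dgl.add_self_loop(dgl.rand_graph(207, 1722))  # stand-in sensor graph
channels = [1, 16, 32, 64, 32, 128]               # consumed one step per 'T' layer
model = STGCN_WAVE(channels, T=144, n=207, Lk=g, p=0,
                   num_layers=9, device="cpu", control_str="TNTSTNTST")
x = torch.randn(8, 1, 144, 207)                   # (batch, feature, time, nodes)
print(model(x).shape)                             # expected: (8, 1, 1, 207)
```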
f661b97983d5da36c5d8f23356b77bb41fdbff71 | dd05972a3bf9d15f332fbff420f10afe1977c0d8 | /competition/base_example/aliceTest.py | 76fec14b823615e7488647e1a92bf8e51c2b7006 | [
"BSD-2-Clause"
] | permissive | StephanieWehner/QI-Competition2018 | b70df8c5bb343c534c2c0bd8fc0e7d6bb6183f25 | cc1139c81e39f66b77c046414bcac8de45807557 | refs/heads/master | 2020-03-23T05:45:09.885955 | 2018-08-08T20:03:29 | 2018-08-08T20:03:29 | 141,164,280 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,108 | py | #
# Copyright (c) 2017, Stephanie Wehner and Axel Dahlberg
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. All advertising materials mentioning features or use of this software
# must display the following acknowledgement:
# This product includes software developed by Stephanie Wehner, QuTech.
# 4. Neither the name of the QuTech organization nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from SimulaQron.general.hostConfig import *
from SimulaQron.cqc.backend.cqcHeader import *
from SimulaQron.cqc.pythonLib.cqc import *
from SimulaQron.toolbox.measurements import parity_meas
import random
#####################################################################################################
#
# main
#
def main():
# Initialize the connection
Alice = CQCConnection("Alice")
# Create EPR pairs
q1 = Alice.createEPR("Bob")
q2 = Alice.createEPR("Bob")
# Make sure we order the qubits consistently with Bob
# Get entanglement IDs
q1_ID = q1.get_entInfo().id_AB
q2_ID = q2.get_entInfo().id_AB
if q1_ID < q2_ID:
qa = q1
qc = q2
else:
qa = q2
qc = q1
# Get row
row = 0
# Perform the three measurements
if row == 0:
m0 = parity_meas([qa, qc], "XI", Alice)
m1 = parity_meas([qa, qc], "XX", Alice)
m2 = parity_meas([qa, qc], "IX", Alice)
else:
m0 = 0
m1 = 0
m2 = 0
print("\n")
print("==========================")
print("App {}: row is:".format(Alice.name))
for _ in range(row):
print("(___)")
print("({}{}{})".format(m0, m1, m2))
for _ in range(2-row):
print("(___)")
print("==========================")
print("\n")
# Clear qubits
qa.measure()
qc.measure()
# Stop the connections
Alice.close()
##################################################################################################
main()
| [
"[email protected]"
] | |
986bf659063dbb4023eaaf094cd1d3cccd06ebdb | 44dbb043e52f00c9a797b1bea8f1df50dd621842 | /os-example-4.py | 69064074cfa33ba2ae8384a237bc9351ebad664a | [] | no_license | peterdocter/standardmodels | 140c238d3bef31db59641087e3f3d5413d4baba1 | 7addc313c16b416d0970461998885833614570ad | refs/heads/master | 2020-12-30T16:59:30.489486 | 2016-12-13T06:32:03 | 2016-12-13T06:32:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | import os
# where are we?
cwd = os.getcwd()
print "1", cwd
# go down
os.chdir("samples")
print "2", os.getcwd()
# go back up
os.chdir(os.pardir)
print "3", os.getcwd() | [
"[email protected]"
] | |
235af1bbc670e956e37e472b363d092d53a2e10f | 7927424f1983eecc7c7b2f0ebaf61ad552d2a7e7 | /zigzag.py | 1e4ea4b1030d84d3446c45f2f19960e1f1f9aafc | [] | no_license | 6reg/automate | 295931d3ecf0e69e01921cc45d452fadfd1e6581 | 11e5de461ece3d8d111f3dc13de088788baf19a2 | refs/heads/main | 2023-03-08T18:39:42.991280 | 2021-02-22T20:53:13 | 2021-02-22T20:53:13 | 334,780,031 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | import time, sys
indent = 0 # How many spaces to indent
indentIncreasing = True # Whether the indentation is increasing or not
try:
while True: # The main program loop.
print(' ' * indent, end='')
print('********')
time.sleep(0.1) # Pause for the 1/10 of a second.
if indentIncreasing:
# Increase the number of spaces:
indent = indent + 1
if indent == 20:
# Change direction:
indentIncreasing = False
else:
# Decrease the number of spaces:
indent = indent - 1
if indent == 0:
# Change direction:
indentIncreasing = True
except KeyboardInterrupt:
sys.exit()
| [
"[email protected]"
] | |
696193e4863c900c995b49d8854b2fd947ef2ebd | 9dc21ebb553fd116826c7cbae7d8c5eba47423d1 | /cloneGraph.py | 81681ac2a31cf11b69ac78e24d755d692f4aee77 | [] | no_license | KJSui/leetcode-2020 | a475a8b8481231757222c5afaad2856a92572f89 | 37cf89e7fb1351b1deff09271d9bb5852395054e | refs/heads/main | 2023-04-05T19:46:25.647605 | 2021-05-06T20:40:06 | 2021-05-06T20:40:06 | 365,031,592 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py | class Solution:
    def __init__(self):
        self.copy = {}  # original node -> its clone; lets cycles terminate

    def cloneGraph(self, node):
        if not node:
            return None
        if node in self.copy:
            return self.copy[node]     # reuse an already-built clone
        newNode = Node(node.val)
        self.copy[node] = newNode      # register before recursing to break cycles
        # bug fix: iterate the node's neighbors, not the (empty) result list
        newNode.neighbors = [self.cloneGraph(i) for i in node.neighbors]
        return newNode
| [
"[email protected]"
] | |
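A minimal check of the fixed clone, assuming the usual LeetCode-style `Node` class, which the snippet itself does not define:

```python
class Node:
    def __init__(self, val=0, neighbors=None):
        self.val = val
        self.neighbors = neighbors if neighbors is not None else []

a, b = Node(1), Node(2)
a.neighbors, b.neighbors = [b], [a]      # a two-node cycle
clone = Solution().cloneGraph(a)
print(clone.val, clone.neighbors[0].val)                         # 1 2
print(clone is not a, clone.neighbors[0].neighbors[0] is clone)  # True True
```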
95b2abdf3b691a753c2587061a681df8fd8851d1 | bb33e6be8316f35decbb2b81badf2b6dcf7df515 | /source/res/scripts/client/messenger/proto/xmpp/extensions/chat.py | 567a173fdee232fd567d9e3a472d0a0c272f68b0 | [] | no_license | StranikS-Scan/WorldOfTanks-Decompiled | 999c9567de38c32c760ab72c21c00ea7bc20990c | d2fe9c195825ececc728e87a02983908b7ea9199 | refs/heads/1.18 | 2023-08-25T17:39:27.718097 | 2022-09-22T06:49:44 | 2022-09-22T06:49:44 | 148,696,315 | 103 | 39 | null | 2022-09-14T17:50:03 | 2018-09-13T20:49:11 | Python | UTF-8 | Python | false | false | 9,509 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/messenger/proto/xmpp/extensions/chat.py
import calendar
from datetime import datetime
import json
import time
from debug_utils import LOG_CURRENT_EXCEPTION
from messenger.proto.xmpp.extensions import PyExtension, PyHandler, PyQuery
from messenger.proto.xmpp.extensions.dataform import DataForm, Field
from messenger.proto.xmpp.extensions.ext_constants import XML_NAME_SPACE as _NS
from messenger.proto.xmpp.extensions.ext_constants import XML_TAG_NAME as _TAG
from messenger.proto.xmpp.extensions.shared_handlers import IQHandler
from messenger.proto.xmpp.extensions.shared_queries import MessageQuery
from messenger.proto.xmpp.extensions.shared_queries import PresenceQuery
from messenger.proto.xmpp.extensions.wg_items import WgSharedExtension
from messenger.proto.xmpp.gloox_constants import IQ_TYPE, CHAT_STATE, MESSAGE_TYPE_ATTR, PRESENCE
from messenger.proto.xmpp.wrappers import ChatMessage
class ChatStateExtension(PyExtension):
def __init__(self, state=CHAT_STATE.UNDEFINED):
super(ChatStateExtension, self).__init__(state)
self.setXmlNs(_NS.CHAT_STATES)
@classmethod
def getDefaultData(cls):
return CHAT_STATE.UNDEFINED
def getXPath(self, index=None, suffix='', name=None):
if self.getName() == CHAT_STATE.UNDEFINED:
paths = []
getXPath = super(ChatStateExtension, self).getXPath
for state in CHAT_STATE.RANGE:
paths.append(getXPath(index, suffix, state))
name = paths
else:
name = super(ChatStateExtension, self).getXPath(index, suffix, name)
return name
def parseTag(self, pyGlooxTag):
result = pyGlooxTag.filterXPath('|'.join(CHAT_STATE.RANGE))
if result:
state = result[0].getTagName()
if state not in CHAT_STATE.RANGE:
state = self.getDefaultData()
else:
state = self.getDefaultData()
return state
class DelayExtension(PyExtension):
def __init__(self):
super(DelayExtension, self).__init__(_TAG.DELAY)
self.setXmlNs(_NS.DELAY)
@classmethod
def getDefaultData(cls):
return time.time()
def parseTag(self, pyGlooxTag):
stamp = pyGlooxTag.findAttribute('stamp')
if stamp:
try:
tm = time.strptime(stamp, '%Y-%m-%dT%H:%M:%SZ')
tm = tm[0:8] + (0,)
sentAt = calendar.timegm(tm)
except ValueError:
try:
dt = datetime.strptime(stamp, '%Y-%m-%dT%H:%M:%S.%fZ')
sentAt = calendar.timegm(dt.timetuple()) + dt.microsecond / 1000000.0
except ValueError:
LOG_CURRENT_EXCEPTION()
sentAt = self.getDefaultData()
else:
sentAt = self.getDefaultData()
return sentAt
class MessageIDExtension(PyExtension):
def __init__(self):
super(MessageIDExtension, self).__init__(_TAG.WG_MESSAGE_ID)
self.setXmlNs(_NS.WG_MESSAGE_ID)
@classmethod
def getDefaultData(cls):
pass
def parseTag(self, pyGlooxTag):
return pyGlooxTag.findAttribute('uuid')
class ChatHistoryQuery(PyExtension):
def __init__(self, jid, limit):
super(ChatHistoryQuery, self).__init__(_TAG.QUERY)
self.setXmlNs(_NS.WG_PRIVATE_HISTORY)
self.setAttribute('with', str(jid))
self.setAttribute('limit', limit)
class PrivateHistoryItem(PyExtension):
def __init__(self):
super(PrivateHistoryItem, self).__init__(_TAG.WG_PRIVATE_HISTORY)
self.setXmlNs(_NS.WG_PRIVATE_HISTORY)
@classmethod
def getDefaultData(cls):
return ('', False)
def parseTag(self, pyGlooxTag):
requestID = pyGlooxTag.findAttribute('request-id')
isFinal = pyGlooxTag.findAttribute('final')
if isFinal:
isFinal = json.loads(isFinal)
else:
isFinal = False
return (requestID, isFinal)
class _MucPrivilegesExtension(PyExtension):
def __init__(self, affiliation='', role=''):
super(_MucPrivilegesExtension, self).__init__(_TAG.WG_MUC_PRIVILEGES)
self.setAttribute('affiliation', affiliation)
self.setAttribute('role', role)
@classmethod
def getDefaultData(cls):
pass
def parseTag(self, pyGlooxTag):
affiliation = pyGlooxTag.findAttribute('affiliation') or 'none'
role = pyGlooxTag.findAttribute('role') or 'none'
return (affiliation, role)
class MessageWgSharedExtension(WgSharedExtension):
def __init__(self, includeNS=True):
super(MessageWgSharedExtension, self).__init__(includeNS)
self.setChild(_MucPrivilegesExtension())
@classmethod
def getDefaultData(cls):
return super(MessageWgSharedExtension, cls).getDefaultData()
def parseTag(self, pyGlooxTag):
info = super(MessageWgSharedExtension, self).parseTag(pyGlooxTag)
affiliation, role = self._getChildData(pyGlooxTag, 0, _MucPrivilegesExtension.getDefaultData())
info['affiliation'] = affiliation
info['role'] = role
return info
class _MessageCustomExtension(PyExtension):
def __init__(self, msgType, state=CHAT_STATE.UNDEFINED):
super(_MessageCustomExtension, self).__init__(_TAG.MESSAGE)
self.setAttribute('type', msgType)
self.setChild(ChatStateExtension(state))
self.setChild(MessageWgSharedExtension(False))
self.setChild(DelayExtension())
self.setChild(MessageIDExtension())
self.setChild(PrivateHistoryItem())
@classmethod
def getDefaultData(cls):
return ChatMessage()
def parseTag(self, pyGlooxTag):
message = ChatMessage()
message.state = self._getChildData(pyGlooxTag, 0, ChatStateExtension.getDefaultData())
info = self._getChildData(pyGlooxTag, 1, MessageWgSharedExtension.getDefaultData())
if info:
message.accountDBID = info['dbID']
message.accountName = info['name']
message.accountRole = info['role']
message.accountAffiliation = info['affiliation']
message.sentAt = self._getChildData(pyGlooxTag, 2, DelayExtension.getDefaultData())
message.uuid = self._getChildData(pyGlooxTag, 3, MessageIDExtension.getDefaultData())
message.requestID, message.isFinalInHistory = self._getChildData(pyGlooxTag, 4, PrivateHistoryItem.getDefaultData())
return message
class ChatMessageHolder(MessageQuery):
def __init__(self, msgType, to, msgBody='', state=CHAT_STATE.UNDEFINED):
if state:
ext = ChatStateExtension(state)
else:
ext = None
super(ChatMessageHolder, self).__init__(msgType, to, msgBody, ext)
return
class MessageHandler(PyHandler):
__slots__ = ('_typeAttr',)
def __init__(self, typeAttr):
self._typeAttr = typeAttr
super(MessageHandler, self).__init__(_MessageCustomExtension(self._typeAttr, CHAT_STATE.UNDEFINED))
def getFilterString(self):
return "/{0}[@type='{1}']".format(self._ext.getName(), self._typeAttr)
class ChatMessageHandler(MessageHandler):
def __init__(self):
super(ChatMessageHandler, self).__init__(MESSAGE_TYPE_ATTR.CHAT)
class GetChatHistoryQuery(PyQuery):
def __init__(self, jid, limit):
super(GetChatHistoryQuery, self).__init__(IQ_TYPE.GET, ChatHistoryQuery(jid, limit))
class MUCEntryQuery(PresenceQuery):
def __init__(self, to):
super(MUCEntryQuery, self).__init__(PRESENCE.AVAILABLE, to)
class MUCLeaveQuery(PresenceQuery):
def __init__(self, to):
super(MUCLeaveQuery, self).__init__(PRESENCE.UNAVAILABLE, to)
class OwnerConfigurationForm(PyExtension):
def __init__(self, fields=None):
super(OwnerConfigurationForm, self).__init__(_TAG.QUERY)
self.setXmlNs(_NS.MUC_OWNER)
self.setChild(DataForm(fields))
@classmethod
def getDefaultData(cls):
return DataForm.getDefaultData()
def parseTag(self, pyGlooxTag):
return self._getChildData(pyGlooxTag, 0, DataForm.getDefaultData())
class OwnerConfigurationFormQuery(PyQuery):
def __init__(self, to):
super(OwnerConfigurationFormQuery, self).__init__(IQ_TYPE.GET, OwnerConfigurationForm(), to)
class OwnerConfigurationFormSet(PyQuery):
def __init__(self, to, fields):
super(OwnerConfigurationFormSet, self).__init__(IQ_TYPE.SET, OwnerConfigurationForm(fields), to)
class OwnerConfigurationFormHandler(IQHandler):
def __init__(self):
super(OwnerConfigurationFormHandler, self).__init__(OwnerConfigurationForm())
class UserRoomConfigurationFormSet(OwnerConfigurationFormSet):
def __init__(self, to, room, password=''):
fields = (Field('text-single', 'muc#roomconfig_roomname', room),
Field('boolean', 'muc#roomconfig_persistentroom', 1),
Field('boolean', 'muc#roomconfig_publicroom', 1),
Field('boolean', 'muc#roomconfig_membersonly', 0),
Field('boolean', 'muc#roomconfig_allowinvites', 1),
Field('boolean', 'muc#roomconfig_survive_reboot', 1))
if password:
fields += (Field('boolean', 'muc#roomconfig_passwordprotectedroom', 1), Field('text-single', 'muc#roomconfig_roomsecret', password))
super(UserRoomConfigurationFormSet, self).__init__(to, fields)
| [
"[email protected]"
] | |
97d55e2aec24c8c3c273787b6a0bfb6e207c6ee0 | c261f0e98eedb4f0d85e92bd6ab8f4ae47096269 | /lifeservice/schedule117/04美食下载团购糯米/getNuomiOtherCinemaMap.py | 7e6d7d90119847ca9a6a6e964889df38e7707452 | [] | no_license | ShenDezhou/CPP | 24379fe24f3c8588a7859ee586527d5cc6bfbe73 | 933c1e764a6ed2879b26aa548ff67153ca026bf6 | refs/heads/master | 2021-01-11T22:09:24.900695 | 2017-04-05T02:04:07 | 2017-04-05T02:04:07 | 78,928,291 | 0 | 1 | null | null | null | null | GB18030 | Python | false | false | 1,328 | py |
#coding=gb2312
nuomiCinemaMap = dict()
otherCinemaMap = dict()
input = '/fuwu/Merger/Output/movie/cinema_movie_rel.table'
for line in open(input):
segs = line.strip('\n').decode('gb2312', 'ignore').split('\t')
cinemaid, source, ting = segs[1], segs[3], segs[9]
if source.find(u'糯米') != -1:
if cinemaid not in nuomiCinemaMap:
nuomiCinemaMap[cinemaid] = []
if ting not in nuomiCinemaMap[cinemaid]:
nuomiCinemaMap[cinemaid].append(ting)
else:
if cinemaid not in otherCinemaMap:
otherCinemaMap[cinemaid] = []
if ting not in otherCinemaMap[cinemaid]:
otherCinemaMap[cinemaid].append(ting)
# Check whether every Nuomi cinema's hall names are covered by the other sources
for cinemaid in otherCinemaMap:
if cinemaid not in nuomiCinemaMap:
#print ('#%s\t%s\t%s' % (cinemaid, u'糯米', '\t'.join(nuomiCinemaMap[cinemaid]))).encode('gb2312', 'ignore')
continue
noMatchTingList = []
for ting in nuomiCinemaMap[cinemaid]:
if ting not in otherCinemaMap[cinemaid]:
noMatchTingList.append(ting)
if len(noMatchTingList) == 0:
continue
    # a mismatch exists for this cinema
normTing = '\t'.join(otherCinemaMap[cinemaid])
noMatchTing = '\t'.join(noMatchTingList)
print ('%s\t%s\t%s' % (cinemaid, u'非糯米', normTing)).encode('gb2312', 'ignore')
print ('%s\t%s\t%s' % (cinemaid, u'糯米', noMatchTing)).encode('gb2312', 'ignore')
| [
"[email protected]"
] | |
b6e8f2be226188fbb1defabbcc1d134f8fc8e070 | 3570f2e7b8d5666cbd2d29a4c75965a75699a3e2 | /pyodbc/run_test.py | 1b0460f4bd5adc94625a5a8b380978050e9a9c4a | [] | no_license | ilanschnell/recipes | 7876225db2eb08b21d4d1ab426d40f94650192fd | c946b446a002d55ecffff6ce789cf9dcb57a65a6 | refs/heads/master | 2023-08-19T19:40:17.750037 | 2022-01-21T00:27:38 | 2022-01-21T00:27:38 | 119,077,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,181 | py | import sys
from os.path import isfile
print(sys.version)
print(sys.executable)
import pyodbc
print(pyodbc)
if sys.platform == 'darwin':
driver_path = '/Users/ilan/a/envs/py38/lib/libsqlite3odbc.dylib'
elif sys.platform.startswith('linux'):
driver_path = '/home/osboxes/bin/libsqlite3odbc-0.9996.so'
if not isfile(driver_path):
raise Exception('so such file: %r' % driver_path)
connect_string = (
"DRIVER={%s};SERVER=localhost;DATABASE=./test.sqlite;Trusted_connection=yes"
% driver_path
)
cnxn = pyodbc.connect(connect_string)
cursor = cnxn.cursor()
try:
cursor.execute('drop table foo')
except:
pass
cursor.execute('create table foo (symbol varchar(5), price float)')
N = 1000
for i in range(N):
cursor.execute("insert into foo (symbol, price) values (?, ?)",
(str(i), float(i)))
cursor.execute("commit")
cursor.execute("select * from foo")
dictarray = cursor.fetchdictarray()
cursor.close()
for i in range(N):
assert dictarray['symbol'][i] == str(i)
assert (dictarray['price'][i] - float(i)) < 1E-10
# tab completion fails in ipython for pyodbc.Cursor
assert pyodbc.Cursor.fetchdictarray.__doc__
print("Done.")
| [
"[email protected]"
] | |
2cbf9ce5648b670ee81e72a542610d78690a54f4 | 1097ed333a4000634e68a590ee6ffc6129ae61e3 | /written_examination/matrix8.py | 017cb25ae0dcc0f546bd9b3cf05825723bb344a7 | [
"MIT"
] | permissive | AutuanLiu/Code-Storm2019 | 1bbe890c7ca0d033c32348173bfebba612623a90 | 8efc7c5475fd888f7d86c3b08a3c1c9e55c1ac30 | refs/heads/master | 2020-04-23T07:03:08.975232 | 2019-10-24T08:56:26 | 2019-10-24T08:56:26 | 170,995,032 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,513 | py | def getSum(i, j, n, m, maps): # (i, j): entry cell of one cluster; (n, m): grid size; maps: the grid
    queue, sump, maps[i][j] = [[i, j]], maps[i][j], 0  # initialize the BFS queue
    while queue:
        x, y = queue[0][0], queue[0][1]  # peek the head of the queue
        for dx, dy in zip((-1, -1, 0, 1, 1, 1, 0, -1), (0, 1, 1, 1, 0, -1, -1, -1)):  # 8 directions
            nx, ny = x + dx, y + dy
            if -1 < nx < n and -1 < ny < m and maps[nx][ny] != 0:
                queue.append([nx, ny])  # enqueue
                sump += maps[nx][ny]  # accumulate troops
                maps[nx][ny] = 0  # zero a visited cell so it is counted only once
        del queue[0]  # dequeue
    return sump  # total troops of this connected cluster
if __name__ == '__main__':
maps = [[34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 30], [0, 23, 10, 5, 5, 0, 0, 0, 5, 5, 5, 5, 5, 0, 0, 0, 30, 0, 40, 0],
[0, 9, 0, 0, 5, 0, 0, 0, 4, 4, 4, 4, 4, 0, 0, 0, 0, 30, 0, 0], [0, 8, 7, 7, 0, 5, 0, 0, 3, 3, 3, 3, 0, 0, 0, 0, 7, 0, 9, 0],
[0, 9, 0, 0, 5, 0, 5, 0, 0, 12, 12, 0, 0, 0, 0, 10, 0, 0, 0, 9], [0, 0, 0, 0, 5, 0, 0, 5, 0, 12, 12, 0, 0, 5, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 12, 0, 0, 5, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0], [40, 30, 3, 6, 6, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 0, 0, 0, 10, 0],
[0, 0, 20, 0, 0, 6, 6, 0, 0, 0, 0, 0, 0, 0, 5, 6, 5, 10, 10, 0], [40, 30, 3, 7, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 10, 0],
[0, 0, 0, 0, 0, 0, 0, 17, 0, 0, 0, 0, 17, 0, 0, 6, 5, 7, 7, 0], [0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 20, 0, 0, 7, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 10, 0, 0, 0], [0, 20, 0, 0, 7, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 10, 0, 0, 0],
[0, 20, 0, 0, 7, 0, 0, 0, 0, 4, 4, 0, 0, 0, 0, 0, 10, 0, 0, 0], [0, 30, 0, 7, 0, 0, 0, 0, 0, 5, 5, 0, 0, 0, 0, 0, 0, 10, 0, 50],
[0, 40, 7, 0, 0, 0, 0, 0, 0, 5, 5, 0, 0, 0, 0, 0, 0, 0, 50, 0], [43, 30, 25, 10, 50, 0, 0, 0, 6, 6, 6, 6, 0, 0, 0, 0, 0, 50, 0, 0]]
    n, m = 20, 20  # grid dimensions (rows, columns)
army = []
for i in range(20):
for j in range(20):
if maps[i][j] != 0:
                army.append(getSum(i, j, n, m, maps))  # collect the troop total of each cluster
    print('Troop total of each cluster:', army)
    print('Largest single-cluster force:', max(army))
    print('Smallest single-cluster force:', min(army))
| [
"[email protected]"
] | |
b11a8a7651e0f8dc115584ee90faf956ed6a1f89 | 997449072baa8e50a143ae1152fd4fa83c8e1068 | /devel/.private/rrtplanner/lib/python2.7/dist-packages/rrtplanner/msg/_rrtResult.py | 7672fe8883172dee48ff70b467d5d95c919942d0 | [] | no_license | idrissahil/catkin_ws | c547a6f7be812cc0bb1a93042026f746d34e7e70 | b5d8b60c882b60bb19b8d4529257ca513b8256e3 | refs/heads/master | 2022-01-24T12:51:28.038620 | 2019-06-02T16:05:45 | 2019-06-02T16:05:45 | 175,048,655 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,030 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from rrtplanner/rrtResult.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import geometry_msgs.msg
import nav_msgs.msg
import std_msgs.msg
class rrtResult(genpy.Message):
_md5sum = "58d6f138c7de7ef47c75d4b7e5df5472"
_type = "rrtplanner/rrtResult"
_has_header = False #flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
# Define the result
nav_msgs/Path path
================================================================================
MSG: nav_msgs/Path
#An array of poses that represents a Path for a robot to follow
Header header
geometry_msgs/PoseStamped[] poses
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
"""
__slots__ = ['path']
_slot_types = ['nav_msgs/Path']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
path
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(rrtResult, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.path is None:
self.path = nav_msgs.msg.Path()
else:
self.path = nav_msgs.msg.Path()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.path.header.seq, _x.path.header.stamp.secs, _x.path.header.stamp.nsecs))
_x = self.path.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.path.poses)
buff.write(_struct_I.pack(length))
for val1 in self.path.poses:
_v1 = val1.header
buff.write(_get_struct_I().pack(_v1.seq))
_v2 = _v1.stamp
_x = _v2
buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
_x = _v1.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v3 = val1.pose
_v4 = _v3.position
_x = _v4
buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
_v5 = _v3.orientation
_x = _v5
buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.path is None:
self.path = nav_msgs.msg.Path()
end = 0
_x = self
start = end
end += 12
(_x.path.header.seq, _x.path.header.stamp.secs, _x.path.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.path.header.frame_id = str[start:end].decode('utf-8')
else:
self.path.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.path.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.PoseStamped()
_v6 = val1.header
start = end
end += 4
(_v6.seq,) = _get_struct_I().unpack(str[start:end])
_v7 = _v6.stamp
_x = _v7
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v6.frame_id = str[start:end].decode('utf-8')
else:
_v6.frame_id = str[start:end]
_v8 = val1.pose
_v9 = _v8.position
_x = _v9
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v10 = _v8.orientation
_x = _v10
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
self.path.poses.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.path.header.seq, _x.path.header.stamp.secs, _x.path.header.stamp.nsecs))
_x = self.path.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.path.poses)
buff.write(_struct_I.pack(length))
for val1 in self.path.poses:
_v11 = val1.header
buff.write(_get_struct_I().pack(_v11.seq))
_v12 = _v11.stamp
_x = _v12
buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
_x = _v11.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v13 = val1.pose
_v14 = _v13.position
_x = _v14
buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
_v15 = _v13.orientation
_x = _v15
buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.path is None:
self.path = nav_msgs.msg.Path()
end = 0
_x = self
start = end
end += 12
(_x.path.header.seq, _x.path.header.stamp.secs, _x.path.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.path.header.frame_id = str[start:end].decode('utf-8')
else:
self.path.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.path.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.PoseStamped()
_v16 = val1.header
start = end
end += 4
(_v16.seq,) = _get_struct_I().unpack(str[start:end])
_v17 = _v16.stamp
_x = _v17
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v16.frame_id = str[start:end].decode('utf-8')
else:
_v16.frame_id = str[start:end]
_v18 = val1.pose
_v19 = _v18.position
_x = _v19
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v20 = _v18.orientation
_x = _v20
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
self.path.poses.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_4d = None
def _get_struct_4d():
global _struct_4d
if _struct_4d is None:
_struct_4d = struct.Struct("<4d")
return _struct_4d
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
_struct_2I = None
def _get_struct_2I():
global _struct_2I
if _struct_2I is None:
_struct_2I = struct.Struct("<2I")
return _struct_2I
_struct_3d = None
def _get_struct_3d():
global _struct_3d
if _struct_3d is None:
_struct_3d = struct.Struct("<3d")
return _struct_3d
| [
"[email protected]"
] | |
ada7809ed008445486cb53ed74ffb2f3f533ab06 | c05ed32f1ef7e1eb7d73efd674e7d1fd710ad171 | /daily-coding-problems/problem429.py | f131f4e79b05103324b498c75f6d6f5240e45cd3 | [] | no_license | carlhinderer/python-exercises | c8367517fdf835fa1117f96dbfee3dccc596afa6 | 4e09bbb4c4e2bd5644ed50e997db9f3c289a18f7 | refs/heads/master | 2021-06-01T16:17:00.389134 | 2021-02-09T18:21:01 | 2021-02-09T18:21:01 | 150,902,917 | 0 | 0 | null | 2021-04-20T20:33:11 | 2018-09-29T21:03:36 | Python | UTF-8 | Python | false | false | 533 | py | # Problem 429
# Medium
# Asked by Stitch Fix
#
# Pascal's triangle is a triangular array of integers constructed with the
# following formula:
#
# The first row consists of the number 1.
#
# For each subsequent row, each element is the sum of the numbers directly
# above it, on either side.
#
# For example, here are the first few rows:
#
# 1
# 1 1
# 1 2 1
# 1 3 3 1
# 1 4 6 4 1
#
# Given an input k, return the kth row of Pascal's triangle.
#
# Bonus: Can you do this using only O(k) space?
# | [
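# --- Illustrative solution sketch (not part of the original problem file) ---
# One possible approach, assuming 0-indexed rows: build the row in place so
# only O(k) extra space is used, which also answers the bonus question.
def pascal_row(k):
    row = [1]
    for _ in range(k):
        row.append(1)
        # Update right-to-left so each sum reads values from the previous row.
        for i in range(len(row) - 2, 0, -1):
            row[i] += row[i - 1]
    return row

assert pascal_row(4) == [1, 4, 6, 4, 1]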
"[email protected]"
] | |
76b07fab07edb0667ffdda682c409887fdab50cc | 2cf99a155405b48bf14f872e1980ed948079e5dd | /test/test_router.py | a30b567e256a3ea2fe3ba97d23c6ab0b5d1539e8 | [
"MIT"
] | permissive | marrow/web.dispatch.route | c15309a26023d068b8f84ea4bbc221b674c1e6b8 | 92494bcad2e2a9a52d2e51eecfab910d829cc2de | refs/heads/master | 2021-01-25T04:01:46.245851 | 2016-02-15T07:54:36 | 2016-02-15T07:54:36 | 32,564,808 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,820 | py | # encoding: utf-8
import pytest
from web.dispatch.route.router import __DYNAMIC__, Router
from sample import Root
@pytest.fixture
def router():
return Router.from_object(Root)
def test_dynamic_repr():
assert repr(__DYNAMIC__) == '<dynamic element>'
def test_router_singleton():
assert Router.from_object(Root) is Router.from_object(Root)
def test_invalid_route():
router = Router()
with pytest.raises(ValueError):
router.parse("{bad:/}")
class TestRouterSample(object):
def test_single_static(self, router):
assert len(router.routes) == 1 # There's only a single top-level element.
assert 'user' in router.routes # It's "user".
assert len(router.routes['user']) == 2 # Which has a terminus and dynamic continuation.
assert router.routes['user'][None] == Root.root # The terminus is the "root" method.
assert router.routes['user'][None](Root()) == "I'm all people." # It really is.
def test_dynamic_username(self, router):
assert __DYNAMIC__ in router.routes['user']
dynamic = router.routes['user'][__DYNAMIC__]
assert len(dynamic) == 1
assert list(dynamic.keys())[0].match("GothAlice") # The regular expression matches.
assert len(list(dynamic.values())[0]) == 2
assert list(dynamic.values())[0][None] == Root.user
assert list(dynamic.values())[0][None](Root(), "GothAlice") == "Hi, I'm GothAlice"
def test_dynamic_username_action(self, router):
assert __DYNAMIC__ in router.routes['user']
dynamic = router.routes['user'][__DYNAMIC__]
assert len(dynamic) == 1
assert list(dynamic.keys())[0].match("GothAlice") # The regular expression matches.
assert len(list(dynamic.values())[0]) == 2
assert list(dynamic.values())[0][None] == Root.user
assert list(dynamic.values())[0][None](Root(), "GothAlice") == "Hi, I'm GothAlice"
| [
"[email protected]"
] | |
8bacb8e843f98006b0d409848f10edb92140f035 | f160cf4eb335ea799559312ac3d43a60c2c5848b | /library/zip_extract.py | e1f1faecce940706c2ead17d0b449c0c1525aa28 | [
"MIT"
] | permissive | baseplate-admin/Machine-Learning-Source-Code | c3389e0acb81e1f4c8e4c0cc763fcbc3781ef94e | a2203033d525c17b31584b52527c30e2c8aad1c4 | refs/heads/master | 2022-11-21T04:33:41.307477 | 2020-07-10T15:46:32 | 2020-07-10T15:46:32 | 277,730,993 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,211 | py | def zip_extract():
import os
from zipfile import ZipFile
def zip_function():
print("We are extracting ZIP!!!")
where_is_zip=input("What is your zip location?")
what_is_zip_name=input("What is your zip name?")
what_is_zip_extension=input("What is your ZIP format?")
zip_join=os.path.join(where_is_zip,what_is_zip_name+ '.'+ what_is_zip_extension)
with ZipFile(zip_join,"r") as zip:
zip.extractall()
zip.printdir()
print("Enter a Number or It will cause ValueError.")
how_many_zip=int(input('How many zip do you want to extract?'))
try:
print("""
This is a number!!
Lets Go!!!
""")
for i in range(how_many_zip):
ask_if_zip_extract=input("""
Do you want to extract zip?
Enter 0 to skip extracting zip.
Enter 1 to to extract ZIP.
""")
if int(ask_if_zip_extract)==0:
zip_function(2)
elif int(ask_if_zip_extract)==1:
zip_function(1)
else:
print("Theres a problem with zip extract.")
except Exception as e:
print(e)
| [
"[email protected]"
] | |
52722c46ff54f9d588bdd4cd1a24506d64dacd60 | bcc2d156334d3680561b17cec82cbc31a5ea07ad | /String/22. Generate Parentheses.py | 2431fefda0dcde528d7eafd0b65a378afe0ebe31 | [] | no_license | kevinsshah/Leetcode | 72b14e226b6881bcd18913b2fa132b0e3f8dd6ef | 4419f46e6f6b1d96ff8b7066fce687cfa88e65a0 | refs/heads/master | 2020-03-25T23:00:49.851183 | 2018-09-08T04:13:27 | 2018-09-08T04:13:27 | 144,255,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,129 | py | # Given n pairs of parentheses, write a function to generate all combinations of well-formed parentheses.
#
# For example, given n = 3, a solution set is:
#
# [
# "((()))",
# "(()())",
# "(())()",
# "()(())",
# "()()()"
# ]
class Solution(object):
def generateParenthesis(self, n):
"""
:type n: int
:rtype: List[str]
"""
# def helper(A = []):
# if len(A) == 2*n:
# if isValid(A):
# ans.append("".join(A))
# else:
# A.append("(")
# helper(A)
# A.pop()
# A.append(")")
# helper(A)
# A.pop()
# def isValid(A):
# bal = 0
# for c in A:
# if c == "(":
# bal+=1
# else:
# bal -= 1
# if bal < 0:
# return False
# return bal == 0
# ans = []
# helper()
# return ans
# def backtrack(S = '', left = 0, right = 0):
# if len(S) == 2*n:
# ans.append(S)
# return
# if left < n:
# backtrack(S+"(", left + 1, right)
# if right < left:
# backtrack(S+")", left, right + 1)
# ans = []
# backtrack()
# return ans
ans = []
def helper(left, right, string, ans):
if right < left:
return
if not left and not right:
ans.append(string)
return
if left:
helper(left - 1, right, string + "(", ans)
if right:
helper(left, right - 1, string + ")", ans)
helper(n, n, "", ans)
return ans | [
"[email protected]"
] | |
846876364bc01fda2b044a0b561e2709369cd56c | 268d9c21243e12609462ebbd6bf6859d981d2356 | /Python/python_stack/Django/BeltReview/main/apps/books/models.py | fddd59aa3b548da3b7fdfa2c3d3484b1350a19f0 | [] | no_license | dkang417/cdj | f840962c3fa8e14146588eeb49ce7dbd08b8ff4c | 9966b04af1ac8a799421d97a9231bf0a0a0d8745 | refs/heads/master | 2020-03-10T03:29:05.053821 | 2018-05-23T02:02:07 | 2018-05-23T02:02:07 | 129,166,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,886 | py | from __future__ import unicode_literals
from django.db import models
from django import forms
from django.core.exceptions import ValidationError
# Create your models here.
class UserManager(models.Manager):
def basic_validator(self,postData):
errors={}
#validate password
if len(postData['password']) < 8:
errors["password"] = "password should be more than 8 characters"
#checks that the passwords match
if postData['password'] != postData['confirm']:
errors["confirm"] = "passwords do not match"
return errors
class User(models.Model):
name = models.CharField(max_length=255)
alias = models.CharField(max_length=255)
email = models.CharField(max_length=255)
password = models.CharField(max_length=255)
created_at = models.DateTimeField(auto_now_add = True)
updated_at = models.DateTimeField(auto_now = True)
objects = UserManager()
class AuthorManager(models.Manager):
def validate_author(request, postData):
errors = {}
return errors
class Author(models.Model):
author = models.CharField(max_length=255)
objects = AuthorManager()
class BookManager(models.Manager):
def validate_book(request,postData):
errors = {}
return errors
class Book(models.Model):
title = models.CharField(max_length=255)
author = models.ForeignKey(Author, related_name="books")
created_at = models.DateTimeField(auto_now_add = True)
updated_at = models.DateTimeField(auto_now = True)
objects = BookManager()
class ReviewManager(models.Manager):
def validate_review(request, postData):
errors = {}
return errors
class Review(models.Model):
rating = models.IntegerField()
comment = models.TextField()
created_at = models.DateTimeField(auto_now_add = True)
updated_at = models.DateTimeField(auto_now = True)
book = models.ForeignKey(Book, related_name="reviews")
user = models.ForeignKey(User, related_name="reviews")
objects = ReviewManager()
| [
"[email protected]"
] | |
3e4331ea4515d8ab9a244201033c44ae2211e3db | d4cd2476f8fa8a7d94e183a68bd0678971310c5b | /checkio/06_Ice_Base/06_IceBase_04_FunnyAddition.py | 9030b3fb8d1063f001b7c9e2d024d3d76144968e | [] | no_license | gwqw/LessonsSolution | b495579f6d5b483c30d290bfa8ef0a2e29515985 | 0b841b1ae8867890fe06a5f0dcee63db9a3319a3 | refs/heads/master | 2020-07-05T19:15:53.758725 | 2019-10-01T11:34:44 | 2019-10-01T11:34:44 | 202,744,145 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | def checkio(data):
"""The sum of two integer elements"""
return sum(data)
if __name__ == '__main__':
assert checkio([5, 5]) == 10, 'First'
assert checkio([7, 1]) == 8, 'Second'
print('All ok')
| [
"="
] | = |
c78554bfaf8bee6f13777307c2c97139d339f973 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02390/s457532968.py | 390a81631bac8de1e3a93db961d2ef9a82cb8ed1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | import sys
line = sys.stdin.readline()
inp = int(line)
h,mod = inp//3600, inp%3600
m,mod = mod//60, mod%60
s = mod
print ("%d:%d:%d" % (h,m,s)) | [
"[email protected]"
] | |
b8a62fa93f2532714aacb95518a96010cd6afe03 | fffa7b13491deadfc649dfd035099ef764d8d303 | /api/tests/mathematical_object_detail.py | 3ecfae51fd020c715c1a8504027fcc57a26800f4 | [
"MIT"
] | permissive | Gawaboumga/OEMS | 3b12b8bebbe4b29716e8be4e22034ec394af36da | 1e60fa1f350f4cf1ca2e48072e0b4228eeb15024 | refs/heads/master | 2022-12-14T11:15:55.797241 | 2019-01-22T10:22:42 | 2019-01-22T10:22:42 | 147,358,167 | 0 | 0 | MIT | 2022-12-08T01:26:59 | 2018-09-04T14:20:58 | Python | UTF-8 | Python | false | false | 4,231 | py | from rest_framework import status
from rest_framework.test import APITestCase
from django.test import override_settings
from django.urls import reverse
from oems.settings import TEST_MEDIA_ROOT
from api.models import MathematicalObject
from api.tests import utils
@override_settings(MEDIA_ROOT=TEST_MEDIA_ROOT)
class MathematicalObjectDetailTests(APITestCase):
def test_retrieve_small_mathematical_object(self):
utils.log_as(self, utils.UserType.STAFF)
representation = 'test'
type = 'S'
data = {
'latex': representation,
'type': type,
}
response = self.client.post(reverse('api:mathematical_objects'), data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
response = self.client.get(reverse('api:mathematical_object', kwargs={'pk': response.data['id']}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.data
self.assertEqual(representation, response_data['latex'])
self.assertEqual(type, response_data['type'])
def test_retrieve_full_mathematical_object(self):
utils.log_as(self, utils.UserType.STAFF)
representation = 'test'
type = 'S'
function = 'function'
name = 'name'
tag = 'tag'
convergence_radius = '|z < 1|'
data = {
'latex': representation,
'type': type,
'functions': [{'function': function}],
'names': [{'name': name}],
'tags': [{'tag': tag}],
'convergence_radius': convergence_radius
}
response = self.client.post(reverse('api:mathematical_objects'), data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
response = self.client.get(reverse('api:mathematical_object', kwargs={'pk': response.data['id']}))
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.data
self.assertEqual(representation, response_data['latex'])
self.assertEqual(type, response_data['type'])
self.assertEqual(function, response_data['functions'][0]['function'])
self.assertEqual(name, response_data['names'][0]['name'])
self.assertEqual(tag, response_data['tags'][0]['tag'])
self.assertEqual(convergence_radius, response_data['convergence_radius'])
def test_put_small_mathematical_object(self):
utils.log_as(self, utils.UserType.STAFF)
representation = 'test'
type = 'S'
data = {
'latex': representation,
'type': type,
}
response = self.client.post(reverse('api:mathematical_objects'), data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
new_type = 'P'
data['type'] = new_type
response = self.client.put(reverse('api:mathematical_object', kwargs={'pk': response.data['id']}), data, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
response_data = response.data
self.assertEqual(representation, response_data['latex'])
self.assertEqual(new_type, response_data['type'])
def test_delete_full_mathematical_object(self):
utils.log_as(self, utils.UserType.STAFF)
representation = 'test'
type = 'S'
function = 'function'
name = 'name'
tag = 'tag'
convergence_radius = '|z < 1|'
data = {
'latex': representation,
'type': type,
'functions': [{'function': function}],
'names': [{'name': name}],
'tags': [{'tag': tag}],
'convergence_radius': convergence_radius
}
response = self.client.post(reverse('api:mathematical_objects'), data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
response = self.client.delete(reverse('api:mathematical_object', kwargs={'pk': response.data['id']}), data, format='json')
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(MathematicalObject.objects.count(), 0)
| [
"[email protected]"
] | |
066a5edb911a9b5069125b1aee9dfad1bbc78dbb | 7d74195bd00cbe8516670c8fe718e983106c9830 | /src/data_types/test_collections_ordereddict.py | ee4fe8c69fee1eec3bc707d6f7b10d39022930d8 | [] | no_license | masa4u/example_python | 7ab3d48020855ad493336afcd8d0c02eb3104b2b | 7bdee4cb8e90255b20353f7f95d3e879f6462638 | refs/heads/master | 2021-01-18T14:10:56.539659 | 2017-03-28T12:52:08 | 2017-03-28T12:52:08 | 30,511,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 147 | py | d = {'banana': 3, 'apple':4, 'pear': 1, 'orange': 2}
from collections import OrderedDict
print(OrderedDict(sorted(d.items(), key=lambda t: t[0])))
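# A small extension of the same idea (illustrative, not in the original
# snippet): sort by value instead of by key.
print(OrderedDict(sorted(d.items(), key=lambda t: t[1])))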
| [
"[email protected]"
] | |
4f17a87004d2e33cbb26f6d49b7cb84a0b7ffef9 | 70532360ddfdd8006bf7044c117403ce837cef0a | /code/Rplot.py | cd1f9b2b402c74ca5ecf9502d4eba1665cd10a9b | [] | no_license | wsgan001/campus_wifi_analysis | 09a7944f5019f726682925c8785cdf5f7d8c469a | c470135691ff8faad3cb4755301e4f59389e2c5a | refs/heads/master | 2020-03-10T11:09:05.579870 | 2017-03-03T07:13:57 | 2017-03-03T07:13:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,312 | py | # -*- coding: utf-8 -*-
import fileinput
user = {}
for line in fileinput.input("../data/select/select_a"):
mac = line.strip().split(" ")[0]
user[mac] = True
fileinput.close()
with open("../data/plot/R_trace_all","w") as f:
f.write("mac time dura\n")
for line in fileinput.input("../data/feature/trace_all_statistic_filter"):
part = line.strip().split(" ")
mac, objs = part[0], part[3:]
if user.has_key(mac):
for one in objs:
tag, rto = one.split("@")[0], str(int(one.split("@")[1].split(",")[0])/42)
if tag in ["0","1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16","17","18","19","20","21","22","23"]:
f.write(mac+" "+tag+" "+rto+"\n")
fileinput.close()
with open("../data/plot/R_trace_online","w") as f:
f.write("mac time dura\n")
for line in fileinput.input("../data/feature/trace_online_statistic_filter"):
part = line.strip().split(" ")
mac, objs = part[0], part[3:]
if user.has_key(mac):
for one in objs:
tag, rto = one.split("@")[0], str(int(one.split("@")[1].split(",")[0])/42)
if tag in ["0","1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16","17","18","19","20","21","22","23"]:
f.write(mac+" "+tag+" "+rto+"\n")
fileinput.close()
jac = {}
for line in fileinput.input("../data/jaccount/jaccount_taged"):
part = line.strip().split(" ")
dev, mac, sex, sta, col, age = part[0], part[1], part[2], part[3], part[4], int(part[5])
if dev == "mobile":
jac[mac] = {'sex':sex, 'sta':sta, 'col':col, 'age':age}
if sex == "男性":
jac[mac]['sex'] = "Male"
elif sex == "女性":
jac[mac]['sex'] = "Female"
if age <= 20:
jac[mac]['age'] = "<=20"
elif age > 20 and age <=22 :
jac[mac]['age'] = "21~22"
elif age > 22:
jac[mac]['age'] = ">=23"
if col == "电子信息与电气工程学院":
jac[mac]['col'] = "TOP1"
elif col == "机械与动力工程学院":
jac[mac]['col'] = "TOP2"
elif col == "材料科学与工程学院":
jac[mac]['col'] = "TOP3"
elif col == "船舶海洋与建筑工程学院":
jac[mac]['col'] = "TOP4"
elif col == "安泰经济与管理学院":
jac[mac]['col'] = "TOP5"
fileinput.close()
with open("../data/plot/R_trace_all_cor","w") as f:
f.write("mac Acad Adm Ath Cant Hosp Lib Soc Supp Teach Other sex age\n")
for line in fileinput.input("../data/feature/trace_all_statistic_filter"):
part = line.strip().split(" ")
mac, objs, user = part[0], part[3:], {"Acad":"0","Adm":"0","Ath":"0","Cant":"0","Hosp":"0","Lib":"0","Soc":"0","Supp":"0","Teach":"0","Other":"0"}
for one in objs:
tag, rto = one.split("@")[0], one.split("@")[1].split(",")[0]
if tag in ["Acad","Adm","Ath","Cant","Hosp","Lib","Soc","Supp","Teach","Other"]:
user[tag] = rto
f.write(mac+' '+user['Acad']+' '+user['Adm']+' '+user['Ath']+' '+user['Cant']+' '+user['Hosp']+' '+user['Lib']+' '+user['Soc']+' '+user['Supp']+' '+user['Teach']+' '+user['Other']+' '+jac[mac]['sex']+' '+jac[mac]['age']+'\n')
fileinput.close()
with open("../data/plot/R_trace_online_cor","w") as f:
f.write("mac Acad Adm Ath Cant Hosp Lib Soc Supp Teach Other sex age\n")
for line in fileinput.input("../data/feature/trace_online_statistic_filter"):
part = line.strip().split(" ")
mac, objs, user = part[0], part[3:], {"Acad":"0","Adm":"0","Ath":"0","Cant":"0","Hosp":"0","Lib":"0","Soc":"0","Supp":"0","Teach":"0","Other":"0"}
for one in objs:
tag, rto = one.split("@")[0], one.split("@")[1].split(",")[0]
if tag in ["Acad","Adm","Ath","Cant","Hosp","Lib","Soc","Supp","Teach","Other"]:
user[tag] = rto
f.write(mac+' '+user['Acad']+' '+user['Adm']+' '+user['Ath']+' '+user['Cant']+' '+user['Hosp']+' '+user['Lib']+' '+user['Soc']+' '+user['Supp']+' '+user['Teach']+' '+user['Other']+' '+jac[mac]['sex']+' '+jac[mac]['age']+'\n')
fileinput.close()
# 1:renren, 2:baidu, 3:sina, 4:taobao, 5:qq
mapping = {'1':'1','2':'1','3':'1','27':'1','46':'1','64':'1','69':'1',\
'5':'2','6':'2','21':'2','22':'2','26':'2','60':'2','63':'2','70':'2','77':'2','80':'2','93':'2','98':'2',\
'11':'3','15':'3','16':'3','17':'3','23':'3','24':'3','28':'3','29':'3','51':'3','82':'3','84':'3',\
'19':'4','23':'4','36':'4','39':'4','42':'4','56':'4','57':'4','58':'4','59':'4',\
'20':'5','31':'5','41':'5','45':'5','48':'5','86':'5',\
}
with open("../data/plot/R_trace_http_cor","w") as f:
f.write("mac renren baidu sina taobao qq sex age\n")
for line in fileinput.input("../data/feature/trace_http_statistic_filter"):
part = line.strip().split(" ")
mac, objs, user = part[0], part[3:], {"renren":0,"baidu":0,"sina":0,"taobao":0,"qq":0}
for one in objs:
tag, rto = one.split("@")[0], int(one.split("@")[1].split(",")[1])
if len(tag.split("+")) == 2 and tag.split("+")[0] == "WD" and ":" in tag:
tag = tag.split("+")[1]
hst, typ = tag.split(":")[0], tag.split(":")[1]
if mapping.has_key(hst):
top = mapping[hst]
if top == "1":
user['renren'] += rto
elif top == "2":
user['baidu'] += rto
elif top == "3":
user['sina'] += rto
elif top == "4":
user['taobao'] += rto
elif top == "5":
user['qq'] += rto
f.write(mac+' '+str(user['renren'])+' '+str(user['baidu'])+' '+str(user['sina'])+' '+str(user['taobao'])+' '+str(user['qq'])+' '+jac[mac]['sex']+' '+jac[mac]['age']+'\n')
fileinput.close()
| [
"[email protected]"
] | |
caff9c7cb685bc07ae6b58176aa41c8d83544348 | 9f0a4262c4402201df1cdd5674a679543f4a50b5 | /shaderLibrary_maya2017/resources/__init__.py | 05e522a865f16bd93dd2591fa2f1e5a4d20967ec | [] | no_license | subing85/subins-toolkits | 611b6b3b3012ccb023096f6e21d18d2bda5a534b | d02af1289ec3ee5bce6fa3d78c134a8847113aa6 | refs/heads/master | 2022-07-12T17:19:57.411454 | 2022-07-01T20:37:16 | 2022-07-01T20:37:16 | 168,826,548 | 11 | 2 | null | 2022-07-02T01:03:34 | 2019-02-02T11:51:25 | Mathematica | UTF-8 | Python | false | false | 1,087 | py | import os
from shaderLibrary_maya2017.utils import platforms
CURRENT_PATH = os.path.dirname(__file__)
MODULE = platforms.get_tool_kit()[0]
def getInputPath(module=None):
return os.path.join(
CURRENT_PATH, "inputs", "{}.json".format(module)
)
def getIconPath():
return os.path.join(CURRENT_PATH, "icons")
def getPreferencePath():
return os.path.join(getWorkspacePath(), "preference")
def getWorkspacePath():
return os.path.join(os.getenv("HOME"), "Documents", MODULE)
def getPublishDirectory():
return os.path.join(
os.environ["HOME"], "Walk_cycle", "characters"
)
def getResourceTypes():
data = {
"preference": getPreferencePath(),
"shader": getWorkspacePath(),
"generic": None,
}
return data
def getToolKitLink():
return "https://www.subins-toolkits.com"
def getToolKitHelpLink():
return "https://vimeo.com/314966208"
def getDownloadLink():
return "https://www.subins-toolkits.com/shader-library"
# end ####################################################################
| [
"[email protected]"
] | |
64ebd8dc8dee1409f7462da7e97b36589440ca93 | 897d82d4953ed7b609746a0f252f3f3440b650cb | /evening/20200615/demo3.py | fb8a2467fdd7cd54f0e4530ae9c506eeaa9352c6 | [] | no_license | haiou90/aid_python_core | dd704e528a326028290a2c18f215b1fd399981bc | bd4c7a20950cf7e22e8e05bbc42cb3b3fdbe82a1 | refs/heads/master | 2022-11-26T19:13:36.721238 | 2020-08-07T15:05:17 | 2020-08-07T15:05:17 | 285,857,695 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 947 | py | class GParent:
pass
class Parent(GParent):
def __init__(self,atk,hp):
self.atk = atk
self.hp = hp
def attack(self,target):
pass
def damage(self,value):
pass
# The player attacks the enemy; the enemy takes damage and may die.
class Player(Parent,GParent):
def attack(self,target):
        print('Black Tiger Steals the Heart!')  # translated from the original Chinese
target.damage(self.atk)
def damage(self,value):
        print('How dare you hit me!')
self.hp -= value
if self.hp <= 0:
            print('Too weak!')
class Enemy(Parent):
def attack(self,target):
        print('Basic attack, form one')
target.damage(self.atk)
def damage(self,value):
        print('The player is attacking!')
self.hp -= value
if self.hp <= 0:
print('a~~~~')
            print('Loot drops!')
p1 = Player(50,100)
e1 = Enemy(10,100)
p1.attack(e1)
e1.attack(p1)
e1.attack(p1)
e1.attack(p1)
e1.attack(p1)
p1.attack(e1)
| [
"[email protected]"
] | |
21d9a316ce6cfdf96f3a9f5edaacf77894c81bf4 | e9d52dcf101aea0327c6b0d7e5244c91dfd62cf6 | /spexy/adv/samples/simple.py | e2df8a641ff75635616d8894582fa8f83e6bf7dd | [] | no_license | drufat/spexy | 6eba9f44a5539245486cd4ef8fefd24bdb7ade6a | 53255009c1830501986afbf6688142ddefe17b9a | refs/heads/master | 2021-09-18T19:51:47.313946 | 2018-07-19T05:09:02 | 2018-07-19T05:09:02 | 100,453,374 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 179 | py | # Copyright (C) 2010-2016 Dzhelil S. Rufat. All Rights Reserved.
from sympy import sin, cos
def V(x, y):
return (-sin(y), sin(x))
def p(x, y):
return -cos(x) * cos(y)
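# Quick symbolic check (illustrative, not in the original sample): the
# velocity field V is divergence-free.
if __name__ == '__main__':
    from sympy import symbols, diff, simplify
    x, y = symbols('x y')
    u, v = V(x, y)
    assert simplify(diff(u, x) + diff(v, y)) == 0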
| [
"[email protected]"
] | |
77576f4bd93940f460a967a46375dcb841c71094 | 4a418036130cb63caa503719b4162cce9753459b | /nemo/collections/nlp/modules/common/transformer/transformer_modules.py | 63998217f09b5eaa659f8bbb583c263a6befd154 | [
"Apache-2.0"
] | permissive | kssteven418/Q-ASR | 89a7dac24d74556453e7b54b26289fd1466070c4 | aa1ec2ef78fd7606f8f365dfe3e66691a0e48178 | refs/heads/qasr | 2023-08-05T15:43:42.493513 | 2021-10-11T20:06:53 | 2021-10-11T20:06:53 | 353,027,973 | 33 | 1 | Apache-2.0 | 2021-03-30T17:33:26 | 2021-03-30T14:20:56 | Jupyter Notebook | UTF-8 | Python | false | false | 8,624 | py | # Copyright 2018 The Google AI Language Team Authors and
# The HuggingFace Inc. team.
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import torch
from torch import nn
from torch.nn.functional import gelu
__all__ = ["TransformerEmbedding"]
class FixedPositionalEncoding(nn.Module):
"""
Fixed positional encoding (embedding layer) from sine and cosine functions
of different frequencies according to https://arxiv.org/abs/1706.03762
Args:
hidden_size: size of the embeddings in the model, also known as d_model
max_sequence_length: maximum allowed length of the input sequence
"""
def __init__(self, hidden_size, max_sequence_length=512):
super().__init__()
pos_enc = torch.zeros(max_sequence_length, hidden_size)
position = torch.arange(0.0, max_sequence_length).unsqueeze(1)
coef = -math.log(10000.0) / hidden_size
div_term = torch.exp(coef * torch.arange(0.0, hidden_size, 2))
pos_enc[:, 0::2] = torch.sin(position * div_term)
pos_enc[:, 1::2] = torch.cos(position * div_term)
pos_enc.div_(math.sqrt(hidden_size))
self.register_buffer('pos_enc', pos_enc)
def forward(self, position_ids):
return torch.embedding(self.pos_enc, position_ids)
class TransformerEmbedding(nn.Module):
"""
Embedding from token and position embeddings.
Optionally add token_type embedding (e.g. type of the sentence in BERT).
Args:
vocab_size: size of the vocabulary
hidden_size: size of the embeddings in the model, also known as d_model
max_sequence_length: maximum allowed length of the input sequence
num_token_types: number of different token types
(e.g. tokens of sentence A and tokens of sentence B in BERT)
embedding_dropout: probability of dropout applied to embeddings
learn_positional_encodings: whether to learn positional encodings or
use fixed (sine-cosine) ones
"""
def __init__(
self,
vocab_size,
hidden_size,
max_sequence_length=512,
num_token_types=2,
embedding_dropout=0.0,
learn_positional_encodings=False,
):
super().__init__()
self.max_sequence_length = max_sequence_length
self.token_embedding = nn.Embedding(vocab_size, hidden_size, padding_idx=0)
if learn_positional_encodings:
self.position_embedding = nn.Embedding(max_sequence_length, hidden_size)
else:
self.position_embedding = FixedPositionalEncoding(hidden_size, max_sequence_length)
self.token_type_embedding = nn.Embedding(num_token_types, hidden_size)
self.layer_norm = nn.LayerNorm(hidden_size, eps=1e-5)
self.dropout = nn.Dropout(embedding_dropout)
def forward(self, input_ids, token_type_ids=None, start_pos=0):
seq_length = input_ids.size(1)
if seq_length > self.max_sequence_length:
raise ValueError(
f"Input sequence is longer than maximum allowed sequence length for positional encoding. "
f"Got {seq_length} and {self.max_sequence_length}"
)
position_ids = torch.arange(
start=start_pos, end=start_pos + seq_length, dtype=torch.long, device=input_ids.device
)
position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
token_embeddings = self.token_embedding(input_ids)
position_embeddings = self.position_embedding(position_ids)
embeddings = token_embeddings + position_embeddings
if token_type_ids is not None:
token_type_embeddings = self.token_type_embedding(token_type_ids)
embeddings = embeddings + token_type_embeddings
embeddings = self.layer_norm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
class MultiHeadAttention(nn.Module):
"""
Multi-head scaled dot-product attention layer.
Args:
hidden_size: size of the embeddings in the model, also known as d_model
num_attention_heads: number of heads in multi-head attention
attn_score_dropout: probability of dropout applied to attention scores
attn_layer_dropout: probability of dropout applied to the output of the
whole layer, but before layer normalization
"""
def __init__(self, hidden_size, num_attention_heads, attn_score_dropout=0.0, attn_layer_dropout=0.0):
super().__init__()
if hidden_size % num_attention_heads != 0:
raise ValueError(
"The hidden size (%d) is not a multiple of the number "
"of attention heads (%d)" % (hidden_size, num_attention_heads)
)
self.hidden_size = hidden_size
self.num_attention_heads = num_attention_heads
self.attn_head_size = int(hidden_size / num_attention_heads)
self.attn_scale = math.sqrt(math.sqrt(self.attn_head_size))
self.query_net = nn.Linear(hidden_size, hidden_size)
self.key_net = nn.Linear(hidden_size, hidden_size)
self.value_net = nn.Linear(hidden_size, hidden_size)
self.out_projection = nn.Linear(hidden_size, hidden_size)
self.attn_dropout = nn.Dropout(attn_score_dropout)
self.layer_dropout = nn.Dropout(attn_layer_dropout)
def transpose_for_scores(self, x):
new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attn_head_size)
x = x.view(*new_x_shape)
return x.permute(0, 2, 1, 3)
def forward(self, queries, keys, values, attention_mask):
# attention_mask is needed to hide the tokens which correspond to [PAD]
# in the case of BERT, or to hide the future tokens in the case of
# vanilla language modeling and translation
query = self.query_net(queries)
key = self.key_net(keys)
value = self.value_net(values)
query = self.transpose_for_scores(query) / self.attn_scale
key = self.transpose_for_scores(key) / self.attn_scale
value = self.transpose_for_scores(value)
# for numerical stability we pre-divide query and key by sqrt(sqrt(d))
attention_scores = torch.matmul(query, key.transpose(-1, -2))
if attention_mask is not None:
attention_scores = attention_scores + attention_mask.to(attention_scores.dtype)
attention_probs = torch.softmax(attention_scores, dim=-1)
attention_probs = self.attn_dropout(attention_probs)
context = torch.matmul(attention_probs, value)
context = context.permute(0, 2, 1, 3).contiguous()
new_context_shape = context.size()[:-2] + (self.hidden_size,)
context = context.view(*new_context_shape)
# output projection
output_states = self.out_projection(context)
output_states = self.layer_dropout(output_states)
return output_states
class PositionWiseFF(nn.Module):
"""
Position-wise feed-forward network of Transformer block.
Args:
hidden_size: size of the embeddings in the model, also known as d_model
inner_size: number of neurons in the intermediate part of feed-forward
net, usually is (4-8 x hidden_size) in the papers
ffn_dropout: probability of dropout applied to net output
hidden_act: activation function used between two linear layers
"""
def __init__(self, hidden_size, inner_size, ffn_dropout=0.0, hidden_act="relu"):
super().__init__()
self.dense_in = nn.Linear(hidden_size, inner_size)
self.dense_out = nn.Linear(inner_size, hidden_size)
self.layer_dropout = nn.Dropout(ffn_dropout)
ACT2FN = {"gelu": gelu, "relu": torch.relu}
self.act_fn = ACT2FN[hidden_act]
def forward(self, hidden_states):
output_states = self.dense_in(hidden_states)
output_states = self.act_fn(output_states)
output_states = self.dense_out(output_states)
output_states = self.layer_dropout(output_states)
return output_states
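# Illustrative smoke test (not part of the original module); the sizes below
# are assumptions chosen only for demonstration.
if __name__ == "__main__":
    emb = TransformerEmbedding(vocab_size=100, hidden_size=64, max_sequence_length=32)
    ids = torch.randint(0, 100, (2, 16))
    assert emb(ids).shape == (2, 16, 64)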
| [
"[email protected]"
] | |
87990ee7c013adfed4d8152d526bab78f47feee2 | 9550ce4a80169d21b556b22679a9462f98438e32 | /app/urls.py | 32f3b1ab973c04cbcb9ce11ea3ea6d0850315945 | [
"Apache-2.0"
] | permissive | erics1996/questionnaire_django | 87cc44bd745eb810861349effc126ed3dfbd6508 | 1006c61eba1e9efec0801299938eb13c16a0b292 | refs/heads/master | 2022-12-15T04:47:39.042594 | 2020-09-02T17:34:33 | 2020-09-02T17:34:33 | 284,580,189 | 0 | 0 | Apache-2.0 | 2020-09-02T17:34:34 | 2020-08-03T02:02:20 | Python | UTF-8 | Python | false | false | 300 | py | from django.contrib import admin
from django.urls import path, re_path
from .views import backend
urlpatterns = [
path('', backend.IndexView.as_view()),
re_path('survey/(?P<pk>\d+)/', backend.SurveyDetailView.as_view()),
re_path('(?P<pk>\d+)/download/', backend.DownloadView.as_view())
] | [
"[email protected]"
] | |
a78acddf6eebc59cad1ebc0e8fdaf53ee0ce2702 | 44a7101ae18c84ffa0e3c674763ba7b500937773 | /root/Desktop/Scripts/pyinstaller-1.5.1/bh_sshRcmd/bh_sshRcmd.spec | 66707266787869a8fdd977ad9985b57711fe3880 | [] | no_license | Draft2007/Scripts | cbaa66ce0038f3370c42d93da9308cbd69fb701a | 0dcc720a1edc882cfce7498ca9504cd9b12b8a44 | refs/heads/master | 2016-09-05T20:05:46.601503 | 2015-06-23T00:05:02 | 2015-06-23T00:05:02 | 37,945,893 | 7 | 2 | null | null | null | null | UTF-8 | Python | false | false | 561 | spec | # -*- mode: python -*-
a = Analysis([os.path.join(HOMEPATH,'support/_mountzlib.py'), os.path.join(HOMEPATH,'support/useUnicode.py'), '/usr/local/tools/bh_sshRcmd.py'],
pathex=['/usr/local/tools/pyinstaller-1.5.1'])
pyz = PYZ(a.pure)
exe = EXE( pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
name=os.path.join('dist', 'bh_sshRcmd'),
debug=False,
strip=False,
upx=True,
console=1 )
app = BUNDLE(exe,
name=os.path.join('dist', 'bh_sshRcmd.app'))
| [
"[email protected]"
] | |
6925f9d279dd7fc2386a10b7f0527b1c88816f95 | a4ea525e226d6c401fdb87a6e9adfdc5d07e6020 | /src/azure-cli/azure/cli/command_modules/servicebus/aaz/latest/servicebus/topic/_list.py | 751ddf434b8c609435a955fc4eaa4a17a49bdf38 | [
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] | permissive | Azure/azure-cli | 13340eeca2e288e66e84d393fa1c8a93d46c8686 | a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca | refs/heads/dev | 2023-08-17T06:25:37.431463 | 2023-08-17T06:00:10 | 2023-08-17T06:00:10 | 51,040,886 | 4,018 | 3,310 | MIT | 2023-09-14T11:11:05 | 2016-02-04T00:21:51 | Python | UTF-8 | Python | false | false | 10,902 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"servicebus topic list",
)
class List(AAZCommand):
"""List all the topics in a namespace.
"""
_aaz_info = {
"version": "2022-01-01-preview",
"resources": [
["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.servicebus/namespaces/{}/topics", "2022-01-01-preview"],
]
}
def _handler(self, command_args):
super()._handler(command_args)
return self.build_paging(self._execute_operations, self._output)
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.namespace_name = AAZStrArg(
options=["--namespace-name"],
help="The namespace name",
required=True,
fmt=AAZStrArgFormat(
max_length=50,
min_length=6,
),
)
_args_schema.resource_group = AAZResourceGroupNameArg(
required=True,
)
_args_schema.skip = AAZIntArg(
options=["--skip"],
help="Skip is only used if a previous operation returned a partial result. If a previous response contains a nextLink element, the value of the nextLink element will include a skip parameter that specifies a starting point to use for subsequent calls.",
fmt=AAZIntArgFormat(
maximum=1000,
minimum=0,
),
)
_args_schema.top = AAZIntArg(
options=["--top"],
help="May be used to limit the number of results to the most recent N usageDetails.",
fmt=AAZIntArgFormat(
maximum=1000,
minimum=1,
),
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
self.TopicsListByNamespace(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
def _output(self, *args, **kwargs):
result = self.deserialize_output(self.ctx.vars.instance.value, client_flatten=True)
next_link = self.deserialize_output(self.ctx.vars.instance.next_link)
return result, next_link
class TopicsListByNamespace(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/topics",
**self.url_parameters
)
@property
def method(self):
return "GET"
@property
def error_format(self):
return "MgmtErrorFormat"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"namespaceName", self.ctx.args.namespace_name,
required=True,
),
**self.serialize_url_param(
"resourceGroupName", self.ctx.args.resource_group,
required=True,
),
**self.serialize_url_param(
"subscriptionId", self.ctx.subscription_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"$skip", self.ctx.args.skip,
),
**self.serialize_query_param(
"$top", self.ctx.args.top,
),
**self.serialize_query_param(
"api-version", "2022-01-01-preview",
required=True,
),
}
return parameters
@property
def header_parameters(self):
parameters = {
**self.serialize_header_param(
"Accept", "application/json",
),
}
return parameters
def on_200(self, session):
data = self.deserialize_http_content(session)
self.ctx.set_var(
"instance",
data,
schema_builder=self._build_schema_on_200
)
_schema_on_200 = None
@classmethod
def _build_schema_on_200(cls):
if cls._schema_on_200 is not None:
return cls._schema_on_200
cls._schema_on_200 = AAZObjectType()
_schema_on_200 = cls._schema_on_200
_schema_on_200.next_link = AAZStrType(
serialized_name="nextLink",
)
_schema_on_200.value = AAZListType()
value = cls._schema_on_200.value
value.Element = AAZObjectType()
_element = cls._schema_on_200.value.Element
_element.id = AAZStrType(
flags={"read_only": True},
)
_element.location = AAZStrType(
flags={"read_only": True},
)
_element.name = AAZStrType(
flags={"read_only": True},
)
_element.properties = AAZObjectType(
flags={"client_flatten": True},
)
_element.system_data = AAZObjectType(
serialized_name="systemData",
flags={"read_only": True},
)
_element.type = AAZStrType(
flags={"read_only": True},
)
properties = cls._schema_on_200.value.Element.properties
properties.accessed_at = AAZStrType(
serialized_name="accessedAt",
flags={"read_only": True},
)
properties.auto_delete_on_idle = AAZStrType(
serialized_name="autoDeleteOnIdle",
)
properties.count_details = AAZObjectType(
serialized_name="countDetails",
)
properties.created_at = AAZStrType(
serialized_name="createdAt",
flags={"read_only": True},
)
properties.default_message_time_to_live = AAZStrType(
serialized_name="defaultMessageTimeToLive",
)
properties.duplicate_detection_history_time_window = AAZStrType(
serialized_name="duplicateDetectionHistoryTimeWindow",
)
properties.enable_batched_operations = AAZBoolType(
serialized_name="enableBatchedOperations",
)
properties.enable_express = AAZBoolType(
serialized_name="enableExpress",
)
properties.enable_partitioning = AAZBoolType(
serialized_name="enablePartitioning",
)
properties.max_message_size_in_kilobytes = AAZIntType(
serialized_name="maxMessageSizeInKilobytes",
)
properties.max_size_in_megabytes = AAZIntType(
serialized_name="maxSizeInMegabytes",
)
properties.requires_duplicate_detection = AAZBoolType(
serialized_name="requiresDuplicateDetection",
)
properties.size_in_bytes = AAZIntType(
serialized_name="sizeInBytes",
flags={"read_only": True},
)
properties.status = AAZStrType()
properties.subscription_count = AAZIntType(
serialized_name="subscriptionCount",
flags={"read_only": True},
)
properties.support_ordering = AAZBoolType(
serialized_name="supportOrdering",
)
properties.updated_at = AAZStrType(
serialized_name="updatedAt",
flags={"read_only": True},
)
count_details = cls._schema_on_200.value.Element.properties.count_details
count_details.active_message_count = AAZIntType(
serialized_name="activeMessageCount",
flags={"read_only": True},
)
count_details.dead_letter_message_count = AAZIntType(
serialized_name="deadLetterMessageCount",
flags={"read_only": True},
)
count_details.scheduled_message_count = AAZIntType(
serialized_name="scheduledMessageCount",
flags={"read_only": True},
)
count_details.transfer_dead_letter_message_count = AAZIntType(
serialized_name="transferDeadLetterMessageCount",
flags={"read_only": True},
)
count_details.transfer_message_count = AAZIntType(
serialized_name="transferMessageCount",
flags={"read_only": True},
)
system_data = cls._schema_on_200.value.Element.system_data
system_data.created_at = AAZStrType(
serialized_name="createdAt",
)
system_data.created_by = AAZStrType(
serialized_name="createdBy",
)
system_data.created_by_type = AAZStrType(
serialized_name="createdByType",
)
system_data.last_modified_at = AAZStrType(
serialized_name="lastModifiedAt",
)
system_data.last_modified_by = AAZStrType(
serialized_name="lastModifiedBy",
)
system_data.last_modified_by_type = AAZStrType(
serialized_name="lastModifiedByType",
)
return cls._schema_on_200
class _ListHelper:
"""Helper class for List"""
__all__ = ["List"]
| [
"[email protected]"
] | |
dc0f1debf616d07e130ae2adb13b8209fd2e2f74 | 99afa83eda09cf552466ddf90314cb01d07b166a | /testapp/models.py | c1fa45c2c96048893e614bf9142070231858f126 | [] | no_license | jithinvijayan007/Lithoera | 358c9a6191d6510ac07229e7a92eadd89d70e14f | 33e3639e882f79b12541f92070dad74483fdfa72 | refs/heads/master | 2023-01-05T18:29:37.388869 | 2020-11-02T11:58:27 | 2020-11-02T11:58:27 | 309,316,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,764 | py | from django.db import models
from django.contrib.auth.models import AbstractBaseUser, BaseUserManager
# Create your models here.
class MyAccountManager(BaseUserManager):
def create_user(self, email, username, password=None):
if not email:
raise ValueError('Users must have an email address')
if not username:
raise ValueError('Users must have a username')
user = self.model(
email=self.normalize_email(email),
username=username,
)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, username, password):
user = self.create_user(
email=self.normalize_email(email),
password=password,
username=username,
)
user.is_admin = True
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user
class Account(AbstractBaseUser):
email = models.EmailField(verbose_name="email", max_length=60, unique=True)
username = models.CharField(max_length=30, unique=True)
date_joined = models.DateTimeField(verbose_name='date joined', auto_now_add=True)
last_login = models.DateTimeField(verbose_name='last login', auto_now=True)
is_admin = models.BooleanField(default=False)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
is_superuser = models.BooleanField(default=False)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username']
objects = MyAccountManager()
def __str__(self):
return self.email
    # For checking permissions. To keep it simple, all admins have ALL permissions.
def has_perm(self, perm, obj=None):
return self.is_admin
# Does this user have permission to view this app? (ALWAYS YES FOR SIMPLICITY)
def has_module_perms(self, app_label):
return True
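# Minimal usage sketch (assumed, not part of the original app):
#   Account.objects.create_user(email='[email protected]',
#                               username='demo', password='pass1234')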
| [
"[email protected]"
] | |
21064aaea82657175bb68471f1411164393e0210 | 657c80336bce1cc6158cd349ce208c5e680a4d0d | /contrib/projection/tests/projection/base_projection.py | de53d6895412de112d31a959926d9cdb47b6ef9c | [
"BSD-3-Clause"
] | permissive | Xinmudotmoe/pyglet | b37628618647bf3b1e3d7db28202a5e14c60450c | 144257c365ca85528c6a4c5bed8141e683d7a9b6 | refs/heads/master | 2021-05-29T22:05:40.676643 | 2015-10-24T05:55:49 | 2015-10-24T05:55:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | #!/usr/bin/python
# $Id:$
from pyglet.gl import *
def fillrect(x, y, width, height):
glBegin(GL_QUADS)
glVertex2f(x, y)
glVertex2f(x + width, y)
glVertex2f(x + width, y + height)
glVertex2f(x, y + height)
glEnd()
def rect(x, y, width, height):
glBegin(GL_LINE_LOOP)
glVertex2f(x, y)
glVertex2f(x + width, y)
glVertex2f(x + width, y + height)
glVertex2f(x, y + height)
glEnd()
| [
"[email protected]"
] | |
d811f5d03ae12bdeb567632e2d82b3ecccc87751 | a1e3e7cf1d27b85d9472c6353e7646d37528b241 | /q11.py | 3ea7528239387d3ae6df885be655e4e6ebe1b32f | [] | no_license | osama1998H/standerdLearnd-string | 421148f81c2c604f6c75dac568ff1faeb20922ce | 0af39cd2fd43be45bb54aca2826bc8bf56e399ed | refs/heads/main | 2023-09-01T04:21:52.499680 | 2021-05-15T19:54:50 | 2021-05-15T19:54:50 | 365,533,408 | 0 | 0 | null | 2023-08-29T08:31:40 | 2021-05-08T14:21:53 | Python | UTF-8 | Python | false | false | 325 | py | string = input("enter the string: ")
def del_odd(string: str)->str:
new_string = ""
string = [i for i in string]
for i in string:
if string.index(i) % 2 != 0:
string.remove(i)
for i in string:
new_string += i
return new_string
new_string = del_odd(string)
print(new_string)
| [
"[email protected]"
] | |
da3f5d0d4b3c71ac3db45cece6411a3233f8b68a | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/webpubsub/azure-mgmt-webpubsub/generated_samples/web_pub_sub_replicas_create_or_update.py | 81ff6144e4226d349866642540011deb03744386 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,920 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.webpubsub import WebPubSubManagementClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-webpubsub
# USAGE
python web_pub_sub_replicas_create_or_update.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = WebPubSubManagementClient(
credential=DefaultAzureCredential(),
subscription_id="00000000-0000-0000-0000-000000000000",
)
response = client.web_pub_sub_replicas.begin_create_or_update(
resource_group_name="myResourceGroup",
resource_name="myWebPubSubService",
replica_name="myWebPubSubService-eastus",
parameters={
"location": "eastus",
"properties": {},
"sku": {"capacity": 1, "name": "Premium_P1", "tier": "Premium"},
"tags": {"key1": "value1"},
},
).result()
print(response)
# x-ms-original-file: specification/webpubsub/resource-manager/Microsoft.SignalRService/preview/2023-06-01-preview/examples/WebPubSubReplicas_CreateOrUpdate.json
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
1dbfcc3d47f3a48af022c5b19fdcc27352f4d401 | d2b54d3df1dc8f7e88c0d209b35949089facc73f | /treenode/memory.py | b5c7ddd2c1dd51260daf32b36666209d52ca2176 | [
"MIT"
] | permissive | domlysi/django-treenode | df8b08e756884bc8daffdfad7b5b3b102e92e309 | 86e7c76e2b2d60c071cfce6ad1493b2b51f2d304 | refs/heads/master | 2022-12-12T18:10:44.668904 | 2020-08-17T11:01:09 | 2020-08-17T11:01:09 | 287,275,877 | 0 | 0 | MIT | 2020-08-13T12:37:54 | 2020-08-13T12:37:54 | null | UTF-8 | Python | false | false | 522 | py | # -*- coding: utf-8 -*-
from collections import defaultdict
import weakref
__refs__ = defaultdict(weakref.WeakSet)
def clear_refs(cls):
__refs__[cls].clear()
def get_refs(cls):
return __refs__[cls]
def set_ref(cls, obj):
if obj.pk:
__refs__[cls].add(obj)
def update_refs(cls, data):
for obj in get_refs(cls):
obj_key = str(obj.pk)
obj_data = data.get(obj_key)
if obj_data:
for key, value in obj_data.items():
setattr(obj, key, value)
| [
"[email protected]"
] | |
74cff82d3fb4d7b9313cdc8f801d09727367361a | abbb1e132b3d339ba2173129085f252e2f3311dc | /model-optimizer/extensions/middle/RemoveUselessConcatSplit_test.py | 7f91a7e4cbc1b0606f910b5a814c723cf707ab7f | [
"Apache-2.0"
] | permissive | 0xF6/openvino | 56cce18f1eb448e25053fd364bcbc1da9f34debc | 2e6c95f389b195f6d3ff8597147d1f817433cfb3 | refs/heads/master | 2022-12-24T02:49:56.686062 | 2020-09-22T16:05:34 | 2020-09-22T16:05:34 | 297,745,570 | 2 | 0 | Apache-2.0 | 2020-09-22T19:03:06 | 2020-09-22T19:03:04 | null | UTF-8 | Python | false | false | 16,709 | py | """
Copyright (C) 2018-2020 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from extensions.middle.RemoveUselessConcatSplit import RemoveUselessConcatSplitPattern
from mo.front.common.partial_infer.utils import int64_array
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph
class RemoveUselessConcatSplitTests(unittest.TestCase):
def test_useless_concat_split(self):
graph = build_graph({'br1': {'kind': 'op', 'op': None},
'br_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'br2': {'kind': 'op', 'op': None},
'br_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'br3': {'kind': 'op', 'op': None},
'br_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'concat': {'kind': 'op', 'op': 'Concat'},
'concat_data': {'kind': 'data', 'shape': int64_array([1, 108])},
'split': {'kind': 'op', 'op': 'Split'},
'split_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'split_br1': {'kind': 'op', 'op': None},
'split_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'split_br2': {'kind': 'op', 'op': None},
'split_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'split_br3': {'kind': 'op', 'op': None},
},
[('br1', 'br_data_1'), ('br2', 'br_data_2'), ('br3', 'br_data_3'),
('br_data_1', 'concat', {'in': 0}),
('br_data_2', 'concat', {'in': 1}),
('br_data_3', 'concat', {'in': 2}),
('concat', 'concat_data'),
('concat_data', 'split'),
('split', 'split_data_1', {'out': 0}),
('split', 'split_data_2', {'out': 1}),
('split', 'split_data_3', {'out': 2}),
('split_data_1', 'split_br1'),
('split_data_2', 'split_br2'),
('split_data_3', 'split_br3')])
RemoveUselessConcatSplitPattern().find_and_replace_pattern(graph)
ref_graph = build_graph({'br1': {'kind': 'op', 'op': None},
'br_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'br2': {'kind': 'op', 'op': None},
'br_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'br3': {'kind': 'op', 'op': None},
'br_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'split_br1': {'kind': 'op', 'op': None},
'split_br2': {'kind': 'op', 'op': None},
'split_br3': {'kind': 'op', 'op': None}},
[('br1', 'br_data_1'), ('br2', 'br_data_2'), ('br3', 'br_data_3'),
('br_data_1', 'split_br1'),
('br_data_2', 'split_br2'),
('br_data_3', 'split_br3'),
])
(flag, resp) = compare_graphs(graph, ref_graph, 'split_br3')
self.assertTrue(flag, resp)
def test_usefull_concat_split(self):
graph = build_graph({'br1': {'kind': 'op', 'op': None},
'br_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'br2': {'kind': 'op', 'op': None},
'br_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'br3': {'kind': 'op', 'op': None},
'br_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'concat': {'kind': 'op', 'op': 'Concat'},
'concat_data': {'kind': 'data', 'shape': int64_array([1, 108])},
'split': {'kind': 'op', 'op': 'Split'},
'split_data_1': {'kind': 'data', 'shape': int64_array([1, 36])},
'split_br1': {'kind': 'op', 'op': None},
'split_data_2': {'kind': 'data', 'shape': int64_array([1, 26])},
'split_br2': {'kind': 'op', 'op': None},
'split_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'split_br3': {'kind': 'op', 'op': None},
},
[('br1', 'br_data_1'), ('br2', 'br_data_2'), ('br3', 'br_data_3'),
('br_data_1', 'concat', {'in': 0}),
('br_data_2', 'concat', {'in': 1}),
('br_data_3', 'concat', {'in': 2}),
('concat', 'concat_data'),
('concat_data', 'split'),
('split', 'split_data_1', {'out': 0}),
('split', 'split_data_2', {'out': 1}),
('split', 'split_data_3', {'out': 2}),
('split_data_1', 'split_br1'),
('split_data_2', 'split_br2'),
('split_data_3', 'split_br3')])
RemoveUselessConcatSplitPattern().find_and_replace_pattern(graph)
ref_graph = build_graph({'br1': {'kind': 'op', 'op': None},
'br_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'br2': {'kind': 'op', 'op': None},
'br_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'br3': {'kind': 'op', 'op': None},
'br_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'concat': {'kind': 'op', 'op': 'Concat'},
'concat_data': {'kind': 'data', 'shape': int64_array([1, 108])},
'split': {'kind': 'op', 'op': 'Split'},
'split_data_1': {'kind': 'data', 'shape': int64_array([1, 36])},
'split_br1': {'kind': 'op', 'op': None},
'split_data_2': {'kind': 'data', 'shape': int64_array([1, 26])},
'split_br2': {'kind': 'op', 'op': None},
'split_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'split_br3': {'kind': 'op', 'op': None},
},
[('br1', 'br_data_1'), ('br2', 'br_data_2'), ('br3', 'br_data_3'),
('br_data_1', 'concat', {'in': 0}),
('br_data_2', 'concat', {'in': 1}),
('br_data_3', 'concat', {'in': 2}),
('concat', 'concat_data'),
('concat_data', 'split'),
('split', 'split_data_1', {'out': 0}),
('split', 'split_data_2', {'out': 1}),
('split', 'split_data_3', {'out': 2}),
('split_data_1', 'split_br1'),
('split_data_2', 'split_br2'),
('split_data_3', 'split_br3')])
(flag, resp) = compare_graphs(graph, ref_graph, 'split_br3')
self.assertTrue(flag, resp)
def test_useful_concat_2_outputs_split(self):
graph = build_graph({'br1': {'kind': 'op', 'op': None},
'br_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'br2': {'kind': 'op', 'op': None},
'br_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'br3': {'kind': 'op', 'op': None},
'br_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'concat': {'kind': 'op', 'op': 'Concat'},
'concat_data': {'kind': 'data', 'shape': int64_array([1, 108])},
'placeholder': {'kind': 'op', 'op': None},
'split': {'kind': 'op', 'op': 'Split'},
'split_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'split_br1': {'kind': 'op', 'op': None},
'split_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'split_br2': {'kind': 'op', 'op': None},
'split_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'split_br3': {'kind': 'op', 'op': None},
},
[('br1', 'br_data_1'), ('br2', 'br_data_2'), ('br3', 'br_data_3'),
('br_data_1', 'concat', {'in': 0}),
('br_data_2', 'concat', {'in': 1}),
('br_data_3', 'concat', {'in': 2}),
('concat', 'concat_data'),
('concat_data', 'split'),
('concat_data', 'placeholder'),
('split', 'split_data_1', {'out': 0}),
('split', 'split_data_2', {'out': 1}),
('split', 'split_data_3', {'out': 2}),
('split_data_1', 'split_br1'),
('split_data_2', 'split_br2'),
('split_data_3', 'split_br3')])
RemoveUselessConcatSplitPattern().find_and_replace_pattern(graph)
ref_graph = build_graph({'br1': {'kind': 'op', 'op': None},
'br_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'br2': {'kind': 'op', 'op': None},
'br_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'br3': {'kind': 'op', 'op': None},
'br_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'concat': {'kind': 'op', 'op': 'Concat'},
'concat_data': {'kind': 'data', 'shape': int64_array([1, 108])},
'placeholder': {'kind': 'op', 'op': None},
'split': {'kind': 'op', 'op': 'Split'},
'split_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'split_br1': {'kind': 'op', 'op': None},
'split_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'split_br2': {'kind': 'op', 'op': None},
'split_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'split_br3': {'kind': 'op', 'op': None},
},
[('br1', 'br_data_1'), ('br2', 'br_data_2'), ('br3', 'br_data_3'),
('br_data_1', 'concat', {'in': 0}),
('br_data_2', 'concat', {'in': 1}),
('br_data_3', 'concat', {'in': 2}),
('concat', 'concat_data'),
('concat_data', 'split'),
('concat_data', 'placeholder'),
('split', 'split_data_1', {'out': 0}),
('split', 'split_data_2', {'out': 1}),
('split', 'split_data_3', {'out': 2}),
('split_data_1', 'split_br1'),
('split_data_2', 'split_br2'),
('split_data_3', 'split_br3')])
(flag, resp) = compare_graphs(graph, ref_graph, 'split_br3')
self.assertTrue(flag, resp)
def test_useless_concat_split_2_outputs(self):
graph = build_graph({'br1': {'kind': 'op', 'op': None},
'br_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'br2': {'kind': 'op', 'op': None},
'br_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'br3': {'kind': 'op', 'op': None},
'br_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'concat': {'kind': 'op', 'op': 'Concat'},
'concat_data': {'kind': 'data', 'shape': int64_array([1, 108])},
'split': {'kind': 'op', 'op': 'Split'},
'split_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'split_br1': {'kind': 'op', 'op': None},
'split_br1_1': {'kind': 'op', 'op': None},
'split_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'split_br2': {'kind': 'op', 'op': None},
'split_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'split_br3': {'kind': 'op', 'op': None},
},
[('br1', 'br_data_1'), ('br2', 'br_data_2'), ('br3', 'br_data_3'),
('br_data_1', 'concat', {'in': 0}),
('br_data_2', 'concat', {'in': 1}),
('br_data_3', 'concat', {'in': 2}),
('concat', 'concat_data'),
('concat_data', 'split'),
('split', 'split_data_1', {'out': 0}),
('split', 'split_data_2', {'out': 1}),
('split', 'split_data_3', {'out': 2}),
('split_data_1', 'split_br1'),
('split_data_1', 'split_br1_1'),
('split_data_2', 'split_br2'),
('split_data_3', 'split_br3')])
RemoveUselessConcatSplitPattern().find_and_replace_pattern(graph)
ref_graph = build_graph({'br1': {'kind': 'op', 'op': None},
'br_data_1': {'kind': 'data', 'shape': int64_array([1, 26])},
'br2': {'kind': 'op', 'op': None},
'br_data_2': {'kind': 'data', 'shape': int64_array([1, 36])},
'br3': {'kind': 'op', 'op': None},
'br_data_3': {'kind': 'data', 'shape': int64_array([1, 46])},
'split_br1': {'kind': 'op', 'op': None},
'split_br1_1': {'kind': 'op', 'op': None},
'split_br2': {'kind': 'op', 'op': None},
'split_br3': {'kind': 'op', 'op': None}},
[('br1', 'br_data_1'), ('br2', 'br_data_2'), ('br3', 'br_data_3'),
('br_data_1', 'split_br1'),
('br_data_1', 'split_br1_1'),
('br_data_2', 'split_br2'),
('br_data_3', 'split_br3'),
])
(flag, resp) = compare_graphs(graph, ref_graph, 'split_br3')
self.assertTrue(flag, resp)
| [
"[email protected]"
] | |
d3f832d3e767c0ff4bca2f5cccc70c027d529027 | 850d778687e3692ab2a38d4d2227391d92c21e6b | /atcoder.jp/abc083/abc083_a/Main.py | b29da90d1695a2084ca06588e76990130541eb02 | [] | no_license | Valkyrja3607/AtCoder | 77e2e5e66c0e8e12bb902c35f679119c6576fad7 | 9218a50b1eb83e4498845d15d9dda41fab90ed73 | refs/heads/master | 2023-07-15T20:38:52.911301 | 2018-05-30T17:56:22 | 2018-05-30T17:56:22 | 294,980,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 123 | py | a,b,c,d=map(int,input().split())
if a+b>c+d:
print("Left")
elif a+b<c+d:
print("Right")
else:
print("Balanced") | [
"[email protected]"
] | |
254fb14e235ff8c88fb37c0a2d8073e8cd9249a5 | f474d500b7da4f4069e24fddcde97783a4f3664b | /vagrantEnv/lib/python3.5/encodings/kz1048.py | 8c13cbaa6892121f32a575a95efa7e9b71c453ad | [
"Apache-2.0"
] | permissive | Thanh-Lai/chat-bot | 220a0fd6383181f0cdaf732b5c02f645bd960a28 | e3007fa6e034d3cccff4615a7eccf0e75bbc1708 | refs/heads/master | 2020-04-23T09:39:04.509356 | 2019-02-18T04:56:25 | 2019-02-18T04:56:25 | 171,075,880 | 0 | 0 | Apache-2.0 | 2019-02-18T04:56:26 | 2019-02-17T03:00:39 | Python | UTF-8 | Python | false | false | 38 | py | /usr/lib/python3.5/encodings/kz1048.py | [
"[email protected]"
] | |
925028b08297779546c047873b5ba67c870ad692 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_55/59.py | 09da2bcfaa4c411daa5449e6b502ef93033a8f6c | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,319 | py | #!/usr/bin/env python
import sys
filename=sys.argv[1]
inputfile=file(filename, 'r')
numcases=int(inputfile.readline().strip())
for case in range(1,numcases+1):
R, k, N = map(long, inputfile.readline().strip().split())
g = map(long, inputfile.readline().strip().split())
y = 0
first_ride = [None] * N
ride_groups = [None] * N
ride_seats = [None] * N
ride = 0
start = 0
while ride < R:
if first_ride[start] is not None:
break
ridestart = start
seats = 0
groups = 0
while seats + g[start] <= k and groups < N:
seats += g[start]
groups += 1
start += 1
if start >= N:
start = 0
if start == ridestart:
break
first_ride[ridestart] = ride
ride_groups[ridestart] = groups
ride_seats[ridestart] = seats
ride += 1
y += seats
if ride < R:
cyclelen = ride - first_ride[start]
if R - ride >= cyclelen:
cycles = (R - ride) / cyclelen
cycle_euros = 0
cycle_start = start
while True:
cycle_euros += ride_seats[start]
start = (start + ride_groups[start]) % N
ride += 1
if start == cycle_start:
break
y += cycle_euros * cycles
ride += (cycles - 1) * cyclelen
while ride < R:
y += ride_seats[start]
start = (start + ride_groups[start]) % N
ride += 1
print "Case #%d: %d" % (case, y)
| [
"[email protected]"
] | |
eeb5073afecbaf0f35097a0d4970f139fc0282fd | 014e9a6f3d48ffa7b9ee759904d2e33284a6f4d6 | /api/caoloapi/model/auth.py | c73941f6992e52e8c9728cbae96791221e95e3a7 | [
"MIT"
] | permissive | kissmikijr/caolo-backend | 33c0262239182b96d1215677c45065b4ef90455b | efec05bb793bd40951cb4e5ae4e930d972f63d36 | refs/heads/master | 2023-09-04T01:09:50.068148 | 2021-10-18T22:00:59 | 2021-10-18T22:06:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,079 | py | from datetime import datetime as dt, timedelta
from passlib.context import CryptContext
from jose import jwt
SECRET_KEY = "fe9fb923daa2a5c34a57b6da5d807a1e9cb48d4afee5c10095bab37bcf860059"
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30
PEPPER_RANGE = (128, 139, 3)
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
def __concatpw(pw: str, salt: str, pepper):
return f"{pw}{salt}{pepper}"
def verifypw(plain, salt, pepper, hashed_pw):
pw = __concatpw(plain, salt, pepper)
return pwd_context.verify(pw, hashed_pw)
def hashpw(pw: str, salt: str, pepper):
return pwd_context.hash(__concatpw(pw, salt, pepper))
def create_access_token(data: dict):
payload = data.copy()
payload.update({"exp": dt.utcnow() + timedelta(minutes=15)})
return jwt.encode(payload, SECRET_KEY, algorithm=ALGORITHM)
def decode_access_token(token: str):
"""
raises jose.JWTError or AssertionError on invalid token
"""
payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
assert "sub" in payload
return payload
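# Minimal usage sketch (illustrative only; the salt/pepper values below are
# hypothetical, not the ones used by the application):
#
#   hashed = hashpw("hunter2", salt="somesalt", pepper="a")
#   assert verifypw("hunter2", "somesalt", "a", hashed)
#   token = create_access_token({"sub": "user-id"})
#   assert decode_access_token(token)["sub"] == "user-id"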
| [
"[email protected]"
] | |
3e1738529ae55e62ae6843901eca2eb0d436e07a | 6189f34eff2831e3e727cd7c5e43bc5b591adffc | /WebMirror/management/rss_parser_funcs/feed_parse_extractIntheinkpotfictionWordpressCom.py | 5a22827f09f4623da612321d5379b4873ab2b614 | [
"BSD-3-Clause"
] | permissive | fake-name/ReadableWebProxy | 24603660b204a9e7965cfdd4a942ff62d7711e27 | ca2e086818433abc08c014dd06bfd22d4985ea2a | refs/heads/master | 2023-09-04T03:54:50.043051 | 2023-08-26T16:08:46 | 2023-08-26T16:08:46 | 39,611,770 | 207 | 20 | BSD-3-Clause | 2023-09-11T15:48:15 | 2015-07-24T04:30:43 | Python | UTF-8 | Python | false | false | 576 | py |
def extractIntheinkpotfictionWordpressCom(item):
'''
Parser for 'intheinkpotfiction.wordpress.com'
'''
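    # NOTE: extractVolChapterFragmentPostfix and buildReleaseMessageWithType
    # are not imported here; the harness that loads these generated parser
    # functions is assumed to provide them in scope.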
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
| [
"[email protected]"
] | |
15487621d75896236eb3ebe106a4f8748a6a389b | e43b78db4ff598944e58e593610f537f3833d79c | /py-faster-rcnn/lib/roi_data_layer/roidb.py | 93f713e1f127d432736a654ce6fa292eef3b6c67 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"BSD-2-Clause"
] | permissive | ZJUZQ/Net_caffe | 577e9b3e80a391d772a21c27639465d539fceb1f | bed3c7384a259339c5a0fb2ea34fa0cdd32ddd29 | refs/heads/master | 2021-09-08T12:19:37.039970 | 2018-03-09T14:44:24 | 2018-03-09T14:44:24 | 114,853,721 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,356 | py | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
"""Transform a roidb into a trainable roidb by adding a bunch of metadata."""
import numpy as np
from fast_rcnn.config import cfg
from fast_rcnn.bbox_transform import bbox_transform
from utils.cython_bbox import bbox_overlaps
import PIL
def prepare_roidb(imdb):
"""Enrich the imdb's roidb by adding some derived quantities that
are useful for training. This function precomputes the maximum
overlap, taken over ground-truth boxes, between each ROI and
each ground-truth box. The class with maximum overlap is also
recorded.
"""
sizes = [PIL.Image.open(imdb.image_path_at(i)).size
for i in xrange(imdb.num_images)]
roidb = imdb.roidb
# roidb is a list of dictionaries, each with the following keys:
# boxes
# gt_overlaps
# gt_classes
# flipped
for i in xrange(len(imdb.image_index)):
roidb[i]['image'] = imdb.image_path_at(i)
roidb[i]['width'] = sizes[i][0]
roidb[i]['height'] = sizes[i][1]
# need gt_overlaps as a dense array for argmax
gt_overlaps = roidb[i]['gt_overlaps'].toarray()
# max overlap with gt over classes (columns)
max_overlaps = gt_overlaps.max(axis=1)
# gt class that had the max overlap
max_classes = gt_overlaps.argmax(axis=1)
roidb[i]['max_classes'] = max_classes ## gt class that had the max overlap (columns)
roidb[i]['max_overlaps'] = max_overlaps ## max overlap with gt over classes (columns)
# sanity checks
# max overlap of 0 => class should be zero (background)
zero_inds = np.where(max_overlaps == 0)[0]
assert all(max_classes[zero_inds] == 0)
# max overlap > 0 => class should not be zero (must be a fg class)
nonzero_inds = np.where(max_overlaps > 0)[0]
assert all(max_classes[nonzero_inds] != 0)
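# Typical call order (sketch; `imdb` is any imdb instance from lib/datasets):
#   prepare_roidb(imdb)
#   means, stds = add_bbox_regression_targets(imdb.roidb)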
def add_bbox_regression_targets(roidb):
"""Add information needed to train bounding-box regressors."""
assert len(roidb) > 0
assert 'max_classes' in roidb[0], 'Did you call prepare_roidb first?'
num_images = len(roidb)
# Infer number of classes from the number of columns in gt_overlaps
num_classes = roidb[0]['gt_overlaps'].shape[1]
for im_i in xrange(num_images):
rois = roidb[im_i]['boxes']
max_overlaps = roidb[im_i]['max_overlaps']
max_classes = roidb[im_i]['max_classes']
roidb[im_i]['bbox_targets'] = \
_compute_targets(rois, overlaps=max_overlaps, labels=max_classes) # Compute bounding-box regression targets for an image
if cfg.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED: ## RPN can only use precomputed normalization because there are no fixed statistics to compute a priori
# Use fixed / precomputed "means" and "stds" instead of empirical values
means = np.tile(
np.array(cfg.TRAIN.BBOX_NORMALIZE_MEANS), (num_classes, 1)) # shape = (num_classes, 4)
stds = np.tile(
np.array(cfg.TRAIN.BBOX_NORMALIZE_STDS), (num_classes, 1)) # shape = (num_classes, 4)
else:
# Compute values needed for means and stds
# var(x) = E(x^2) - E(x)^2
class_counts = np.zeros((num_classes, 1)) + cfg.EPS
sums = np.zeros((num_classes, 4))
squared_sums = np.zeros((num_classes, 4))
for im_i in xrange(num_images):
targets = roidb[im_i]['bbox_targets']
for cls in xrange(1, num_classes):
cls_inds = np.where(targets[:, 0] == cls)[0]
if cls_inds.size > 0:
class_counts[cls] += cls_inds.size
sums[cls, :] += targets[cls_inds, 1:].sum(axis=0)
squared_sums[cls, :] += \
(targets[cls_inds, 1:] ** 2).sum(axis=0)
means = sums / class_counts
stds = np.sqrt(squared_sums / class_counts - means ** 2)
print 'bbox target means:'
print means
print means[1:, :].mean(axis=0) # ignore bg class
print 'bbox target stdevs:'
print stds
print stds[1:, :].mean(axis=0) # ignore bg class
# Normalize targets
if cfg.TRAIN.BBOX_NORMALIZE_TARGETS:
print "Normalizing targets"
for im_i in xrange(num_images):
targets = roidb[im_i]['bbox_targets']
for cls in xrange(1, num_classes):
cls_inds = np.where(targets[:, 0] == cls)[0]
roidb[im_i]['bbox_targets'][cls_inds, 1:] -= means[cls, :]
roidb[im_i]['bbox_targets'][cls_inds, 1:] /= stds[cls, :]
else:
print "NOT normalizing targets"
# These values will be needed for making predictions
# (the predicts will need to be unnormalized and uncentered)
return means.ravel(), stds.ravel() ## Return a contiguous flattened array
def _compute_targets(rois, overlaps, labels):
"""Compute bounding-box regression targets for an image."""
"""
overlaps: max_overlaps of rois
labels: max_classes of rois
return:
[[cls, dx, dy, dw, dh]
...
]
"""
# Indices of ground-truth ROIs
gt_inds = np.where(overlaps == 1)[0]
if len(gt_inds) == 0:
# Fail if the image has no ground-truth ROIs
return np.zeros((rois.shape[0], 5), dtype=np.float32)
# Indices of examples for which we try to make predictions
ex_inds = np.where(overlaps >= cfg.TRAIN.BBOX_THRESH)[0] ## e.g., 0.5
# Get IoU overlap between each ex ROI and gt ROI
ex_gt_overlaps = bbox_overlaps(
np.ascontiguousarray(rois[ex_inds, :], dtype=np.float),
np.ascontiguousarray(rois[gt_inds, :], dtype=np.float))
# Find which gt ROI each ex ROI has max overlap with:
# this will be the ex ROI's gt target
gt_assignment = ex_gt_overlaps.argmax(axis=1)
gt_rois = rois[gt_inds[gt_assignment], :]
ex_rois = rois[ex_inds, :]
targets = np.zeros((rois.shape[0], 5), dtype=np.float32)
targets[ex_inds, 0] = labels[ex_inds]
targets[ex_inds, 1:] = bbox_transform(ex_rois, gt_rois) # compute [dx, dy, dw, dh]
return targets
| [
"[email protected]"
] | |
ad73927538d2a6b51e3e9da4eaa96818ced5e08a | f714db4463dd37fc33382364dc4b1963a9053e49 | /tests/sentry/event_manager/interfaces/test_frame.py | 22dd3b8b5756050429bafb0bd12c3db6daa422ae | [
"BUSL-1.1",
"Apache-2.0"
] | permissive | macher91/sentry | 92171c2ad23564bf52627fcd711855685b138cbd | dd94d574403c95eaea6d4ccf93526577f3d9261b | refs/heads/master | 2021-07-07T08:23:53.339912 | 2020-07-21T08:03:55 | 2020-07-21T08:03:55 | 140,079,930 | 0 | 0 | BSD-3-Clause | 2020-05-13T11:28:35 | 2018-07-07T11:50:48 | Python | UTF-8 | Python | false | false | 1,366 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import pytest
from sentry import eventstore
from sentry.event_manager import EventManager
@pytest.fixture
def make_frames_snapshot(insta_snapshot):
def inner(data):
mgr = EventManager(data={"stacktrace": {"frames": [data]}})
mgr.normalize()
evt = eventstore.create_event(data=mgr.get_data())
frame = evt.interfaces["stacktrace"].frames[0]
insta_snapshot({"errors": evt.data.get("errors"), "to_json": frame.to_json()})
return inner
@pytest.mark.parametrize(
"input",
[
{"filename": 1},
{"filename": "foo", "abs_path": 1},
{"function": 1},
{"module": 1},
{"function": "?"},
],
)
def test_bad_input(make_frames_snapshot, input):
make_frames_snapshot(input)
@pytest.mark.parametrize(
"x", [float("inf"), float("-inf"), float("nan")], ids=["inf", "neginf", "nan"]
)
def test_context_with_nan(make_frames_snapshot, x):
make_frames_snapshot({"filename": "x", "vars": {"x": x}})
def test_address_normalization(make_frames_snapshot):
make_frames_snapshot(
{
"lineno": 1,
"filename": "blah.c",
"function": "main",
"instruction_addr": 123456,
"symbol_addr": "123450",
"image_addr": "0x0",
}
)
| [
"[email protected]"
] | |
f4f46508d1a0f02512ff3ef04f883f5f7004be63 | 1bc2a635a93b5bc84606edf9ac2226851cac9e6d | /tests/unit/test_business.py | 99dba73500a5ba0dccb4d31c5d763654cfe9ff9d | [
"MIT"
] | permissive | coolkat64/rolling | 819149cbb1e11a455b93a030477f9da91e2f93e4 | 4c3ee2401128e993a52ac9b52cdbd32e17728129 | refs/heads/master | 2022-11-29T00:35:14.058665 | 2020-07-31T20:37:15 | 2020-07-31T20:37:15 | 285,312,272 | 0 | 0 | MIT | 2020-08-05T14:25:48 | 2020-08-05T14:25:47 | null | UTF-8 | Python | false | false | 37,466 | py | # coding: utf-8
import typing
from aiohttp import ClientResponse
from aiohttp.test_utils import TestClient
import pytest
from rolling.kernel import Kernel
from rolling.model.character import CharacterModel
from rolling.server.controller.business import ALL_OF_THEM
from rolling.server.controller.business import ONE_OF_THEM
from rolling.server.document.business import OfferDocument
from rolling.server.document.business import OfferItemDocument
from rolling.server.document.business import OfferItemPosition
from rolling.server.document.business import OfferOperand
from rolling.server.document.business import OfferStatus
from rolling.server.document.universe import UniverseStateDocument
from tests.fixtures import create_stuff
from tests.fixtures import description_serializer
EXPECTED_PLASTIC_BOTTLE_NAME = "Plastic bottle (1)"
EXPECTED_PLASTIC_BOTTLE_NAME_ = "(!) Plastic bottle (1)"
def _add_items(kernel: Kernel, offer_id: int) -> None:
kernel.server_db_session.add(
OfferItemDocument(
offer_id=offer_id,
position=OfferItemPosition.REQUEST.value,
resource_id="RED_WINE",
quantity=1.5,
)
)
kernel.server_db_session.add(
OfferItemDocument(
offer_id=offer_id,
position=OfferItemPosition.REQUEST.value,
stuff_id="STONE_HAXE",
quantity=1,
)
)
kernel.server_db_session.add(
OfferItemDocument(
offer_id=offer_id,
position=OfferItemPosition.OFFER.value,
resource_id="WOOD",
quantity=0.5,
)
)
kernel.server_db_session.add(
OfferItemDocument(
offer_id=offer_id,
position=OfferItemPosition.OFFER.value,
stuff_id="LEATHER_JACKET",
quantity=1,
)
)
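# The fixtures below create a permanent offer owned by xena that requests red
# wine (1.5 L) and/or a stone haxe in exchange for wood (0.5 m³) and/or a
# leather jacket, with OR or AND operands depending on the fixture.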
@pytest.fixture
def xena_permanent_or_offer(worldmapc_xena_model: CharacterModel, worldmapc_kernel: Kernel):
offer_doc = OfferDocument(
character_id=worldmapc_xena_model.id,
title="OfferTitle",
request_operand=OfferOperand.OR.value,
offer_operand=OfferOperand.OR.value,
permanent=True,
status=OfferStatus.OPEN.value,
)
worldmapc_kernel.server_db_session.add(offer_doc)
worldmapc_kernel.server_db_session.commit()
_add_items(worldmapc_kernel, offer_doc.id)
worldmapc_kernel.server_db_session.commit()
return offer_doc
@pytest.fixture
def xena_permanent_and_offer(worldmapc_xena_model: CharacterModel, worldmapc_kernel: Kernel):
offer_doc = OfferDocument(
character_id=worldmapc_xena_model.id,
title="OfferTitle",
request_operand=OfferOperand.AND.value,
offer_operand=OfferOperand.AND.value,
permanent=True,
status=OfferStatus.OPEN.value,
)
worldmapc_kernel.server_db_session.add(offer_doc)
worldmapc_kernel.server_db_session.commit()
_add_items(worldmapc_kernel, offer_doc.id)
worldmapc_kernel.server_db_session.commit()
return offer_doc
class TestBusiness:
async def _assert_owned_offers(
self,
kernel: Kernel,
web: TestClient,
character: CharacterModel,
count: int,
names: typing.Optional[typing.List[str]] = None,
) -> None:
names = names or []
# main page
resp: ClientResponse = await web.post(f"/business/{character.id}")
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label for i in descr.items]
assert f"Voir les offres que vous proposez ({count} en cours)" in item_labels
if not names:
return
# offers page
resp: ClientResponse = await web.post(f"/business/{character.id}/offers")
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label for i in descr.items]
for name in names:
assert next(l for l in item_labels if name in str(l))
async def _assert_edit_offer(
self,
kernel: Kernel,
web: TestClient,
character: CharacterModel,
offer_id: int,
request_operand_str: str = ONE_OF_THEM,
request_item_names: typing.Optional[typing.List[str]] = None,
request_item_names_not: typing.Optional[typing.List[str]] = None,
offer_operand_str: str = ONE_OF_THEM,
offer_item_names: typing.Optional[typing.List[str]] = None,
offer_item_names_not: typing.Optional[typing.List[str]] = None,
open_: bool = False,
) -> None:
request_item_names = request_item_names or []
request_item_names_not = request_item_names_not or []
offer_item_names = offer_item_names or []
offer_item_names_not = offer_item_names_not or []
resp = await web.post(f"/business/{character.id}/offers/{offer_id}")
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
form_item_by_name = {i.name: i for i in descr.items[0].items}
form_item_labels = [i.label for i in descr.items[0].items]
assert form_item_by_name["request_operand"].value == request_operand_str
assert form_item_by_name["offer_operand"].value == offer_operand_str
for request_item_name in request_item_names:
assert request_item_name in form_item_labels
for offer_item_name in offer_item_names:
assert offer_item_name in form_item_labels
for request_item_name_not in request_item_names_not:
assert request_item_name_not not in form_item_labels
for offer_item_name_not in offer_item_names_not:
assert offer_item_name_not not in form_item_labels
if not open_:
assert "Activer" == descr.items[1].label
else:
assert "Désactiver" == descr.items[1].label
async def _assert_read_offer(
self,
kernel: Kernel,
web: TestClient,
owner: CharacterModel,
character: CharacterModel,
offer_id: int,
request_operand_str: str = ONE_OF_THEM,
have_not_item_names: typing.Optional[typing.List[str]] = None,
have_item_names: typing.Optional[typing.List[str]] = None,
offer_operand_str: str = ONE_OF_THEM,
offer_item_names: typing.Optional[typing.List[str]] = None,
owner_can_make_deal: bool = True,
can_make_deal: bool = False,
) -> None:
have_not_item_names = have_not_item_names or []
have_item_names = have_item_names or []
offer_item_names = offer_item_names or []
resp = await web.post(f"/business/{character.id}/see-offer/{owner.id}/{offer_id}")
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
form_item_labels = [i.label or i.text for i in descr.items]
assert f"Eléments demandé(s) ({request_operand_str})" in form_item_labels
assert f"Eléments donné(s) ({offer_operand_str})" in form_item_labels
for have_not_item_name in have_not_item_names:
assert f"(X) {have_not_item_name}" in form_item_labels
for have_item_name in have_item_names:
assert f"(V) {have_item_name}" in form_item_labels
for offer_item_name in offer_item_names:
assert offer_item_name in form_item_labels
if owner_can_make_deal:
if can_make_deal:
assert "Effectuer une transaction" in form_item_labels
else:
assert "Vous ne possédez pas de quoi faire un marché" in form_item_labels
else:
assert f"{owner.name} ne peut pas assurer cette opération"
async def test_create_offer__nominal_case(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
) -> None:
xena = worldmapc_xena_model
kernel = worldmapc_kernel
web = worldmapc_web_app
await self._assert_owned_offers(kernel, web, xena, count=0)
resp = await web.post(f"/business/{xena.id}/offers-create?permanent=1")
assert 200 == resp.status
resp = await web.post(
f"/business/{xena.id}/offers-create?permanent=1", json={"title": "My offer"}
)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
assert descr.redirect == f"/business/{xena.id}/offers/1"
await self._assert_owned_offers(kernel, web, xena, count=1, names=["My offer"])
await self._assert_edit_offer(
kernel,
web,
xena,
offer_id=1,
request_operand_str=ONE_OF_THEM,
request_item_names=[],
offer_operand_str=ONE_OF_THEM,
offer_item_names=[],
open_=False,
)
async def test_create_offer__change_operands(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
) -> None:
xena = worldmapc_xena_model
kernel = worldmapc_kernel
web = worldmapc_web_app
await self._assert_owned_offers(kernel, web, xena, count=0)
# see test_create_offer__nominal_case if in error
assert (
await web.post(
f"/business/{xena.id}/offers-create?permanent=1", json={"title": "My offer"}
)
).status == 200
assert (
await web.post(
f"/business/{xena.id}/offers/{1}",
json={"request_operand": ALL_OF_THEM, "offer_operand": ALL_OF_THEM},
)
).status == 200
await self._assert_edit_offer(
kernel,
web,
xena,
offer_id=1,
request_operand_str=ALL_OF_THEM,
offer_operand_str=ALL_OF_THEM,
)
async def test_create_offer__open_close(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
) -> None:
xena = worldmapc_xena_model
kernel = worldmapc_kernel
web = worldmapc_web_app
await self._assert_owned_offers(kernel, web, xena, count=0)
# see test_create_offer__nominal_case if in error
assert (
await web.post(
f"/business/{xena.id}/offers-create?permanent=1", json={"title": "My offer"}
)
).status == 200
assert (await web.post(f"/business/{xena.id}/offers/{1}?open=1")).status == 200
await self._assert_edit_offer(kernel, web, xena, offer_id=1, open_=True)
await self._assert_owned_offers(kernel, web, xena, count=1, names=["(V) My offer"])
assert (await web.post(f"/business/{xena.id}/offers/{1}?close=1")).status == 200
await self._assert_edit_offer(kernel, web, xena, offer_id=1, open_=False)
await self._assert_owned_offers(kernel, web, xena, count=1, names=["(X) My offer"])
async def test_add_items__check_form(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
) -> None:
xena = worldmapc_xena_model
kernel = worldmapc_kernel
web = worldmapc_web_app
assert (
await web.post(
f"/business/{xena.id}/offers-create?permanent=1", json={"title": "My offer"}
)
).status == 200
resp = await web.post(f"/business/{xena.id}/offers/{1}/add-item?position=REQUEST")
assert resp.status == 200
descr = description_serializer.load(await resp.json())
assert descr.items[0].is_form
assert descr.items[0].items[0].name == "value"
for name in [
"Bois (mètre cubes)",
"Vin rouge (litres)",
"Plastic bottle (unité)",
"Bouclier de bois (unité)",
"Hache de pierre (unité)",
"Veste de cuir (unité)",
"Pierre (unités)",
"Corps (unité)",
"Petit bois (mètre cubes)",
]:
assert name in descr.items[0].items[0].choices
assert descr.items[0].items[1].name == "quantity"
async def test_update_offer__have_some_required__request_and(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
) -> None:
xena = worldmapc_xena_model
kernel = worldmapc_kernel
web = worldmapc_web_app
await self._assert_owned_offers(kernel, web, xena, count=0)
# see test_create_offer__nominal_case if in error
assert (
await web.post(
f"/business/{xena.id}/offers-create?permanent=1", json={"title": "My offer"}
)
).status == 200
# Add one stuff
assert (
await web.post(
f"/business/{xena.id}/offers/{1}/add-item"
f"?position=REQUEST&value=Plastic bottle (unité)&quantity=1"
)
).status == 200
await self._assert_edit_offer(
kernel, web, xena, offer_id=1, request_item_names=[EXPECTED_PLASTIC_BOTTLE_NAME]
)
# Add one resource
assert (
await web.post(
f"/business/{xena.id}/offers/{1}/add-item"
f"?position=REQUEST&value=Petit bois (mètre cubes)&quantity=1.50"
)
).status == 200
await self._assert_edit_offer(
kernel,
web,
xena,
offer_id=1,
request_item_names=[EXPECTED_PLASTIC_BOTTLE_NAME, "Petit bois (1.5 mètre cubes)"],
)
async def test_update_offer__have_some_required__remove_item(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
) -> None:
xena = worldmapc_xena_model
kernel = worldmapc_kernel
web = worldmapc_web_app
await self._assert_owned_offers(kernel, web, xena, count=0)
# see test_create_offer__nominal_case if in error
assert (
await web.post(
f"/business/{xena.id}/offers-create?permanent=1", json={"title": "My offer"}
)
).status == 200
# Add one stuff
assert (
await web.post(
f"/business/{xena.id}/offers/{1}/add-item?position=REQUEST&value=Plastic bottle (unité)&quantity=1"
)
).status == 200
await self._assert_edit_offer(
kernel, web, xena, offer_id=1, request_item_names=[EXPECTED_PLASTIC_BOTTLE_NAME]
)
# remove it
assert (await web.post(f"/business/{xena.id}/offers/{1}/remove-item/{1}")).status == 200
await self._assert_edit_offer(
kernel, web, xena, offer_id=1, request_item_names_not=[EXPECTED_PLASTIC_BOTTLE_NAME]
)
async def test_edit_offer__test_owner_have_display(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_arthur_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
xena_permanent_and_offer: OfferDocument,
) -> None:
xena = worldmapc_xena_model
kernel = worldmapc_kernel
web = worldmapc_web_app
await self._assert_edit_offer(
kernel,
web,
xena,
offer_id=1,
offer_item_names=["(X) Bois (0.5 mètre cubes)", "(X) Veste de cuir (1)"],
request_operand_str=ALL_OF_THEM,
offer_operand_str=ALL_OF_THEM,
open_=True,
)
# add one to offer owner
kernel.resource_lib.add_resource_to("WOOD", 0.5, character_id=xena.id)
await self._assert_edit_offer(
kernel,
web,
xena,
offer_id=1,
offer_item_names=["Bois (0.5 mètre cubes)", "(X) Veste de cuir (1)"],
request_operand_str=ALL_OF_THEM,
offer_operand_str=ALL_OF_THEM,
open_=True,
)
# add one to offer owner
jacket = create_stuff(kernel, "LEATHER_JACKET")
kernel.stuff_lib.set_carried_by(jacket.id, character_id=xena.id)
await self._assert_edit_offer(
kernel,
web,
xena,
offer_id=1,
request_item_names=["Bois (0.5 mètre cubes)", "Veste de cuir (1)"],
request_operand_str=ALL_OF_THEM,
offer_operand_str=ALL_OF_THEM,
open_=True,
)
async def test_read_offer__have_some_required_items__and(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_arthur_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
xena_permanent_and_offer: OfferDocument,
) -> None:
xena = worldmapc_xena_model
arthur = worldmapc_arthur_model
kernel = worldmapc_kernel
web = worldmapc_web_app
offer = xena_permanent_and_offer
await self._assert_read_offer(
kernel,
web,
xena,
arthur,
offer_id=offer.id,
request_operand_str=ALL_OF_THEM,
offer_operand_str=ALL_OF_THEM,
have_not_item_names=["Vin rouge (1.5 litres)", "Hache de pierre (1)"],
offer_item_names=["(!) Bois (0.5 mètre cubes)", "(!) Veste de cuir (1)"],
owner_can_make_deal=False,
)
kernel.resource_lib.add_resource_to("RED_WINE", 2.0, character_id=arthur.id)
await self._assert_read_offer(
kernel,
web,
xena,
arthur,
offer_id=offer.id,
request_operand_str=ALL_OF_THEM,
offer_operand_str=ALL_OF_THEM,
have_not_item_names=["Hache de pierre (1)"],
have_item_names=["Vin rouge (1.5 litres)"],
owner_can_make_deal=False,
)
haxe = create_stuff(kernel, "STONE_HAXE")
kernel.stuff_lib.set_carried_by(haxe.id, character_id=arthur.id)
await self._assert_read_offer(
kernel,
web,
xena,
arthur,
offer_id=offer.id,
request_operand_str=ALL_OF_THEM,
offer_operand_str=ALL_OF_THEM,
have_item_names=["Vin rouge (1.5 litres)", "Hache de pierre (1)"],
owner_can_make_deal=False,
)
# add wood to offer owner (remove the (!))
kernel.resource_lib.add_resource_to("WOOD", 0.5, character_id=xena.id)
await self._assert_read_offer(
kernel,
web,
xena,
arthur,
offer_id=offer.id,
request_operand_str=ALL_OF_THEM,
offer_operand_str=ALL_OF_THEM,
offer_item_names=["Bois (0.5 mètre cubes)", "(!) Veste de cuir (1)"],
owner_can_make_deal=False,
)
# add jacket to offer owner (remove the (!))
jacket = create_stuff(kernel, "LEATHER_JACKET")
kernel.stuff_lib.set_carried_by(jacket.id, character_id=xena.id)
await self._assert_read_offer(
kernel,
web,
xena,
arthur,
offer_id=offer.id,
request_operand_str=ALL_OF_THEM,
offer_operand_str=ALL_OF_THEM,
offer_item_names=["Bois (0.5 mètre cubes)", "Veste de cuir (1)"],
owner_can_make_deal=True,
can_make_deal=True,
)
async def test_read_offer__have_some_required_items__or(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_arthur_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
xena_permanent_or_offer: OfferDocument,
) -> None:
xena = worldmapc_xena_model
arthur = worldmapc_arthur_model
kernel = worldmapc_kernel
web = worldmapc_web_app
offer = xena_permanent_or_offer
# ensure xena have all offered items
kernel.resource_lib.add_resource_to("WOOD", 0.5, character_id=xena.id)
jacket = create_stuff(kernel, "LEATHER_JACKET")
kernel.stuff_lib.set_carried_by(jacket.id, character_id=xena.id)
await self._assert_read_offer(
kernel,
web,
xena,
arthur,
offer_id=offer.id,
request_operand_str=ONE_OF_THEM,
offer_operand_str=ONE_OF_THEM,
have_not_item_names=["Vin rouge (1.5 litres)", "Hache de pierre (1)"],
offer_item_names=["Bois (0.5 mètre cubes)", "Veste de cuir (1)"],
can_make_deal=False,
)
kernel.resource_lib.add_resource_to("RED_WINE", 2.0, character_id=arthur.id)
await self._assert_read_offer(
kernel,
web,
xena,
arthur,
offer_id=offer.id,
request_operand_str=ONE_OF_THEM,
offer_operand_str=ONE_OF_THEM,
have_not_item_names=["Hache de pierre (1)"],
have_item_names=["Vin rouge (1.5 litres)"],
can_make_deal=True,
)
haxe = create_stuff(kernel, "STONE_HAXE")
kernel.stuff_lib.set_carried_by(haxe.id, character_id=arthur.id)
await self._assert_read_offer(
kernel,
web,
xena,
arthur,
offer_id=offer.id,
request_operand_str=ONE_OF_THEM,
offer_operand_str=ONE_OF_THEM,
have_item_names=["Vin rouge (1.5 litres)", "Hache de pierre (1)"],
can_make_deal=True,
)
async def test_read_offer__make_transaction__missing_request_and(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_arthur_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
xena_permanent_and_offer: OfferDocument,
) -> None:
xena = worldmapc_xena_model
arthur = worldmapc_arthur_model
kernel = worldmapc_kernel
web = worldmapc_web_app
offer = xena_permanent_and_offer
# ensure xena have all offered items
kernel.resource_lib.add_resource_to("WOOD", 0.5, character_id=xena.id)
jacket = create_stuff(kernel, "LEATHER_JACKET")
kernel.stuff_lib.set_carried_by(jacket.id, character_id=xena.id)
# Give just a part of necessary to arthur
kernel.resource_lib.add_resource_to("RED_WINE", 2.0, character_id=arthur.id)
resp = await web.post(
f"/business/{arthur.id}/see-offer/{offer.character_id}/{offer.id}/deal"
)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
assert "Vous ne possédez pas ce qu'il faut pour faire ce marché" in item_labels
async def test_read_offer__make_transaction__owner_missing_offer_and(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_arthur_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
xena_permanent_and_offer: OfferDocument,
) -> None:
xena = worldmapc_xena_model
arthur = worldmapc_arthur_model
kernel = worldmapc_kernel
web = worldmapc_web_app
offer = xena_permanent_and_offer
# xena have just a part of offered items
kernel.resource_lib.add_resource_to("WOOD", 0.5, character_id=xena.id)
resp = await web.post(
f"/business/{arthur.id}/see-offer/{offer.character_id}/{offer.id}/deal"
)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
assert f"{xena.name} ne peut pas assurer cette opération" in item_labels
async def test_read_offer__make_transaction__request_and(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_arthur_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
xena_permanent_and_offer: OfferDocument,
initial_universe_state: UniverseStateDocument,
) -> None:
xena = worldmapc_xena_model
arthur = worldmapc_arthur_model
kernel = worldmapc_kernel
web = worldmapc_web_app
offer = xena_permanent_and_offer
# Give all necessary to arthur
kernel.resource_lib.add_resource_to("RED_WINE", 2.0, character_id=arthur.id)
haxe = create_stuff(kernel, "STONE_HAXE")
kernel.stuff_lib.set_carried_by(haxe.id, character_id=arthur.id)
# ensure xena have all offered items
kernel.resource_lib.add_resource_to("WOOD", 0.5, character_id=xena.id)
jacket = create_stuff(kernel, "LEATHER_JACKET")
kernel.stuff_lib.set_carried_by(jacket.id, character_id=xena.id)
assert kernel.resource_lib.have_resource(xena.id, "WOOD", 0.5)
assert kernel.stuff_lib.have_stuff_count(xena.id, "LEATHER_JACKET")
assert not kernel.resource_lib.have_resource(xena.id, "RED_WINE", 1.5)
assert not kernel.stuff_lib.have_stuff_count(xena.id, "STONE_HAXE")
assert not kernel.resource_lib.have_resource(arthur.id, "WOOD", 0.5)
assert not kernel.stuff_lib.have_stuff_count(arthur.id, "LEATHER_JACKET")
assert kernel.resource_lib.have_resource(arthur.id, "RED_WINE", 1.5)
assert kernel.stuff_lib.have_stuff_count(arthur.id, "STONE_HAXE")
resp = await web.post(
f"/business/{arthur.id}/see-offer/{offer.character_id}/{offer.id}/deal"
)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
assert "Je confirme vouloir faire ce marché" in item_labels
# Do the deal
resp = await web.post(
f"/business/{arthur.id}/see-offer/{offer.character_id}/{offer.id}/deal?confirm=1"
)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
assert "Marché effectué" in item_labels
assert not kernel.resource_lib.have_resource(xena.id, "WOOD", 0.5)
assert not kernel.stuff_lib.have_stuff_count(xena.id, "LEATHER_JACKET")
assert kernel.resource_lib.have_resource(xena.id, "RED_WINE", 1.5)
assert kernel.stuff_lib.have_stuff_count(xena.id, "STONE_HAXE")
assert kernel.resource_lib.have_resource(arthur.id, "WOOD", 0.5)
assert kernel.stuff_lib.have_stuff_count(arthur.id, "LEATHER_JACKET")
assert not kernel.resource_lib.have_resource(arthur.id, "RED_WINE", 1.5)
assert not kernel.stuff_lib.have_stuff_count(arthur.id, "STONE_HAXE")
async def test_read_offer__make_transaction__missing_all_request_or(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_arthur_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
xena_permanent_or_offer: OfferDocument,
) -> None:
xena = worldmapc_xena_model
arthur = worldmapc_arthur_model
kernel = worldmapc_kernel
web = worldmapc_web_app
offer = xena_permanent_or_offer
# ensure xena have all offered items
kernel.resource_lib.add_resource_to("WOOD", 0.5, character_id=xena.id)
jacket = create_stuff(kernel, "LEATHER_JACKET")
kernel.stuff_lib.set_carried_by(jacket.id, character_id=xena.id)
resp = await web.post(
f"/business/{arthur.id}/see-offer/{offer.character_id}/{offer.id}/deal"
)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
assert "Vous ne possédez pas ce qu'il faut pour faire ce marché" in item_labels
async def test_read_offer__make_transaction__request_or(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_arthur_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
xena_permanent_or_offer: OfferDocument,
initial_universe_state: UniverseStateDocument,
) -> None:
xena = worldmapc_xena_model
arthur = worldmapc_arthur_model
kernel = worldmapc_kernel
web = worldmapc_web_app
offer = xena_permanent_or_offer
# ensure xena have one of offered items
kernel.resource_lib.add_resource_to("WOOD", 0.5, character_id=xena.id)
# Give all necessary to arthur
kernel.resource_lib.add_resource_to("RED_WINE", 1.5, character_id=arthur.id)
haxe = create_stuff(kernel, "STONE_HAXE")
kernel.stuff_lib.set_carried_by(haxe.id, character_id=arthur.id)
assert kernel.resource_lib.have_resource(xena.id, "WOOD", 0.5)
assert not kernel.resource_lib.have_resource(xena.id, "RED_WINE", 1.5)
assert not kernel.stuff_lib.have_stuff_count(xena.id, "STONE_HAXE")
assert not kernel.resource_lib.have_resource(arthur.id, "WOOD", 0.5)
assert kernel.resource_lib.have_resource(arthur.id, "RED_WINE", 1.5)
assert kernel.stuff_lib.have_stuff_count(arthur.id, "STONE_HAXE")
resp = await web.post(
f"/business/{arthur.id}/see-offer/{offer.character_id}/{offer.id}/deal"
)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
item_by_label = {i.label: i for i in descr.items}
give_wine_str = "Faire ce marché et donner Vin rouge (1.5 litres)"
assert give_wine_str in item_labels
assert "Faire ce marché et donner Hache de pierre (1)" in item_labels
give_wine_url = item_by_label[give_wine_str].form_action
resp = await web.post(give_wine_url)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
item_by_label = {i.label: i for i in descr.items}
take_wood_str = "Faire ce marché et obtenir Bois (0.5 mètre cubes)"
assert take_wood_str in item_labels
assert "Faire ce marché et obtenir Veste de cuir (1)" not in item_labels
# Give jacket to xena to permit take it
jacket = create_stuff(kernel, "LEATHER_JACKET")
kernel.stuff_lib.set_carried_by(jacket.id, character_id=xena.id)
resp = await web.post(give_wine_url)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
item_by_label = {i.label: i for i in descr.items}
take_wood_str = "Faire ce marché et obtenir Bois (0.5 mètre cubes)"
assert take_wood_str in item_labels
assert "Faire ce marché et obtenir Veste de cuir (1)" in item_labels
take_wood_url = item_by_label[take_wood_str].form_action
resp = await web.post(take_wood_url)
assert 200 == resp.status
assert not kernel.resource_lib.have_resource(xena.id, "WOOD", 0.5)
assert kernel.resource_lib.have_resource(xena.id, "RED_WINE", 1.5)
assert not kernel.stuff_lib.have_stuff_count(xena.id, "STONE_HAXE")
assert kernel.resource_lib.have_resource(arthur.id, "WOOD", 0.5)
assert not kernel.resource_lib.have_resource(arthur.id, "RED_WINE", 1.5)
assert kernel.stuff_lib.have_stuff_count(arthur.id, "STONE_HAXE")
async def test_create_with_character_transaction(
self,
worldmapc_xena_model: CharacterModel,
worldmapc_arthur_model: CharacterModel,
worldmapc_web_app: TestClient,
worldmapc_kernel: Kernel,
initial_universe_state: UniverseStateDocument,
) -> None:
"""+ conteur main page + vue depuis target + blinker"""
xena = worldmapc_xena_model
arthur = worldmapc_arthur_model
kernel = worldmapc_kernel
web = worldmapc_web_app
assert (
await web.post(
f"/business/{xena.id}/offers-create?with_character_id={arthur.id}",
json={"title": "My offer"},
)
).status == 200
assert (
await web.post(
f"/business/{xena.id}/offers/{1}/add-item"
f"?position=REQUEST&value=Plastic bottle (unité)&quantity=1"
)
).status == 200
assert (
await web.post(
f"/business/{xena.id}/offers/{1}/add-item"
f"?position=OFFER&value=Vin rouge (litres)&quantity=1.5"
)
).status == 200
assert (await web.post(f"/business/{xena.id}/offers/{1}?open=1")).status == 200
await self._assert_edit_offer(
kernel,
web,
character=xena,
offer_id=1,
request_operand_str=ONE_OF_THEM,
offer_operand_str=ONE_OF_THEM,
request_item_names=["Plastic bottle (1)"],
offer_item_names=["(X) Vin rouge (1.5 litres)"],
open_=True,
)
await self._assert_read_offer(
kernel,
web,
owner=xena,
character=arthur,
offer_id=1,
request_operand_str=ONE_OF_THEM,
offer_operand_str=ONE_OF_THEM,
have_not_item_names=["Plastic bottle (1)"],
offer_item_names=["(!) Vin rouge (1.5 litres)"],
can_make_deal=False,
)
# Give all necessary
kernel.resource_lib.add_resource_to("RED_WINE", 1.5, character_id=xena.id)
bottle = create_stuff(kernel, "PLASTIC_BOTTLE_1L")
kernel.stuff_lib.set_carried_by(bottle.id, character_id=arthur.id)
assert kernel.resource_lib.have_resource(xena.id, "RED_WINE", 1.5)
assert not kernel.stuff_lib.have_stuff_count(xena.id, "PLASTIC_BOTTLE_1L")
assert not kernel.resource_lib.have_resource(arthur.id, "RED_WINE", 1.5)
assert kernel.stuff_lib.have_stuff_count(arthur.id, "PLASTIC_BOTTLE_1L")
await self._assert_read_offer(
kernel,
web,
owner=xena,
character=arthur,
offer_id=1,
request_operand_str=ONE_OF_THEM,
offer_operand_str=ONE_OF_THEM,
have_item_names=["Plastic bottle (1)"],
offer_item_names=["Vin rouge (1.5 litres)"],
can_make_deal=True,
)
# xena main page
resp: ClientResponse = await web.post(f"/business/{xena.id}")
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label for i in descr.items]
assert "Voir les transactions en attente (1 en cours)" in item_labels
# arthur main page
resp: ClientResponse = await web.post(f"/business/{arthur.id}")
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label for i in descr.items]
assert "*Voir les transactions en attente (1 en cours)" in item_labels
resp = await web.post(f"/business/{arthur.id}/see-offer/{xena.id}/{1}/deal")
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
item_by_label = {i.label: i for i in descr.items}
deal_str = "Faire ce marché et donner Plastic bottle (1)"
assert deal_str in item_labels
go_url = item_by_label[deal_str].form_action
resp = await web.post(go_url)
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label or i.text for i in descr.items]
item_by_label = {i.label: i for i in descr.items}
deal_str = "Faire ce marché et obtenir Vin rouge (1.5 litres)"
assert deal_str in item_labels
go_url = item_by_label[deal_str].form_action
assert (await web.post(go_url)).status == 200
assert not kernel.resource_lib.have_resource(xena.id, "RED_WINE", 1.5)
assert kernel.stuff_lib.have_stuff_count(xena.id, "PLASTIC_BOTTLE_1L")
assert kernel.resource_lib.have_resource(arthur.id, "RED_WINE", 1.5)
assert not kernel.stuff_lib.have_stuff_count(arthur.id, "PLASTIC_BOTTLE_1L")
# xena main page
resp: ClientResponse = await web.post(f"/business/{xena.id}")
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label for i in descr.items]
assert "Voir les transactions en attente (0 en cours)" in item_labels
# arthur main page
resp: ClientResponse = await web.post(f"/business/{arthur.id}")
assert 200 == resp.status
descr = description_serializer.load(await resp.json())
item_labels = [i.label for i in descr.items]
assert "Voir les transactions en attente (0 en cours)" in item_labels
| [
"[email protected]"
] | |
e259df553081c2a0843857a31971fbeb29ab02d1 | 8c9df3465ec7cab68b10e67823c1f9b475dab68e | /square__transverse_longitudinal_field_af_ising__static/square_ising.py | 12dad1d1699c6934cd3da33fb9d3ea8f37bdd5f5 | [
"BSD-3-Clause"
] | permissive | deyh2020/quspin_example | f86cf3cea2b8c04efc017e9618cb935494e94f82 | 931ca2ea5e6bbe02ebdd6d6a22d90db24d6c760c | refs/heads/master | 2023-02-07T21:27:12.913763 | 2020-12-30T08:00:57 | 2020-12-30T08:00:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,427 | py | ## http://weinbe58.github.io/QuSpin/generated/quspin.basis.spin_basis_general.html#quspin.basis.spin_basis_general
## https://doi.org/10.1103/PhysRevX.8.021069
## https://doi.org/10.1103/PhysRevX.8.021070
## consider nearest neighbor Ising
from __future__ import print_function, division
from quspin.operators import hamiltonian # operators
from quspin.basis import spin_basis_general # spin basis constructor
import numpy as np # general math functions
def exact_diag(J,Hx,Hz,Lx,Ly):
N_2d = Lx*Ly # number of sites
###### setting up user-defined symmetry transformations for 2d lattice ######
s = np.arange(N_2d) # sites [0,1,2,....]
x = s%Lx # x positions for sites
y = s//Lx # y positions for sites
T_x = (x+1)%Lx + Lx*y # translation along x-direction
T_y = x +Lx*((y+1)%Ly) # translation along y-direction
P_x = x + Lx*(Ly-y-1) # reflection about x-axis
P_y = (Lx-x-1) + Lx*y # reflection about y-axis
Z = -(s+1) # spin inversion
###### setting up bases ######
# basis_2d = spin_basis_general(N=N_2d,S="1/2",pauli=0)
basis_2d = spin_basis_general(N=N_2d,S="1/2",pauli=0,kxblock=(T_x,0),kyblock=(T_y,0))
###### setting up hamiltonian ######
# setting up site-coupling lists
Jzzs = [[J,i,T_x[i]] for i in range(N_2d)]+[[J,i,T_y[i]] for i in range(N_2d)]
Hxs = [[-Hx,i] for i in range(N_2d)]
Hzs = [[-Hz,i] for i in range(N_2d)]
static = [["zz",Jzzs],["x",Hxs],["z",Hzs]]
# build hamiltonian
# H = hamiltonian(static,[],static_fmt="csr",basis=basis_2d,dtype=np.float64)
no_checks = dict(check_symm=False, check_pcon=False, check_herm=False)
H = hamiltonian(static,[],static_fmt="csr",basis=basis_2d,dtype=np.float64,**no_checks)
# diagonalise H
ene,vec = H.eigsh(time=0.0,which="SA",k=2)
# ene = H.eigsh(time=0.0,which="SA",k=2,return_eigenvectors=False); ene = np.sort(ene)
norm2 = np.linalg.norm(vec[:,0])**2
# calculate uniform magnetization
int_mx = [[1.0,i] for i in range(N_2d)]
int_mz = [[1.0,i] for i in range(N_2d)]
static_mx = [["x",int_mx]]
static_mz = [["z",int_mz]]
op_mx = hamiltonian(static_mx,[],static_fmt="csr",basis=basis_2d,dtype=np.float64,**no_checks).tocsr(time=0)
op_mz = hamiltonian(static_mz,[],static_fmt="csr",basis=basis_2d,dtype=np.float64,**no_checks).tocsr(time=0)
mx = (np.conjugate(vec[:,0]).dot(op_mx.dot(vec[:,0])) / norm2).real / N_2d
mz = (np.conjugate(vec[:,0]).dot(op_mz.dot(vec[:,0])) / norm2).real / N_2d
# calculate n.n. sz.sz correlation
int_mz0mz1 = [[1.0,i,T_x[i]] for i in range(N_2d)]+[[1.0,i,T_y[i]] for i in range(N_2d)]
static_mz0mz1 = [["zz",int_mz0mz1]]
op_mz0mz1 = hamiltonian(static_mz0mz1,[],static_fmt="csr",basis=basis_2d,dtype=np.float64,**no_checks).tocsr(time=0)
mz0mz1 = (np.conjugate(vec[:,0]).dot(op_mz0mz1.dot(vec[:,0])) / norm2).real / N_2d
return ene, mx, mz, mz0mz1
def main():
###### define model parameters ######
Lx, Ly = 4, 4 # linear dimension of 2d lattice
N_2d = Lx*Ly # number of sites
J = 1.0 # AF Ising
# Hz = 2.00 # longitudinal field
Hzs = np.linspace(0.0,4.0,401)
# Hzs = np.linspace(1.99,2.03,41)
Hx = 0.10 # transverse field
for Hz in Hzs:
ene, mx, mz, mz0mz1 = exact_diag(J,Hx,Hz,Lx,Ly)
# print(J,Hz,Hx,Lx,Ly,ene[0]/N_2d,ene[1]/N_2d)
print(J,Hz,Hx,Lx,Ly,ene[0]/N_2d,mx,mz,mz0mz1)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
7534fdc5e9d0e271082d603c5c0a1ba2262d679e | 873d858b79a51a6a14e74e1a6fe4cc97809a69bc | /rosserial_ws/devel/lib/rosserial_client/make_library.py | eed0f221f32c99f4c790655eeb0d5132d20cacf2 | [] | no_license | nichoteloo/ROS-Noetic-devel | cf3058014fc491f38a23426c136cb8fbdee7a397 | 81e7090c5dc0e548aed4aa57b9579e355e9bcd25 | refs/heads/master | 2023-05-07T19:21:03.804523 | 2021-06-02T21:13:48 | 2021-06-02T21:13:48 | 373,293,635 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# generated from catkin/cmake/template/script.py.in
# creates a relay to a python script source file, acting as that file.
# The purpose is that of a symlink
python_script = '/home/nichotelo/ros/rosserial_ws/src/rosserial/rosserial_client/src/rosserial_client/make_library.py'
with open(python_script, 'r') as fh:
context = {
'__builtins__': __builtins__,
'__doc__': None,
'__file__': python_script,
'__name__': __name__,
'__package__': None,
}
exec(compile(fh.read(), python_script, 'exec'), context)
| [
"[email protected]"
] | |
e23c70a6f0bf97c57a6a3211e8ce4ee4c23a4b01 | 22d3d698edfa66d071a77b98d9d293087e699d87 | /casanova/cli.py | 1ac34d646aac0c3f4e2ac82fc186be538414654c | [
"MIT"
] | permissive | medialab/casanova | 8b880b1848f8f1ea785fdba483395a7d7085b87f | fcd449df0fba33a48693bea4919c81e1654a6866 | refs/heads/master | 2023-07-30T04:16:33.079309 | 2023-07-13T12:41:43 | 2023-07-13T12:41:43 | 254,628,154 | 13 | 2 | MIT | 2023-01-17T16:00:31 | 2020-04-10T12:23:34 | Python | UTF-8 | Python | false | false | 14,540 | py | from typing import Optional, List
import re
import sys
import gzip
import json
import math
import random
import statistics
from itertools import islice
from types import GeneratorType
from os.path import join
from urllib.parse import urlsplit, urljoin
from multiprocessing import Pool as MultiProcessPool
from dataclasses import dataclass
from collections import Counter, defaultdict, deque, OrderedDict
from collections.abc import Mapping, Iterable
from casanova import (
Reader,
Enricher,
CSVSerializer,
RowWrapper,
Headers,
Writer,
InferringWriter,
)
from casanova.utils import import_target, flatmap
@dataclass
class InitializerOptions:
code: str
module: bool
row_len: int
args: List[str]
init_codes: List[str]
before_codes: List[str]
after_codes: List[str]
fieldnames: Optional[List[str]] = None
selected_indices: Optional[List[int]] = None
base_dir: Optional[str] = None
# NOTE: just a thin wrapper to make sure we catch KeyboardInterrupt in
# child processes gracefully.
class WorkerWrapper(object):
__slots__ = ("fn",)
def __init__(self, fn):
self.fn = fn
def __call__(self, *args, **kwargs):
try:
return self.fn(*args, **kwargs)
except KeyboardInterrupt:
sys.exit(1)
class SingleProcessPool(object):
def imap(self, worker, tasks, chunksize=1):
for t in tasks:
yield worker(t)
def imap_unordered(self, *args, **kwargs):
yield from self.imap(*args, **kwargs)
def __enter__(self):
return self
def __exit__(self, *args):
return
def get_pool(n: int, options: InitializerOptions):
initargs = (options,)
if n < 2:
multiprocessed_initializer(*initargs)
return SingleProcessPool()
return MultiProcessPool(
n, initializer=multiprocessed_initializer, initargs=initargs
)
def get_csv_serializer(cli_args):
return CSVSerializer(
plural_separator=cli_args.plural_separator,
none_value=cli_args.none_value,
true_value=cli_args.true_value,
false_value=cli_args.false_value,
)
def get_inferring_writer(output_file, cli_args):
return InferringWriter(
output_file,
fieldnames=cli_args.fieldnames,
plural_separator=cli_args.plural_separator,
none_value=cli_args.none_value,
true_value=cli_args.true_value,
false_value=cli_args.false_value,
)
# Global multiprocessing variables
CODE = None
FUNCTION = None
ARGS = None
SELECTION = None
BEFORE_CODES = []
AFTER_CODES = []
EVALUATION_CONTEXT = {}
ROW = None
BASE_DIR = None
def read(path, encoding: str = "utf-8") -> Optional[str]:
global BASE_DIR
if BASE_DIR is not None:
path = join(BASE_DIR, path)
if path.endswith(".gz"):
try:
with gzip.open(path, encoding=encoding, mode="rt") as f:
return f.read()
except FileNotFoundError:
return None
try:
        with open(path, encoding=encoding, mode="r") as f:
return f.read()
except FileNotFoundError:
return None
EVALUATION_CONTEXT_LIB = {
# lib
"join": join,
"math": math,
"mean": statistics.mean,
"median": statistics.median,
"random": random,
"re": re,
"read": read,
"urljoin": urljoin,
"urlsplit": urlsplit,
# classes
"Counter": Counter,
"defaultdict": defaultdict,
"deque": deque,
}
def initialize_evaluation_context():
global EVALUATION_CONTEXT
EVALUATION_CONTEXT = {
**EVALUATION_CONTEXT_LIB,
# state
"fieldnames": None,
"headers": None,
"index": 0,
"row": None,
"cell": None,
"cells": None,
}
def multiprocessed_initializer(options: InitializerOptions):
global CODE
global FUNCTION
global ARGS
global BEFORE_CODES
global AFTER_CODES
global ROW
global SELECTION
global BASE_DIR
# Reset in case of multiple execution from same process
CODE = None
FUNCTION = None
ARGS = None
SELECTION = None
BEFORE_CODES = []
AFTER_CODES = []
ROW = None
BASE_DIR = options.base_dir
initialize_evaluation_context()
if options.module:
FUNCTION = import_target(options.code)
ARGS = options.args
else:
CODE = options.code
BEFORE_CODES = options.before_codes
AFTER_CODES = options.after_codes
if options.selected_indices is not None:
SELECTION = options.selected_indices
if options.fieldnames is not None:
EVALUATION_CONTEXT["fieldnames"] = options.fieldnames
EVALUATION_CONTEXT["headers"] = Headers(options.fieldnames)
headers = EVALUATION_CONTEXT["headers"]
else:
headers = Headers(range(options.row_len))
for init_code in options.init_codes:
exec(init_code, None, EVALUATION_CONTEXT)
EVALUATION_CONTEXT["row"] = RowWrapper(headers, None)
ROW = EVALUATION_CONTEXT["row"]
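# Refresh the `cell`/`cells` shortcuts in the evaluation context for the row
# being processed (a no-op when no selection was requested).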
def select(row):
if SELECTION is None:
return
cells = [row[i] for i in SELECTION]
EVALUATION_CONTEXT["cells"] = cells
EVALUATION_CONTEXT["cell"] = cells[0]
def multiprocessed_worker_using_eval(payload):
global EVALUATION_CONTEXT
i, row = payload
EVALUATION_CONTEXT["index"] = i
ROW._replace(row)
select(row)
try:
for before_code in BEFORE_CODES:
exec(before_code, EVALUATION_CONTEXT, None)
value = eval(CODE, EVALUATION_CONTEXT, None)
for after_code in AFTER_CODES:
exec(after_code, EVALUATION_CONTEXT, None)
return None, i, value
except Exception as e:
return e, i, None
def collect_args(i, row):
for arg_name in ARGS:
if arg_name == "row":
yield ROW
elif arg_name == "index":
yield i
elif arg_name == "fieldnames":
yield EVALUATION_CONTEXT["fieldnames"]
elif arg_name == "headers":
yield EVALUATION_CONTEXT["headers"]
elif arg_name == "cell":
# NOTE: we know SELECTION is relevant because it's validated by CLI
yield row[SELECTION[0]]
elif arg_name == "cells":
# NOTE: we know SELECTION is relevant because it's validated by CLI
for idx in SELECTION:
yield row[idx]
else:
raise TypeError("unknown arg_name: %s" % arg_name)
def multiprocessed_worker_using_function(payload):
i, row = payload
ROW._replace(row)
args = tuple(collect_args(i, row))
try:
value = FUNCTION(*args)
# NOTE: consuming generators
if isinstance(value, GeneratorType):
value = list(value)
return None, i, value
except Exception as e:
return e, i, None
# TODO: go to minet for progress bar and rich?
# TODO: write proper cli documentation
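# Core iteration: run the user-supplied expression or function over every row,
# dispatching the work to a process pool when more than one process is requested.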
def mp_iteration(cli_args, reader: Reader):
worker = (
multiprocessed_worker_using_eval
if not cli_args.module
else multiprocessed_worker_using_function
)
if cli_args.processes > 1:
worker = WorkerWrapper(worker)
selected_indices = None
if cli_args.select:
if reader.headers is not None:
selected_indices = reader.headers.select(cli_args.select)
else:
selected_indices = Headers.select_no_headers(cli_args.select)
init_options = InitializerOptions(
code=cli_args.code,
module=cli_args.module,
args=cli_args.args,
init_codes=cli_args.init,
before_codes=cli_args.before,
after_codes=cli_args.after,
row_len=reader.row_len,
fieldnames=reader.fieldnames,
selected_indices=selected_indices,
base_dir=cli_args.base_dir,
)
with get_pool(cli_args.processes, init_options) as pool:
# NOTE: we keep track of rows being worked on from the main process
# to avoid serializing them back with worker result.
worked_rows = {}
def payloads():
for t in reader.enumerate():
worked_rows[t[0]] = t[1]
yield t
mapper = pool.imap if not cli_args.unordered else pool.imap_unordered
for exc, i, result in mapper(worker, payloads(), chunksize=cli_args.chunk_size):
row = worked_rows.pop(i)
if exc is not None:
if cli_args.ignore_errors:
result = None
else:
raise exc
yield i, row, result
def map_action(cli_args, output_file):
serialize = get_csv_serializer(cli_args)
with Enricher(
cli_args.file,
output_file,
add=[cli_args.new_column],
delimiter=cli_args.delimiter,
) as enricher:
for _, row, result in mp_iteration(cli_args, enricher):
enricher.writerow(row, [serialize(result)])
def flatmap_action(cli_args, output_file):
serialize = get_csv_serializer(cli_args)
with Enricher(
cli_args.file,
output_file,
add=[cli_args.new_column],
delimiter=cli_args.delimiter,
) as enricher:
for _, row, result in mp_iteration(cli_args, enricher):
for value in flatmap(result):
enricher.writerow(row, [serialize(value)])
def filter_action(cli_args, output_file):
with Enricher(cli_args.file, output_file, delimiter=cli_args.delimiter) as enricher:
for _, row, result in mp_iteration(cli_args, enricher):
if cli_args.invert_match:
result = not result
if result:
enricher.writerow(row)
def map_reduce_action(cli_args, output_file):
acc_fn = None
if cli_args.module:
acc_fn = import_target(cli_args.accumulator)
with Reader(
cli_args.file,
delimiter=cli_args.delimiter,
) as enricher:
acc_context = EVALUATION_CONTEXT_LIB.copy()
acc = None
initialized = False
if cli_args.init_value is not None:
initialized = True
acc = eval(cli_args.init_value, acc_context, None)
acc_context["acc"] = acc
for _, row, result in mp_iteration(cli_args, enricher):
if not initialized:
acc_context["acc"] = result
initialized = True
continue
if acc_fn is None:
acc_context["current"] = result
acc_context["acc"] = eval(cli_args.accumulator, acc_context, None)
else:
acc_context["acc"] = acc_fn(acc_context["acc"], result)
final_result = acc_context["acc"]
if cli_args.json:
json.dump(
final_result,
output_file,
indent=2 if cli_args.pretty else None,
ensure_ascii=False,
)
print(file=output_file)
elif cli_args.csv:
writer = get_inferring_writer(output_file, cli_args)
writer.writerow(final_result)
else:
print(final_result, file=output_file)
class GroupWrapper:
__slots__ = ("__name", "__rows", "__wrapper")
def __init__(self, fieldnames):
self.__wrapper = RowWrapper(Headers(fieldnames), range(len(fieldnames)))
def _replace(self, name, rows):
self.__name = name
self.__rows = rows
@property
def name(self):
return self.__name
def __len__(self):
return len(self.__rows)
def __iter__(self):
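        # NOTE: a single wrapper instance is mutated and re-yielded for each
        # row, so consumers must not keep references to rows across iterations.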
for row in self.__rows:
self.__wrapper._replace(row)
yield self.__wrapper
def groupby_action(cli_args, output_file):
agg_fn = None
if cli_args.module:
agg_fn = import_target(cli_args.aggregator)
with Reader(
cli_args.file,
delimiter=cli_args.delimiter,
) as enricher:
# NOTE: using an ordered dict to guarantee stability for all python versions
groups = OrderedDict()
# Grouping
for _, row, result in mp_iteration(cli_args, enricher):
l = groups.get(result)
if l is None:
l = [row]
groups[result] = l
else:
l.append(row)
# Aggregating
agg_context = EVALUATION_CONTEXT_LIB.copy()
header_emitted = False
writer = Writer(output_file)
fieldnames = ["group"]
mapping_fieldnames = None
serializer = get_csv_serializer(cli_args)
if cli_args.fieldnames is not None:
mapping_fieldnames = cli_args.fieldnames
fieldnames += cli_args.fieldnames
header_emitted = True
writer.writerow(fieldnames)
group_wrapper = GroupWrapper(enricher.fieldnames)
for name, rows in groups.items():
group_wrapper._replace(name, rows)
if agg_fn is not None:
result = agg_fn(group_wrapper)
else:
agg_context["group"] = group_wrapper
result = eval(cli_args.aggregator, agg_context, None)
name = serializer(name)
if isinstance(result, Mapping):
if not header_emitted:
mapping_fieldnames = list(result.keys())
fieldnames += mapping_fieldnames
writer.writerow(fieldnames)
header_emitted = True
writer.writerow(
[name] + serializer.serialize_dict_row(result, mapping_fieldnames)
)
elif isinstance(result, Iterable) and not isinstance(result, (bytes, str)):
if not header_emitted:
fieldnames += ["col%i" % i for i in range(1, len(result) + 1)]
writer.writerow(fieldnames)
header_emitted = True
writer.writerow([name] + serializer.serialize_row(result))
else:
if not header_emitted:
writer.writerow(fieldnames + ["value"])
header_emitted = True
writer.writerow([name, serializer(result)])
def reverse_action(cli_args, output_file):
with Enricher(
cli_args.file, output_file, delimiter=cli_args.delimiter, reverse=True
) as enricher:
it = enricher
if cli_args.lines is not None:
it = islice(enricher, cli_args.lines)
for row in it:
enricher.writerow(row)
| [
"[email protected]"
] | |
dde7d82754424f14d0b28a6142c13333535560f6 | e3adbec6cd8d0b50880b3b606352a1c751d4ac79 | /functions/singly_linked_list.py | 7cadf3954044adea1f9fcd0cccd0b5268d96d8b1 | [] | no_license | ZiyaoGeng/LeetCode | 3cc5b553df5eac2e5bbb3ccd0f0ed4229574fa2f | c4c60b289c0bd9d9f228d04abe948d6287e70ea8 | refs/heads/master | 2022-04-07T08:19:58.647408 | 2020-03-12T08:56:13 | 2020-03-12T08:56:13 | 218,981,503 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 211 | py |
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
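
# Usage sketch: build a short list 1 -> 2
# head = ListNode(1)
# head.next = ListNode(2)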
| [
"[email protected]"
] | |
d8e85972fade73cbb7841a166d847c90f11b5bd4 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2021_04_01/operations/_virtual_machine_extensions_operations.py | 9e1af3df025f8ded444c980807a5abb4416ee1ed | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 44,661 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_create_or_update_request(
resource_group_name: str, vm_name: str, vm_extension_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"vmName": _SERIALIZER.url("vm_name", vm_name, "str"),
"vmExtensionName": _SERIALIZER.url("vm_extension_name", vm_extension_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_update_request(
resource_group_name: str, vm_name: str, vm_extension_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"vmName": _SERIALIZER.url("vm_name", vm_name, "str"),
"vmExtensionName": _SERIALIZER.url("vm_extension_name", vm_extension_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_request(
resource_group_name: str, vm_name: str, vm_extension_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"vmName": _SERIALIZER.url("vm_name", vm_name, "str"),
"vmExtensionName": _SERIALIZER.url("vm_extension_name", vm_extension_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)
def build_get_request(
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
subscription_id: str,
*,
expand: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"vmName": _SERIALIZER.url("vm_name", vm_name, "str"),
"vmExtensionName": _SERIALIZER.url("vm_extension_name", vm_extension_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if expand is not None:
_params["$expand"] = _SERIALIZER.query("expand", expand, "str")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_request(
resource_group_name: str, vm_name: str, subscription_id: str, *, expand: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions",
) # pylint: disable=line-too-long
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
"vmName": _SERIALIZER.url("vm_name", vm_name, "str"),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
if expand is not None:
_params["$expand"] = _SERIALIZER.query("expand", expand, "str")
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class VirtualMachineExtensionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.compute.v2021_04_01.ComputeManagementClient`'s
:attr:`virtual_machine_extensions` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
def _create_or_update_initial(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: Union[_models.VirtualMachineExtension, IO],
**kwargs: Any
) -> _models.VirtualMachineExtension:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.VirtualMachineExtension] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(extension_parameters, (IO, bytes)):
_content = extension_parameters
else:
_json = self._serialize.body(extension_parameters, "VirtualMachineExtension")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("VirtualMachineExtension", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("VirtualMachineExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
_create_or_update_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}"
}
@overload
def begin_create_or_update(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: _models.VirtualMachineExtension,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.VirtualMachineExtension]:
"""The operation to create or update the extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be created or
updated. Required.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Create Virtual Machine Extension
operation. Required.
:type extension_parameters: ~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtension
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualMachineExtension or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_create_or_update(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.VirtualMachineExtension]:
"""The operation to create or update the extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be created or
updated. Required.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Create Virtual Machine Extension
operation. Required.
:type extension_parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualMachineExtension or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: Union[_models.VirtualMachineExtension, IO],
**kwargs: Any
) -> LROPoller[_models.VirtualMachineExtension]:
"""The operation to create or update the extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be created or
updated. Required.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Create Virtual Machine Extension
operation. Is either a model type or a IO type. Required.
:type extension_parameters: ~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtension or
IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualMachineExtension or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.VirtualMachineExtension] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
extension_parameters=extension_parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("VirtualMachineExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_create_or_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}"
}
def _update_initial(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: Union[_models.VirtualMachineExtensionUpdate, IO],
**kwargs: Any
) -> _models.VirtualMachineExtension:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.VirtualMachineExtension] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(extension_parameters, (IO, bytes)):
_content = extension_parameters
else:
_json = self._serialize.body(extension_parameters, "VirtualMachineExtensionUpdate")
request = build_update_request(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("VirtualMachineExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}"
}
@overload
def begin_update(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: _models.VirtualMachineExtensionUpdate,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.VirtualMachineExtension]:
"""The operation to update the extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be updated.
Required.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Update Virtual Machine Extension
operation. Required.
:type extension_parameters:
~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtensionUpdate
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualMachineExtension or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_update(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.VirtualMachineExtension]:
"""The operation to update the extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be updated.
Required.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Update Virtual Machine Extension
operation. Required.
:type extension_parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualMachineExtension or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_update(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
extension_parameters: Union[_models.VirtualMachineExtensionUpdate, IO],
**kwargs: Any
) -> LROPoller[_models.VirtualMachineExtension]:
"""The operation to update the extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be updated.
Required.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param extension_parameters: Parameters supplied to the Update Virtual Machine Extension
operation. Is either a model type or a IO type. Required.
:type extension_parameters:
~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtensionUpdate or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualMachineExtension or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtension]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.VirtualMachineExtension] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._update_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
extension_parameters=extension_parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("VirtualMachineExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}"
}
def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, vm_name: str, vm_extension_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_delete_request(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}"
}
@distributed_trace
def begin_delete(
self, resource_group_name: str, vm_name: str, vm_extension_name: str, **kwargs: Any
) -> LROPoller[None]:
"""The operation to delete the extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_name: The name of the virtual machine where the extension should be deleted.
Required.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_delete.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}"
}
@distributed_trace
def get(
self,
resource_group_name: str,
vm_name: str,
vm_extension_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> _models.VirtualMachineExtension:
"""The operation to get the extension.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_name: The name of the virtual machine containing the extension. Required.
:type vm_name: str
:param vm_extension_name: The name of the virtual machine extension. Required.
:type vm_extension_name: str
:param expand: The expand expression to apply on the operation. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineExtension or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtension
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
cls: ClsType[_models.VirtualMachineExtension] = kwargs.pop("cls", None)
request = build_get_request(
resource_group_name=resource_group_name,
vm_name=vm_name,
vm_extension_name=vm_extension_name,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("VirtualMachineExtension", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions/{vmExtensionName}"
}
@distributed_trace
def list(
self, resource_group_name: str, vm_name: str, expand: Optional[str] = None, **kwargs: Any
) -> _models.VirtualMachineExtensionsListResult:
"""The operation to get all extensions of a Virtual Machine.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param vm_name: The name of the virtual machine containing the extension. Required.
:type vm_name: str
:param expand: The expand expression to apply on the operation. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineExtensionsListResult or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2021_04_01.models.VirtualMachineExtensionsListResult
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2021-04-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))
cls: ClsType[_models.VirtualMachineExtensionsListResult] = kwargs.pop("cls", None)
request = build_list_request(
resource_group_name=resource_group_name,
vm_name=vm_name,
subscription_id=self._config.subscription_id,
expand=expand,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("VirtualMachineExtensionsListResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/extensions"
}
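
# Usage sketch (not part of the generated client): given a ComputeManagementClient
# `client` constructed with a credential and subscription id, this operations
# class is reached via `client.virtual_machine_extensions`, e.g.:
#
#     extension = client.virtual_machine_extensions.get(
#         resource_group_name="my-rg", vm_name="my-vm", vm_extension_name="my-ext"
#     )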
| [
"[email protected]"
] | |
811e650b58eaf4337be5d070b3152062620dfaa4 | 1d1a21b37e1591c5b825299de338d18917715fec | /Mathematics/Data science/Mathmatics/02/inverse_matrix.py | 5531c0cc7924c0fa9e1eb9313e95e425439086b8 | [] | no_license | brunoleej/study_git | 46279c3521f090ebf63ee0e1852aa0b6bed11b01 | 0c5c9e490140144caf1149e2e1d9fe5f68cf6294 | refs/heads/main | 2023-08-19T01:07:42.236110 | 2021-08-29T16:20:59 | 2021-08-29T16:20:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 249 | py | import numpy as np
A = np.array([[1,1,0],[0,1,1],[1,1,1]])
print(A)
'''
[[1 1 0]
[0 1 1]
[1 1 1]]
'''
# Inverse matrix computation (inverse_matrix)
Ainv = np.linalg.inv(A)
print(Ainv)
'''
[[ 0. -1. 1.]
[ 1. 1. -1.]
[-1. 0. 1.]]
'''
| [
"[email protected]"
] | |
f327af434bdb44b8db26624273fa576fedb584a9 | 371fe9a1fdeb62ad1142b34d732bde06f3ce21a0 | /scripts/compute_path_pair_distances.py | 32499ed5d2cd2871d18a77acc24343b70b16f798 | [] | no_license | maickrau/rdna_resolution | 971f3b7e803565c9432be69b8e2a2852f55b8b79 | aab42310c31e655cbbc318331082fa3436d69075 | refs/heads/master | 2023-03-03T05:14:33.966930 | 2021-02-17T20:45:20 | 2021-02-17T20:45:20 | 339,851,442 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,426 | py | #!/usr/bin/python
import sys
graphfile = sys.argv[1]
max_diff = int(sys.argv[2])
modulo = int(sys.argv[3])
moduloindex = int(sys.argv[4])
# name \t path from stdin
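# Usage sketch (from the argument parsing above):
#   compute_path_pair_distances.py <graph.gfa> <max_diff> <modulo> <moduloindex> < name_path_pairs.tsv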
def revcomp(s):
comp = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'}
return "".join(comp[c] for c in s[::-1])
def pathseq(p):
global nodeseqs
seq_no_hpc = "".join(nodeseqs[n[1:]] if n[0] == '>' else revcomp(nodeseqs[n[1:]]) for n in p)
# seq_hpc = seq_no_hpc[0]
# for i in range(1, len(seq_no_hpc)):
# if seq_no_hpc[i] != seq_no_hpc[i-1]: seq_hpc += seq_no_hpc[i]
# return seq_hpc
return seq_no_hpc
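
# Three edit-distance variants follow: a plain DP baseline, a banded WFA
# implementation, and the WFA-style version used below, which additionally lets
# 2 bp tandem-repeat expansions/contractions extend matches for free.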
def edit_distance_simple(p1, p2):
global max_diff
if len(p1) - len(p2) <= -max_diff or len(p1) - len(p2) >= max_diff: return None
last_row = []
for i in range(0, len(p2)+1):
last_row.append(i)
    # DP over prefixes: row i corresponds to the first i characters of p1.
    for i in range(1, len(p1) + 1):
next_row = [i]
min_this_row = i
for j in range(0, len(p2)):
index = len(next_row)
next_row.append(min(next_row[index-1]+1, last_row[index]+1))
            if p1[i - 1] == p2[j]:
next_row[index] = min(next_row[index], last_row[index-1])
else:
next_row[index] = min(next_row[index], last_row[index-1]+1)
min_this_row = min(min_this_row, next_row[index])
last_row = next_row
# if min_this_row >= max_diff: return None
return last_row[-1]
def edit_distance_wfa(p1, p2):
global max_diff
    # use the wavefront alignment algorithm (WFA) because it's new and fancy
# https://academic.oup.com/bioinformatics/advance-article/doi/10.1093/bioinformatics/btaa777/5904262?rss=1
if len(p1) - len(p2) < -max_diff or len(p1) - len(p2) > max_diff: return None
start_match = -1
while start_match+1 < len(p1) and start_match+1 < len(p2) and p1[start_match+1] == p2[start_match+1]:
start_match += 1
if start_match == len(p1) and start_match == len(p2): return 0
last_column = [start_match]
# sys.stderr.write("0" + "\n")
for i in range(1, max_diff):
offset = i-1
# sys.stderr.write(str(i) + "\n")
next_column = []
        last_match = last_column[-i+offset+1]
while last_match+1-i < len(p1) and last_match+1 < len(p2) and p1[last_match+1-i] == p2[last_match+1]:
last_match += 1
if last_match+1-i >= len(p1) and last_match+1 >= len(p2):
return i
next_column.append(last_match)
for j in range(-i+1, +i):
last_match = last_column[j+offset]+1
if j > -i+1:
last_match = max(last_match, last_column[j+offset-1]-1)
if j < i-1:
last_match = max(last_match, last_column[j+offset+1])
while last_match+1+j < len(p1) and last_match+1 < len(p2) and p1[last_match+1+j] == p2[last_match+1]:
last_match += 1
if last_match+1+j >= len(p1) and last_match+1 >= len(p2):
return i
next_column.append(last_match)
last_match = last_column[i+offset-1]-1
while last_match+1+i < len(p1) and last_match+1 < len(p2) and p1[last_match+1+i] == p2[last_match+1]:
last_match += 1
if last_match+1+i >= len(p1) and last_match+1 >= len(p2):
return i
next_column.append(last_match)
last_column = next_column
return None
def edit_distance(p1, p2):
global max_diff
    # use the wavefront alignment algorithm (WFA) because it's new and fancy
# https://academic.oup.com/bioinformatics/advance-article/doi/10.1093/bioinformatics/btaa777/5904262?rss=1
if len(p1) - len(p2) < -max_diff or len(p1) - len(p2) > max_diff: return None
start_match = -1
while start_match+1 < len(p1) and start_match+1 < len(p2) and p1[start_match+1] == p2[start_match+1]:
start_match += 1
if start_match == len(p1) and start_match == len(p2): return 0
last_column = {0: start_match}
for i in range(1, max_diff):
next_column = {}
for column in last_column:
if column not in next_column: next_column[column] = 0
next_column[column] = max(next_column[column], last_column[column]+1)
if column+1 not in next_column: next_column[column+1] = 0
next_column[column+1] = max(next_column[column+1], last_column[column])
if column-1 not in next_column: next_column[column-1] = 0
next_column[column-1] = max(next_column[column-1], last_column[column]-1)
p1_pos = last_column[column]
p2_pos = last_column[column] + column
if p1_pos >= 4 and p2_pos >= 4:
if p1[p1_pos-4:p1_pos] == p2[p2_pos-4:p2_pos] and p1[p1_pos-4:p1_pos-2] == p1[p1_pos-2:p1_pos]:
if p1_pos+2 <= len(p1) and p1[p1_pos:p1_pos+2] == p1[p1_pos-2:p1_pos]:
extend_until = 0
while True:
if column-extend_until not in next_column: next_column[column-extend_until] = 0
next_column[column-extend_until] = max(next_column[column-extend_until], last_column[column]+extend_until)
if p1_pos+extend_until+2 <= len(p1) and p1[p1_pos+extend_until:p1_pos+extend_until+2] == p1[p1_pos-2:p1_pos]:
extend_until += 2
else:
break
if p2_pos+2 <= len(p2) and p2[p2_pos:p2_pos+2] == p2[p2_pos-2:p2_pos]:
extend_until = 0
while True:
if column+extend_until+2 not in next_column: next_column[column+extend_until+2] = 0
next_column[column+extend_until+2] = max(next_column[column+extend_until+2], last_column[column])
if p2_pos+extend_until+2 <= len(p2) and p2[p2_pos+extend_until:p2_pos+extend_until+2] == p2[p2_pos-2:p2_pos]:
extend_until += 2
else:
break
for column in next_column:
p1_pos = next_column[column]
p2_pos = next_column[column] + column
while p1_pos+1 < len(p1) and p2_pos+1 < len(p2) and p1[p1_pos+1] == p2[p2_pos+1]:
next_column[column] += 1
p1_pos += 1
p2_pos += 1
if p1_pos+1 >= len(p1) and p2_pos+1 >= len(p2): return i
last_column = next_column
return None
nodeseqs = {}
with open(graphfile) as f:
for l in f:
parts = l.strip().split('\t')
if parts[0] == 'S':
nodeseqs[parts[1]] = parts[2]
num = 0
pathnum = {}
paths = {}
for l in sys.stdin:
parts = l.strip().split('\t')
name = parts[0]
last_break = 0
path = []
pathstr = parts[1] + '>'
for i in range(1, len(pathstr)):
if pathstr[i] == '<' or pathstr[i] == '>':
path.append(pathstr[last_break:i])
last_break = i
    if name in paths: print(name)  # report the duplicate name before the assert fires
assert name not in paths
paths[name] = pathseq(path)
pathnum[name] = num
num += 1
# print(name + "\t" + paths[name])
for path1 in paths:
if pathnum[path1] % modulo != moduloindex: continue
for path2 in paths:
if path1 <= path2: continue
value = max_diff + 1
edit_dist = edit_distance(paths[path1], paths[path2])
# edit_dist = edit_distance_simple(paths[path1], paths[path2])
if edit_dist is None: continue
if edit_dist is not None: value = edit_dist
print(path1 + "\t" + path2 + "\t" + str(value))
| [
"[email protected]"
] | |
1459e00c12efcf943450d0d9fbb1d34e6ba7db4b | 93d78f2dd852b90d295bd523fd0bc09a644ee0d2 | /test/sql/test_operators.py | e8ad88511482f9009137ee1ea40257fb924e0846 | [
"MIT"
] | permissive | mrocklin/sqlalchemy | ff13d4d07ba46a049da9611d356d07498e95337d | 156f473de00024688404d73aea305cd4fc452638 | refs/heads/master | 2020-12-03T09:30:34.956612 | 2014-12-01T18:31:48 | 2014-12-01T18:31:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60,743 | py | from sqlalchemy.testing import fixtures, eq_, is_, is_not_
from sqlalchemy import testing
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.sql import column, desc, asc, literal, collate, null, true, false
from sqlalchemy.sql.expression import BinaryExpression, \
ClauseList, Grouping, \
UnaryExpression, select, union, func, tuple_
from sqlalchemy.sql import operators, table
import operator
from sqlalchemy import String, Integer, LargeBinary
from sqlalchemy import exc
from sqlalchemy.engine import default
from sqlalchemy.sql.elements import _literal_as_text
from sqlalchemy.schema import Column, Table, MetaData
from sqlalchemy.types import TypeEngine, TypeDecorator, UserDefinedType, Boolean
from sqlalchemy.dialects import mysql, firebird, postgresql, oracle, \
sqlite, mssql
from sqlalchemy import util
import datetime
import collections
from sqlalchemy import text, literal_column
from sqlalchemy import and_, not_, between, or_
from sqlalchemy.sql import true, false, null
class LoopOperate(operators.ColumnOperators):
def operate(self, op, *other, **kwargs):
return op
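# LoopOperate.operate() simply hands back the operator it was invoked with, so
# the tests below can assert which operators.* function a Python operation
# dispatches to.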
class DefaultColumnComparatorTest(fixtures.TestBase):
def _do_scalar_test(self, operator, compare_to):
left = column('left')
assert left.comparator.operate(operator).compare(
compare_to(left)
)
self._loop_test(operator)
def _do_operate_test(self, operator, right=column('right')):
left = column('left')
assert left.comparator.operate(
operator,
right).compare(
BinaryExpression(
_literal_as_text(left),
_literal_as_text(right),
operator))
assert operator(
left,
right).compare(
BinaryExpression(
_literal_as_text(left),
_literal_as_text(right),
operator))
self._loop_test(operator, right)
def _loop_test(self, operator, *arg):
l = LoopOperate()
is_(
operator(l, *arg),
operator
)
def test_desc(self):
self._do_scalar_test(operators.desc_op, desc)
def test_asc(self):
self._do_scalar_test(operators.asc_op, asc)
def test_plus(self):
self._do_operate_test(operators.add)
def test_is_null(self):
self._do_operate_test(operators.is_, None)
def test_isnot_null(self):
self._do_operate_test(operators.isnot, None)
def test_is_null_const(self):
self._do_operate_test(operators.is_, null())
def test_is_true_const(self):
self._do_operate_test(operators.is_, true())
def test_is_false_const(self):
self._do_operate_test(operators.is_, false())
def test_equals_true(self):
self._do_operate_test(operators.eq, True)
def test_notequals_true(self):
self._do_operate_test(operators.ne, True)
def test_is_true(self):
self._do_operate_test(operators.is_, True)
def test_isnot_true(self):
self._do_operate_test(operators.isnot, True)
def test_is_false(self):
self._do_operate_test(operators.is_, False)
def test_isnot_false(self):
self._do_operate_test(operators.isnot, False)
def test_like(self):
self._do_operate_test(operators.like_op)
def test_notlike(self):
self._do_operate_test(operators.notlike_op)
def test_ilike(self):
self._do_operate_test(operators.ilike_op)
def test_notilike(self):
self._do_operate_test(operators.notilike_op)
def test_is(self):
self._do_operate_test(operators.is_)
def test_isnot(self):
self._do_operate_test(operators.isnot)
def test_no_getitem(self):
assert_raises_message(
NotImplementedError,
"Operator 'getitem' is not supported on this expression",
self._do_operate_test, operators.getitem
)
assert_raises_message(
NotImplementedError,
"Operator 'getitem' is not supported on this expression",
lambda: column('left')[3]
)
def test_in(self):
left = column('left')
assert left.comparator.operate(operators.in_op, [1, 2, 3]).compare(
BinaryExpression(
left,
Grouping(ClauseList(
literal(1), literal(2), literal(3)
)),
operators.in_op
)
)
self._loop_test(operators.in_op, [1, 2, 3])
def test_notin(self):
left = column('left')
assert left.comparator.operate(operators.notin_op, [1, 2, 3]).compare(
BinaryExpression(
left,
Grouping(ClauseList(
literal(1), literal(2), literal(3)
)),
operators.notin_op
)
)
self._loop_test(operators.notin_op, [1, 2, 3])
def test_in_no_accept_list_of_non_column_element(self):
left = column('left')
foo = ClauseList()
assert_raises_message(
exc.InvalidRequestError,
r"in_\(\) accepts either a list of expressions or a selectable:",
left.in_, [foo]
)
def test_in_no_accept_non_list_non_selectable(self):
left = column('left')
right = column('right')
assert_raises_message(
exc.InvalidRequestError,
r"in_\(\) accepts either a list of expressions or a selectable:",
left.in_, right
)
def test_in_no_accept_non_list_thing_with_getitem(self):
# test [ticket:2726]
class HasGetitem(String):
class comparator_factory(String.Comparator):
def __getitem__(self, value):
return value
left = column('left')
right = column('right', HasGetitem)
assert_raises_message(
exc.InvalidRequestError,
r"in_\(\) accepts either a list of expressions or a selectable:",
left.in_, right
)
def test_collate(self):
left = column('left')
right = "some collation"
left.comparator.operate(operators.collate, right).compare(
collate(left, right)
)
def test_concat(self):
self._do_operate_test(operators.concat_op)
class CustomUnaryOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
def _factorial_fixture(self):
class MyInteger(Integer):
class comparator_factory(Integer.Comparator):
def factorial(self):
return UnaryExpression(self.expr,
modifier=operators.custom_op("!"),
type_=MyInteger)
def factorial_prefix(self):
return UnaryExpression(self.expr,
operator=operators.custom_op("!!"),
type_=MyInteger)
def __invert__(self):
return UnaryExpression(self.expr,
operator=operators.custom_op("!!!"),
type_=MyInteger)
return MyInteger
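# In UnaryExpression, modifier= renders after the operand (postfix: "somecol !")
# while operator= renders before it (prefix: "!! somecol"); the tests below
# exercise both placements, plus prefix rendering via __invert__.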
def test_factorial(self):
col = column('somecol', self._factorial_fixture())
self.assert_compile(
col.factorial(),
"somecol !"
)
def test_double_factorial(self):
col = column('somecol', self._factorial_fixture())
self.assert_compile(
col.factorial().factorial(),
"somecol ! !"
)
def test_factorial_prefix(self):
col = column('somecol', self._factorial_fixture())
self.assert_compile(
col.factorial_prefix(),
"!! somecol"
)
def test_factorial_invert(self):
col = column('somecol', self._factorial_fixture())
self.assert_compile(
~col,
"!!! somecol"
)
def test_double_factorial_invert(self):
col = column('somecol', self._factorial_fixture())
self.assert_compile(
~(~col),
"!!! (!!! somecol)"
)
def test_unary_no_ops(self):
assert_raises_message(
exc.CompileError,
"Unary expression has no operator or modifier",
UnaryExpression(literal("x")).compile
)
def test_unary_both_ops(self):
assert_raises_message(
exc.CompileError,
"Unary expression does not support operator and "
"modifier simultaneously",
UnaryExpression(literal("x"),
operator=operators.custom_op("x"),
modifier=operators.custom_op("y")).compile
)
class _CustomComparatorTests(object):
def test_override_builtin(self):
c1 = Column('foo', self._add_override_factory())
self._assert_add_override(c1)
def test_column_proxy(self):
t = Table('t', MetaData(),
Column('foo', self._add_override_factory())
)
proxied = t.select().c.foo
self._assert_add_override(proxied)
self._assert_and_override(proxied)
def test_alias_proxy(self):
t = Table('t', MetaData(),
Column('foo', self._add_override_factory())
)
proxied = t.alias().c.foo
self._assert_add_override(proxied)
self._assert_and_override(proxied)
def test_binary_propagate(self):
c1 = Column('foo', self._add_override_factory())
self._assert_add_override(c1 - 6)
self._assert_and_override(c1 - 6)
def test_reverse_binary_propagate(self):
c1 = Column('foo', self._add_override_factory())
self._assert_add_override(6 - c1)
self._assert_and_override(6 - c1)
def test_binary_multi_propagate(self):
c1 = Column('foo', self._add_override_factory())
self._assert_add_override((c1 - 6) + 5)
self._assert_and_override((c1 - 6) + 5)
def test_no_boolean_propagate(self):
c1 = Column('foo', self._add_override_factory())
self._assert_not_add_override(c1 == 56)
self._assert_not_and_override(c1 == 56)
def _assert_and_override(self, expr):
assert (expr & text("5")).compare(
expr.op("goofy_and")(text("5"))
)
def _assert_add_override(self, expr):
assert (expr + 5).compare(
expr.op("goofy")(5)
)
def _assert_not_add_override(self, expr):
assert not (expr + 5).compare(
expr.op("goofy")(5)
)
def _assert_not_and_override(self, expr):
assert not (expr & text("5")).compare(
expr.op("goofy_and")(text("5"))
)
class CustomComparatorTest(_CustomComparatorTests, fixtures.TestBase):
def _add_override_factory(self):
class MyInteger(Integer):
class comparator_factory(TypeEngine.Comparator):
def __init__(self, expr):
self.expr = expr
def __add__(self, other):
return self.expr.op("goofy")(other)
def __and__(self, other):
return self.expr.op("goofy_and")(other)
return MyInteger
class TypeDecoratorComparatorTest(_CustomComparatorTests, fixtures.TestBase):
def _add_override_factory(self):
class MyInteger(TypeDecorator):
impl = Integer
class comparator_factory(TypeDecorator.Comparator):
def __init__(self, expr):
self.expr = expr
def __add__(self, other):
return self.expr.op("goofy")(other)
def __and__(self, other):
return self.expr.op("goofy_and")(other)
return MyInteger
class TypeDecoratorWVariantComparatorTest(
_CustomComparatorTests,
fixtures.TestBase):
def _add_override_factory(self):
class SomeOtherInteger(Integer):
class comparator_factory(TypeEngine.Comparator):
def __init__(self, expr):
self.expr = expr
def __add__(self, other):
return self.expr.op("not goofy")(other)
def __and__(self, other):
return self.expr.op("not goofy_and")(other)
class MyInteger(TypeDecorator):
impl = Integer
class comparator_factory(TypeDecorator.Comparator):
def __init__(self, expr):
self.expr = expr
def __add__(self, other):
return self.expr.op("goofy")(other)
def __and__(self, other):
return self.expr.op("goofy_and")(other)
return MyInteger().with_variant(SomeOtherInteger, "mysql")
class CustomEmbeddedinTypeDecoratorTest(
_CustomComparatorTests,
fixtures.TestBase):
def _add_override_factory(self):
class MyInteger(Integer):
class comparator_factory(TypeEngine.Comparator):
def __init__(self, expr):
self.expr = expr
def __add__(self, other):
return self.expr.op("goofy")(other)
def __and__(self, other):
return self.expr.op("goofy_and")(other)
class MyDecInteger(TypeDecorator):
impl = MyInteger
return MyDecInteger
class NewOperatorTest(_CustomComparatorTests, fixtures.TestBase):
def _add_override_factory(self):
class MyInteger(Integer):
class comparator_factory(TypeEngine.Comparator):
def __init__(self, expr):
self.expr = expr
def foob(self, other):
return self.expr.op("foob")(other)
return MyInteger
def _assert_add_override(self, expr):
assert (expr.foob(5)).compare(
expr.op("foob")(5)
)
def _assert_not_add_override(self, expr):
assert not hasattr(expr, "foob")
def _assert_and_override(self, expr):
pass
def _assert_not_and_override(self, expr):
pass
class ExtensionOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
def test_contains(self):
class MyType(UserDefinedType):
class comparator_factory(UserDefinedType.Comparator):
def contains(self, other, **kw):
return self.op("->")(other)
self.assert_compile(
Column('x', MyType()).contains(5),
"x -> :x_1"
)
def test_getitem(self):
class MyType(UserDefinedType):
class comparator_factory(UserDefinedType.Comparator):
def __getitem__(self, index):
return self.op("->")(index)
self.assert_compile(
Column('x', MyType())[5],
"x -> :x_1"
)
def test_op_not_an_iterator(self):
# see [ticket:2726]
class MyType(UserDefinedType):
class comparator_factory(UserDefinedType.Comparator):
def __getitem__(self, index):
return self.op("->")(index)
col = Column('x', MyType())
assert not isinstance(col, collections.Iterable)
def test_lshift(self):
class MyType(UserDefinedType):
class comparator_factory(UserDefinedType.Comparator):
def __lshift__(self, other):
return self.op("->")(other)
self.assert_compile(
Column('x', MyType()) << 5,
"x -> :x_1"
)
def test_rshift(self):
class MyType(UserDefinedType):
class comparator_factory(UserDefinedType.Comparator):
def __rshift__(self, other):
return self.op("->")(other)
self.assert_compile(
Column('x', MyType()) >> 5,
"x -> :x_1"
)
class BooleanEvalTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"""test standalone booleans being wrapped in an AsBoolean, as well
as true/false compilation."""
def _dialect(self, native_boolean):
d = default.DefaultDialect()
d.supports_native_boolean = native_boolean
return d
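# With supports_native_boolean=True the dialect renders boolean expressions
# directly ("WHERE x", "NOT x"); with False they are emulated as integer
# comparisons ("x = 1" / "x = 0"), which is the split these tests exercise.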
def test_one(self):
c = column('x', Boolean)
self.assert_compile(
select([c]).where(c),
"SELECT x WHERE x",
dialect=self._dialect(True)
)
def test_two_a(self):
c = column('x', Boolean)
self.assert_compile(
select([c]).where(c),
"SELECT x WHERE x = 1",
dialect=self._dialect(False)
)
def test_two_b(self):
c = column('x', Boolean)
self.assert_compile(
select([c], whereclause=c),
"SELECT x WHERE x = 1",
dialect=self._dialect(False)
)
def test_three_a(self):
c = column('x', Boolean)
self.assert_compile(
select([c]).where(~c),
"SELECT x WHERE x = 0",
dialect=self._dialect(False)
)
def test_three_b(self):
c = column('x', Boolean)
self.assert_compile(
select([c], whereclause=~c),
"SELECT x WHERE x = 0",
dialect=self._dialect(False)
)
def test_four(self):
c = column('x', Boolean)
self.assert_compile(
select([c]).where(~c),
"SELECT x WHERE NOT x",
dialect=self._dialect(True)
)
def test_five_a(self):
c = column('x', Boolean)
self.assert_compile(
select([c]).having(c),
"SELECT x HAVING x = 1",
dialect=self._dialect(False)
)
def test_five_b(self):
c = column('x', Boolean)
self.assert_compile(
select([c], having=c),
"SELECT x HAVING x = 1",
dialect=self._dialect(False)
)
def test_six(self):
self.assert_compile(
or_(false(), true()),
"1 = 1",
dialect=self._dialect(False)
)
def test_eight(self):
self.assert_compile(
and_(false(), true()),
"false",
dialect=self._dialect(True)
)
def test_nine(self):
self.assert_compile(
and_(false(), true()),
"0 = 1",
dialect=self._dialect(False)
)
def test_ten(self):
c = column('x', Boolean)
self.assert_compile(
c == 1,
"x = :x_1",
dialect=self._dialect(False)
)
def test_eleven(self):
c = column('x', Boolean)
self.assert_compile(
c.is_(true()),
"x IS true",
dialect=self._dialect(True)
)
def test_twelve(self):
c = column('x', Boolean)
# I don't have a solution for this one yet,
# other than adding some heavy-handed conditionals
# into compiler
self.assert_compile(
c.is_(true()),
"x IS 1",
dialect=self._dialect(False)
)
class ConjunctionTest(fixtures.TestBase, testing.AssertsCompiledSQL):
"""test interaction of and_()/or_() with boolean , null constants
"""
__dialect__ = default.DefaultDialect(supports_native_boolean=True)
def test_one(self):
self.assert_compile(~and_(true()), "false")
def test_two(self):
self.assert_compile(or_(~and_(true())), "false")
def test_three(self):
self.assert_compile(or_(and_()), "")
def test_four(self):
x = column('x')
self.assert_compile(
and_(or_(x == 5), or_(x == 7)),
"x = :x_1 AND x = :x_2")
def test_five(self):
x = column("x")
self.assert_compile(
and_(true()._ifnone(None), x == 7),
"x = :x_1"
)
def test_six(self):
x = column("x")
self.assert_compile(or_(true(), x == 7), "true")
self.assert_compile(or_(x == 7, true()), "true")
self.assert_compile(~or_(x == 7, true()), "false")
def test_six_pt_five(self):
x = column("x")
self.assert_compile(select([x]).where(or_(x == 7, true())),
"SELECT x WHERE true")
self.assert_compile(
select(
[x]).where(
or_(
x == 7,
true())),
"SELECT x WHERE 1 = 1",
dialect=default.DefaultDialect(
supports_native_boolean=False))
def test_seven(self):
x = column("x")
self.assert_compile(
and_(true(), x == 7, true(), x == 9),
"x = :x_1 AND x = :x_2")
def test_eight(self):
x = column("x")
self.assert_compile(
or_(false(), x == 7, false(), x == 9),
"x = :x_1 OR x = :x_2")
def test_nine(self):
x = column("x")
self.assert_compile(
and_(x == 7, x == 9, false(), x == 5),
"false"
)
self.assert_compile(
~and_(x == 7, x == 9, false(), x == 5),
"true"
)
def test_ten(self):
self.assert_compile(
and_(None, None),
"NULL AND NULL"
)
def test_eleven(self):
x = column("x")
self.assert_compile(
select([x]).where(None).where(None),
"SELECT x WHERE NULL AND NULL"
)
def test_twelve(self):
x = column("x")
self.assert_compile(
select([x]).where(and_(None, None)),
"SELECT x WHERE NULL AND NULL"
)
def test_thirteen(self):
x = column("x")
self.assert_compile(
select([x]).where(~and_(None, None)),
"SELECT x WHERE NOT (NULL AND NULL)"
)
def test_fourteen(self):
x = column("x")
self.assert_compile(
select([x]).where(~null()),
"SELECT x WHERE NOT NULL"
)
def test_constant_non_singleton(self):
is_not_(null(), null())
is_not_(false(), false())
is_not_(true(), true())
def test_constant_render_distinct(self):
self.assert_compile(
select([null(), null()]),
"SELECT NULL AS anon_1, NULL AS anon_2"
)
self.assert_compile(
select([true(), true()]),
"SELECT true AS anon_1, true AS anon_2"
)
self.assert_compile(
select([false(), false()]),
"SELECT false AS anon_1, false AS anon_2"
)
class OperatorPrecedenceTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
table1 = table('mytable',
column('myid', Integer),
column('name', String),
column('description', String),
)
table2 = table('op', column('field'))
def test_operator_precedence_1(self):
self.assert_compile(
self.table2.select((self.table2.c.field == 5) == None),
"SELECT op.field FROM op WHERE (op.field = :field_1) IS NULL")
def test_operator_precedence_2(self):
self.assert_compile(
self.table2.select(
(self.table2.c.field + 5) == self.table2.c.field),
"SELECT op.field FROM op WHERE op.field + :field_1 = op.field")
def test_operator_precedence_3(self):
self.assert_compile(
self.table2.select((self.table2.c.field + 5) * 6),
"SELECT op.field FROM op WHERE (op.field + :field_1) * :param_1")
def test_operator_precedence_4(self):
self.assert_compile(
self.table2.select(
(self.table2.c.field * 5) + 6),
"SELECT op.field FROM op WHERE op.field * :field_1 + :param_1")
def test_operator_precedence_5(self):
self.assert_compile(self.table2.select(
5 + self.table2.c.field.in_([5, 6])),
"SELECT op.field FROM op WHERE :param_1 + "
"(op.field IN (:field_1, :field_2))")
def test_operator_precedence_6(self):
self.assert_compile(self.table2.select(
(5 + self.table2.c.field).in_([5, 6])),
"SELECT op.field FROM op WHERE :field_1 + op.field "
"IN (:param_1, :param_2)")
def test_operator_precedence_7(self):
self.assert_compile(self.table2.select(
not_(and_(self.table2.c.field == 5,
self.table2.c.field == 7))),
"SELECT op.field FROM op WHERE NOT "
"(op.field = :field_1 AND op.field = :field_2)")
def test_operator_precedence_8(self):
self.assert_compile(
self.table2.select(
not_(
self.table2.c.field == 5)),
"SELECT op.field FROM op WHERE op.field != :field_1")
def test_operator_precedence_9(self):
self.assert_compile(self.table2.select(
not_(self.table2.c.field.between(5, 6))),
"SELECT op.field FROM op WHERE "
"op.field NOT BETWEEN :field_1 AND :field_2")
def test_operator_precedence_10(self):
self.assert_compile(
self.table2.select(
not_(
self.table2.c.field) == 5),
"SELECT op.field FROM op WHERE (NOT op.field) = :param_1")
def test_operator_precedence_11(self):
self.assert_compile(self.table2.select(
(self.table2.c.field == self.table2.c.field).
between(False, True)),
"SELECT op.field FROM op WHERE (op.field = op.field) "
"BETWEEN :param_1 AND :param_2")
def test_operator_precedence_12(self):
self.assert_compile(self.table2.select(
between((self.table2.c.field == self.table2.c.field),
False, True)),
"SELECT op.field FROM op WHERE (op.field = op.field) "
"BETWEEN :param_1 AND :param_2")
def test_operator_precedence_13(self):
self.assert_compile(
self.table2.select(
self.table2.c.field.match(
self.table2.c.field).is_(None)),
"SELECT op.field FROM op WHERE (op.field MATCH op.field) IS NULL")
def test_operator_precedence_collate_1(self):
self.assert_compile(
self.table1.c.name == literal('foo').collate('utf-8'),
"mytable.name = (:param_1 COLLATE utf-8)"
)
def test_operator_precedence_collate_2(self):
self.assert_compile(
(self.table1.c.name == literal('foo')).collate('utf-8'),
"mytable.name = :param_1 COLLATE utf-8"
)
def test_operator_precedence_collate_3(self):
self.assert_compile(
self.table1.c.name.collate('utf-8') == 'foo',
"(mytable.name COLLATE utf-8) = :param_1"
)
def test_operator_precedence_collate_4(self):
self.assert_compile(
and_(
(self.table1.c.name == literal('foo')).collate('utf-8'),
(self.table2.c.field == literal('bar')).collate('utf-8'),
),
"mytable.name = :param_1 COLLATE utf-8 "
"AND op.field = :param_2 COLLATE utf-8"
)
def test_operator_precedence_collate_5(self):
self.assert_compile(
select([self.table1.c.name]).order_by(
self.table1.c.name.collate('utf-8').desc()),
"SELECT mytable.name FROM mytable "
"ORDER BY mytable.name COLLATE utf-8 DESC"
)
def test_operator_precedence_collate_6(self):
self.assert_compile(
select([self.table1.c.name]).order_by(
self.table1.c.name.collate('utf-8').desc().nullslast()),
"SELECT mytable.name FROM mytable "
"ORDER BY mytable.name COLLATE utf-8 DESC NULLS LAST"
)
def test_operator_precedence_collate_7(self):
self.assert_compile(
select([self.table1.c.name]).order_by(
self.table1.c.name.collate('utf-8').asc()),
"SELECT mytable.name FROM mytable "
"ORDER BY mytable.name COLLATE utf-8 ASC"
)
def test_commutative_operators(self):
self.assert_compile(
literal("a") + literal("b") * literal("c"),
":param_1 || :param_2 * :param_3"
)
def test_op_operators(self):
self.assert_compile(
self.table1.select(self.table1.c.myid.op('hoho')(12) == 14),
"SELECT mytable.myid, mytable.name, mytable.description FROM "
"mytable WHERE (mytable.myid hoho :myid_1) = :param_1"
)
def test_op_operators_comma_precedence(self):
self.assert_compile(
func.foo(self.table1.c.myid.op('hoho')(12)),
"foo(mytable.myid hoho :myid_1)"
)
def test_op_operators_comparison_precedence(self):
self.assert_compile(
self.table1.c.myid.op('hoho')(12) == 5,
"(mytable.myid hoho :myid_1) = :param_1"
)
def test_op_operators_custom_precedence(self):
op1 = self.table1.c.myid.op('hoho', precedence=5)
op2 = op1(5).op('lala', precedence=4)(4)
op3 = op1(5).op('lala', precedence=6)(4)
self.assert_compile(op2, "mytable.myid hoho :myid_1 lala :param_1")
self.assert_compile(op3, "(mytable.myid hoho :myid_1) lala :param_1")
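# The inner 'hoho' op has precedence 5: an outer op with the lower precedence 4
# needs no grouping, while one with the higher precedence 6 forces parentheses
# around the inner expression.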
class OperatorAssociativityTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
def test_associativity_1(self):
f = column('f')
self.assert_compile(f - f, "f - f")
def test_associativity_2(self):
f = column('f')
self.assert_compile(f - f - f, "(f - f) - f")
def test_associativity_3(self):
f = column('f')
self.assert_compile((f - f) - f, "(f - f) - f")
def test_associativity_4(self):
f = column('f')
self.assert_compile((f - f).label('foo') - f, "(f - f) - f")
def test_associativity_5(self):
f = column('f')
self.assert_compile(f - (f - f), "f - (f - f)")
def test_associativity_6(self):
f = column('f')
self.assert_compile(f - (f - f).label('foo'), "f - (f - f)")
def test_associativity_7(self):
f = column('f')
# because - has lower precedence than /
self.assert_compile(f / (f - f), "f / (f - f)")
def test_associativity_8(self):
f = column('f')
self.assert_compile(f / (f - f).label('foo'), "f / (f - f)")
def test_associativity_9(self):
f = column('f')
self.assert_compile(f / f - f, "f / f - f")
def test_associativity_10(self):
f = column('f')
self.assert_compile((f / f) - f, "f / f - f")
def test_associativity_11(self):
f = column('f')
self.assert_compile((f / f).label('foo') - f, "f / f - f")
def test_associativity_12(self):
f = column('f')
# because / has higher precedence than -
self.assert_compile(f - (f / f), "f - f / f")
def test_associativity_13(self):
f = column('f')
self.assert_compile(f - (f / f).label('foo'), "f - f / f")
def test_associativity_14(self):
f = column('f')
self.assert_compile(f - f / f, "f - f / f")
def test_associativity_15(self):
f = column('f')
self.assert_compile((f - f) / f, "(f - f) / f")
def test_associativity_16(self):
f = column('f')
self.assert_compile(((f - f) / f) - f, "(f - f) / f - f")
def test_associativity_17(self):
f = column('f')
# - lower precedence than /
self.assert_compile((f - f) / (f - f), "(f - f) / (f - f)")
def test_associativity_18(self):
f = column('f')
# / higher precedence than -
self.assert_compile((f / f) - (f / f), "f / f - f / f")
def test_associativity_19(self):
f = column('f')
self.assert_compile((f / f) - (f - f), "f / f - (f - f)")
def test_associativity_20(self):
f = column('f')
self.assert_compile((f / f) / (f - f), "(f / f) / (f - f)")
def test_associativity_21(self):
f = column('f')
self.assert_compile(f / (f / (f - f)), "f / (f / (f - f))")
class InTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
table1 = table('mytable',
column('myid', Integer),
)
table2 = table(
'myothertable',
column('otherid', Integer),
column('othername', String)
)
def test_in_1(self):
self.assert_compile(self.table1.c.myid.in_(['a']),
"mytable.myid IN (:myid_1)")
def test_in_2(self):
self.assert_compile(~self.table1.c.myid.in_(['a']),
"mytable.myid NOT IN (:myid_1)")
def test_in_3(self):
self.assert_compile(self.table1.c.myid.in_(['a', 'b']),
"mytable.myid IN (:myid_1, :myid_2)")
def test_in_4(self):
self.assert_compile(self.table1.c.myid.in_(iter(['a', 'b'])),
"mytable.myid IN (:myid_1, :myid_2)")
def test_in_5(self):
self.assert_compile(self.table1.c.myid.in_([literal('a')]),
"mytable.myid IN (:param_1)")
def test_in_6(self):
self.assert_compile(self.table1.c.myid.in_([literal('a'), 'b']),
"mytable.myid IN (:param_1, :myid_1)")
def test_in_7(self):
self.assert_compile(
self.table1.c.myid.in_([literal('a'), literal('b')]),
"mytable.myid IN (:param_1, :param_2)")
def test_in_8(self):
self.assert_compile(self.table1.c.myid.in_(['a', literal('b')]),
"mytable.myid IN (:myid_1, :param_1)")
def test_in_9(self):
self.assert_compile(self.table1.c.myid.in_([literal(1) + 'a']),
"mytable.myid IN (:param_1 + :param_2)")
def test_in_10(self):
self.assert_compile(self.table1.c.myid.in_([literal('a') + 'a', 'b']),
"mytable.myid IN (:param_1 || :param_2, :myid_1)")
def test_in_11(self):
self.assert_compile(
self.table1.c.myid.in_(
[
literal('a') +
literal('a'),
literal('b')]),
"mytable.myid IN (:param_1 || :param_2, :param_3)")
def test_in_12(self):
self.assert_compile(self.table1.c.myid.in_([1, literal(3) + 4]),
"mytable.myid IN (:myid_1, :param_1 + :param_2)")
def test_in_13(self):
self.assert_compile(self.table1.c.myid.in_([literal('a') < 'b']),
"mytable.myid IN (:param_1 < :param_2)")
def test_in_14(self):
self.assert_compile(self.table1.c.myid.in_([self.table1.c.myid]),
"mytable.myid IN (mytable.myid)")
def test_in_15(self):
self.assert_compile(self.table1.c.myid.in_(['a', self.table1.c.myid]),
"mytable.myid IN (:myid_1, mytable.myid)")
def test_in_16(self):
self.assert_compile(self.table1.c.myid.in_([literal('a'),
self.table1.c.myid]),
"mytable.myid IN (:param_1, mytable.myid)")
def test_in_17(self):
self.assert_compile(
self.table1.c.myid.in_(
[
literal('a'),
self.table1.c.myid +
'a']),
"mytable.myid IN (:param_1, mytable.myid + :myid_1)")
def test_in_18(self):
self.assert_compile(
self.table1.c.myid.in_(
[
literal(1),
'a' +
self.table1.c.myid]),
"mytable.myid IN (:param_1, :myid_1 + mytable.myid)")
def test_in_19(self):
self.assert_compile(self.table1.c.myid.in_([1, 2, 3]),
"mytable.myid IN (:myid_1, :myid_2, :myid_3)")
def test_in_20(self):
self.assert_compile(self.table1.c.myid.in_(
select([self.table2.c.otherid])),
"mytable.myid IN (SELECT myothertable.otherid FROM myothertable)")
def test_in_21(self):
self.assert_compile(~self.table1.c.myid.in_(
select([self.table2.c.otherid])),
"mytable.myid NOT IN (SELECT myothertable.otherid FROM myothertable)")
def test_in_22(self):
self.assert_compile(
self.table1.c.myid.in_(
text("SELECT myothertable.otherid FROM myothertable")
),
"mytable.myid IN (SELECT myothertable.otherid "
"FROM myothertable)"
)
@testing.emits_warning('.*empty sequence.*')
def test_in_23(self):
self.assert_compile(self.table1.c.myid.in_([]),
"mytable.myid != mytable.myid")
def test_in_24(self):
self.assert_compile(
select([self.table1.c.myid.in_(select([self.table2.c.otherid]))]),
"SELECT mytable.myid IN (SELECT myothertable.otherid "
"FROM myothertable) AS anon_1 FROM mytable"
)
def test_in_25(self):
self.assert_compile(
select([self.table1.c.myid.in_(
select([self.table2.c.otherid]).as_scalar())]),
"SELECT mytable.myid IN (SELECT myothertable.otherid "
"FROM myothertable) AS anon_1 FROM mytable"
)
def test_in_26(self):
self.assert_compile(self.table1.c.myid.in_(
union(
select([self.table1.c.myid], self.table1.c.myid == 5),
select([self.table1.c.myid], self.table1.c.myid == 12),
)
), "mytable.myid IN ("
"SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_1 "
"UNION SELECT mytable.myid FROM mytable WHERE mytable.myid = :myid_2)")
def test_in_27(self):
# test that putting a select in an IN clause does not
# blow away its ORDER BY clause
self.assert_compile(
select([self.table1, self.table2],
self.table2.c.otherid.in_(
select([self.table2.c.otherid],
order_by=[self.table2.c.othername],
limit=10, correlate=False)
),
from_obj=[self.table1.join(self.table2,
self.table1.c.myid == self.table2.c.otherid)],
order_by=[self.table1.c.myid]
),
"SELECT mytable.myid, "
"myothertable.otherid, myothertable.othername FROM mytable "
"JOIN myothertable ON mytable.myid = myothertable.otherid "
"WHERE myothertable.otherid IN (SELECT myothertable.otherid "
"FROM myothertable ORDER BY myothertable.othername "
"LIMIT :param_1) ORDER BY mytable.myid",
{'param_1': 10}
)
def test_in_28(self):
self.assert_compile(
self.table1.c.myid.in_([None]),
"mytable.myid IN (NULL)"
)
@testing.emits_warning('.*empty sequence.*')
def test_in_29(self):
self.assert_compile(self.table1.c.myid.notin_([]),
"mytable.myid = mytable.myid")
@testing.emits_warning('.*empty sequence.*')
def test_in_30(self):
self.assert_compile(~self.table1.c.myid.in_([]),
"mytable.myid = mytable.myid")
class MathOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
table1 = table('mytable',
column('myid', Integer),
)
def _test_math_op(self, py_op, sql_op):
for (lhs, rhs, res) in (
(5, self.table1.c.myid, ':myid_1 %s mytable.myid'),
(5, literal(5), ':param_1 %s :param_2'),
(self.table1.c.myid, 'b', 'mytable.myid %s :myid_1'),
(self.table1.c.myid, literal(2.7), 'mytable.myid %s :param_1'),
(self.table1.c.myid, self.table1.c.myid,
'mytable.myid %s mytable.myid'),
(literal(5), 8, ':param_1 %s :param_2'),
(literal(6), self.table1.c.myid, ':param_1 %s mytable.myid'),
(literal(7), literal(5.5), ':param_1 %s :param_2'),
):
self.assert_compile(py_op(lhs, rhs), res % sql_op)
def test_math_op_add(self):
self._test_math_op(operator.add, '+')
def test_math_op_mul(self):
self._test_math_op(operator.mul, '*')
def test_math_op_sub(self):
self._test_math_op(operator.sub, '-')
def test_math_op_div(self):
if util.py3k:
self._test_math_op(operator.truediv, '/')
else:
self._test_math_op(operator.div, '/')
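# operator.div exists only on Python 2; on Python 3 the / operator maps to
# operator.truediv, hence the util.py3k switch above.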
class ComparisonOperatorTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
table1 = table('mytable',
column('myid', Integer),
)
def test_pickle_operators_one(self):
clause = (self.table1.c.myid == 12) & \
self.table1.c.myid.between(15, 20) & \
self.table1.c.myid.like('hoho')
eq_(str(clause), str(util.pickle.loads(util.pickle.dumps(clause))))
def test_pickle_operators_two(self):
clause = tuple_(1, 2, 3)
eq_(str(clause), str(util.pickle.loads(util.pickle.dumps(clause))))
def _test_comparison_op(self, py_op, fwd_op, rev_op):
dt = datetime.datetime(2012, 5, 10, 15, 27, 18)
for (lhs, rhs, l_sql, r_sql) in (
('a', self.table1.c.myid, ':myid_1', 'mytable.myid'),
('a', literal('b'), ':param_2', ':param_1'), # note swap!
(self.table1.c.myid, 'b', 'mytable.myid', ':myid_1'),
(self.table1.c.myid, literal('b'), 'mytable.myid', ':param_1'),
(self.table1.c.myid, self.table1.c.myid,
'mytable.myid', 'mytable.myid'),
(literal('a'), 'b', ':param_1', ':param_2'),
(literal('a'), self.table1.c.myid, ':param_1', 'mytable.myid'),
(literal('a'), literal('b'), ':param_1', ':param_2'),
(dt, literal('b'), ':param_2', ':param_1'),
(literal('b'), dt, ':param_1', ':param_2'),
):
# the compiled clause should match either (e.g.):
# 'a' < 'b' -or- 'b' > 'a'.
compiled = str(py_op(lhs, rhs))
fwd_sql = "%s %s %s" % (l_sql, fwd_op, r_sql)
rev_sql = "%s %s %s" % (r_sql, rev_op, l_sql)
self.assert_(compiled == fwd_sql or compiled == rev_sql,
"\n'" + compiled + "'\n does not match\n'" +
fwd_sql + "'\n or\n'" + rev_sql + "'")
def test_comparison_operators_lt(self):
self._test_comparison_op(operator.lt, '<', '>')
def test_comparison_operators_gt(self):
self._test_comparison_op(operator.gt, '>', '<')
def test_comparison_operators_eq(self):
self._test_comparison_op(operator.eq, '=', '=')
def test_comparison_operators_ne(self):
self._test_comparison_op(operator.ne, '!=', '!=')
def test_comparison_operators_le(self):
self._test_comparison_op(operator.le, '<=', '>=')
def test_comparison_operators_ge(self):
self._test_comparison_op(operator.ge, '>=', '<=')
class NonZeroTest(fixtures.TestBase):
def _raises(self, expr):
assert_raises_message(
TypeError,
"Boolean value of this clause is not defined",
bool, expr
)
def _assert_true(self, expr):
is_(bool(expr), True)
def _assert_false(self, expr):
is_(bool(expr), False)
def test_column_identity_eq(self):
c1 = column('c1')
self._assert_true(c1 == c1)
def test_column_identity_gt(self):
c1 = column('c1')
self._raises(c1 > c1)
def test_column_compare_eq(self):
c1, c2 = column('c1'), column('c2')
self._assert_false(c1 == c2)
def test_column_compare_gt(self):
c1, c2 = column('c1'), column('c2')
self._raises(c1 > c2)
def test_binary_identity_eq(self):
c1 = column('c1')
expr = c1 > 5
self._assert_true(expr == expr)
def test_labeled_binary_identity_eq(self):
c1 = column('c1')
expr = (c1 > 5).label(None)
self._assert_true(expr == expr)
def test_annotated_binary_identity_eq(self):
c1 = column('c1')
expr1 = (c1 > 5)
expr2 = expr1._annotate({"foo": "bar"})
self._assert_true(expr1 == expr2)
def test_labeled_binary_compare_gt(self):
c1 = column('c1')
expr1 = (c1 > 5).label(None)
expr2 = (c1 > 5).label(None)
self._assert_false(expr1 == expr2)
class NegationTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
table1 = table('mytable',
column('myid', Integer),
column('name', String),
)
def test_negate_operators_1(self):
for (py_op, op) in (
(operator.neg, '-'),
(operator.inv, 'NOT '),
):
for expr, expected in (
(self.table1.c.myid, "mytable.myid"),
(literal("foo"), ":param_1"),
):
self.assert_compile(py_op(expr), "%s%s" % (op, expected))
def test_negate_operators_2(self):
self.assert_compile(
self.table1.select((self.table1.c.myid != 12) &
~(self.table1.c.name == 'john')),
"SELECT mytable.myid, mytable.name FROM "
"mytable WHERE mytable.myid != :myid_1 "
"AND mytable.name != :name_1"
)
def test_negate_operators_3(self):
self.assert_compile(
self.table1.select((self.table1.c.myid != 12) &
~(self.table1.c.name.between('jack', 'john'))),
"SELECT mytable.myid, mytable.name FROM "
"mytable WHERE mytable.myid != :myid_1 AND "
"mytable.name NOT BETWEEN :name_1 AND :name_2"
)
def test_negate_operators_4(self):
self.assert_compile(
self.table1.select((self.table1.c.myid != 12) &
~and_(self.table1.c.name == 'john',
self.table1.c.name == 'ed',
self.table1.c.name == 'fred')),
"SELECT mytable.myid, mytable.name FROM "
"mytable WHERE mytable.myid != :myid_1 AND "
"NOT (mytable.name = :name_1 AND mytable.name = :name_2 "
"AND mytable.name = :name_3)"
)
def test_negate_operators_5(self):
self.assert_compile(
self.table1.select(
(self.table1.c.myid != 12) & ~self.table1.c.name),
"SELECT mytable.myid, mytable.name FROM "
"mytable WHERE mytable.myid != :myid_1 AND NOT mytable.name")
class LikeTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
table1 = table('mytable',
column('myid', Integer),
column('name', String),
)
def test_like_1(self):
self.assert_compile(
self.table1.c.myid.like('somstr'),
"mytable.myid LIKE :myid_1")
def test_like_2(self):
self.assert_compile(
~self.table1.c.myid.like('somstr'),
"mytable.myid NOT LIKE :myid_1")
def test_like_3(self):
self.assert_compile(
self.table1.c.myid.like('somstr', escape='\\'),
"mytable.myid LIKE :myid_1 ESCAPE '\\'")
def test_like_4(self):
self.assert_compile(
~self.table1.c.myid.like('somstr', escape='\\'),
"mytable.myid NOT LIKE :myid_1 ESCAPE '\\'")
def test_like_5(self):
self.assert_compile(
self.table1.c.myid.ilike('somstr', escape='\\'),
"lower(mytable.myid) LIKE lower(:myid_1) ESCAPE '\\'")
def test_like_6(self):
self.assert_compile(
~self.table1.c.myid.ilike('somstr', escape='\\'),
"lower(mytable.myid) NOT LIKE lower(:myid_1) ESCAPE '\\'")
def test_like_7(self):
self.assert_compile(
self.table1.c.myid.ilike('somstr', escape='\\'),
"mytable.myid ILIKE %(myid_1)s ESCAPE '\\\\'",
dialect=postgresql.dialect())
def test_like_8(self):
self.assert_compile(
~self.table1.c.myid.ilike('somstr', escape='\\'),
"mytable.myid NOT ILIKE %(myid_1)s ESCAPE '\\\\'",
dialect=postgresql.dialect())
def test_like_9(self):
self.assert_compile(
self.table1.c.name.ilike('%something%'),
"lower(mytable.name) LIKE lower(:name_1)")
def test_like_10(self):
self.assert_compile(
self.table1.c.name.ilike('%something%'),
"mytable.name ILIKE %(name_1)s",
dialect=postgresql.dialect())
def test_like_11(self):
self.assert_compile(
~self.table1.c.name.ilike('%something%'),
"lower(mytable.name) NOT LIKE lower(:name_1)")
def test_like_12(self):
self.assert_compile(
~self.table1.c.name.ilike('%something%'),
"mytable.name NOT ILIKE %(name_1)s",
dialect=postgresql.dialect())
class BetweenTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
table1 = table('mytable',
column('myid', Integer),
column('name', String),
)
def test_between_1(self):
self.assert_compile(
self.table1.c.myid.between(1, 2),
"mytable.myid BETWEEN :myid_1 AND :myid_2")
def test_between_2(self):
self.assert_compile(
~self.table1.c.myid.between(1, 2),
"mytable.myid NOT BETWEEN :myid_1 AND :myid_2")
def test_between_3(self):
self.assert_compile(
self.table1.c.myid.between(1, 2, symmetric=True),
"mytable.myid BETWEEN SYMMETRIC :myid_1 AND :myid_2")
def test_between_4(self):
self.assert_compile(
~self.table1.c.myid.between(1, 2, symmetric=True),
"mytable.myid NOT BETWEEN SYMMETRIC :myid_1 AND :myid_2")
def test_between_5(self):
self.assert_compile(
between(self.table1.c.myid, 1, 2, symmetric=True),
"mytable.myid BETWEEN SYMMETRIC :myid_1 AND :myid_2")
def test_between_6(self):
self.assert_compile(
~between(self.table1.c.myid, 1, 2, symmetric=True),
"mytable.myid NOT BETWEEN SYMMETRIC :myid_1 AND :myid_2")
class MatchTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
table1 = table('mytable',
column('myid', Integer),
column('name', String),
)
def test_match_1(self):
self.assert_compile(self.table1.c.myid.match('somstr'),
"mytable.myid MATCH ?",
dialect=sqlite.dialect())
def test_match_2(self):
self.assert_compile(
self.table1.c.myid.match('somstr'),
"MATCH (mytable.myid) AGAINST (%s IN BOOLEAN MODE)",
dialect=mysql.dialect())
def test_match_3(self):
self.assert_compile(self.table1.c.myid.match('somstr'),
"CONTAINS (mytable.myid, :myid_1)",
dialect=mssql.dialect())
def test_match_4(self):
self.assert_compile(self.table1.c.myid.match('somstr'),
"mytable.myid @@ to_tsquery(%(myid_1)s)",
dialect=postgresql.dialect())
def test_match_5(self):
self.assert_compile(self.table1.c.myid.match('somstr'),
"CONTAINS (mytable.myid, :myid_1)",
dialect=oracle.dialect())
class ComposedLikeOperatorsTest(fixtures.TestBase, testing.AssertsCompiledSQL):
__dialect__ = 'default'
def test_contains(self):
self.assert_compile(
column('x').contains('y'),
"x LIKE '%%' || :x_1 || '%%'",
checkparams={'x_1': 'y'}
)
def test_contains_escape(self):
self.assert_compile(
column('x').contains('y', escape='\\'),
"x LIKE '%%' || :x_1 || '%%' ESCAPE '\\'",
checkparams={'x_1': 'y'}
)
def test_contains_literal(self):
self.assert_compile(
column('x').contains(literal_column('y')),
"x LIKE '%%' || y || '%%'",
checkparams={}
)
def test_contains_text(self):
self.assert_compile(
column('x').contains(text('y')),
"x LIKE '%%' || y || '%%'",
checkparams={}
)
def test_not_contains(self):
self.assert_compile(
~column('x').contains('y'),
"x NOT LIKE '%%' || :x_1 || '%%'",
checkparams={'x_1': 'y'}
)
def test_not_contains_escape(self):
self.assert_compile(
~column('x').contains('y', escape='\\'),
"x NOT LIKE '%%' || :x_1 || '%%' ESCAPE '\\'",
checkparams={'x_1': 'y'}
)
def test_contains_concat(self):
self.assert_compile(
column('x').contains('y'),
"x LIKE concat(concat('%%', %s), '%%')",
checkparams={'x_1': 'y'},
dialect=mysql.dialect()
)
def test_not_contains_concat(self):
self.assert_compile(
~column('x').contains('y'),
"x NOT LIKE concat(concat('%%', %s), '%%')",
checkparams={'x_1': 'y'},
dialect=mysql.dialect()
)
def test_contains_literal_concat(self):
self.assert_compile(
column('x').contains(literal_column('y')),
"x LIKE concat(concat('%%', y), '%%')",
checkparams={},
dialect=mysql.dialect()
)
def test_contains_text_concat(self):
self.assert_compile(
column('x').contains(text('y')),
"x LIKE concat(concat('%%', y), '%%')",
checkparams={},
dialect=mysql.dialect()
)
def test_startswith(self):
self.assert_compile(
column('x').startswith('y'),
"x LIKE :x_1 || '%%'",
checkparams={'x_1': 'y'}
)
def test_startswith_escape(self):
self.assert_compile(
column('x').startswith('y', escape='\\'),
"x LIKE :x_1 || '%%' ESCAPE '\\'",
checkparams={'x_1': 'y'}
)
def test_not_startswith(self):
self.assert_compile(
~column('x').startswith('y'),
"x NOT LIKE :x_1 || '%%'",
checkparams={'x_1': 'y'}
)
def test_not_startswith_escape(self):
self.assert_compile(
~column('x').startswith('y', escape='\\'),
"x NOT LIKE :x_1 || '%%' ESCAPE '\\'",
checkparams={'x_1': 'y'}
)
def test_startswith_literal(self):
self.assert_compile(
column('x').startswith(literal_column('y')),
"x LIKE y || '%%'",
checkparams={}
)
def test_startswith_text(self):
self.assert_compile(
column('x').startswith(text('y')),
"x LIKE y || '%%'",
checkparams={}
)
def test_startswith_concat(self):
self.assert_compile(
column('x').startswith('y'),
"x LIKE concat(%s, '%%')",
checkparams={'x_1': 'y'},
dialect=mysql.dialect()
)
def test_not_startswith_concat(self):
self.assert_compile(
~column('x').startswith('y'),
"x NOT LIKE concat(%s, '%%')",
checkparams={'x_1': 'y'},
dialect=mysql.dialect()
)
def test_startswith_firebird(self):
self.assert_compile(
column('x').startswith('y'),
"x STARTING WITH :x_1",
checkparams={'x_1': 'y'},
dialect=firebird.dialect()
)
def test_not_startswith_firebird(self):
self.assert_compile(
~column('x').startswith('y'),
"x NOT STARTING WITH :x_1",
checkparams={'x_1': 'y'},
dialect=firebird.dialect()
)
def test_startswith_literal_mysql(self):
self.assert_compile(
column('x').startswith(literal_column('y')),
"x LIKE concat(y, '%%')",
checkparams={},
dialect=mysql.dialect()
)
def test_startswith_text_mysql(self):
self.assert_compile(
column('x').startswith(text('y')),
"x LIKE concat(y, '%%')",
checkparams={},
dialect=mysql.dialect()
)
def test_endswith(self):
self.assert_compile(
column('x').endswith('y'),
"x LIKE '%%' || :x_1",
checkparams={'x_1': 'y'}
)
def test_endswith_escape(self):
self.assert_compile(
column('x').endswith('y', escape='\\'),
"x LIKE '%%' || :x_1 ESCAPE '\\'",
checkparams={'x_1': 'y'}
)
def test_not_endswith(self):
self.assert_compile(
~column('x').endswith('y'),
"x NOT LIKE '%%' || :x_1",
checkparams={'x_1': 'y'}
)
def test_not_endswith_escape(self):
self.assert_compile(
~column('x').endswith('y', escape='\\'),
"x NOT LIKE '%%' || :x_1 ESCAPE '\\'",
checkparams={'x_1': 'y'}
)
def test_endswith_literal(self):
self.assert_compile(
column('x').endswith(literal_column('y')),
"x LIKE '%%' || y",
checkparams={}
)
def test_endswith_text(self):
self.assert_compile(
column('x').endswith(text('y')),
"x LIKE '%%' || y",
checkparams={}
)
def test_endswith_mysql(self):
self.assert_compile(
column('x').endswith('y'),
"x LIKE concat('%%', %s)",
checkparams={'x_1': 'y'},
dialect=mysql.dialect()
)
def test_not_endswith_mysql(self):
self.assert_compile(
~column('x').endswith('y'),
"x NOT LIKE concat('%%', %s)",
checkparams={'x_1': 'y'},
dialect=mysql.dialect()
)
def test_endswith_literal_mysql(self):
self.assert_compile(
column('x').endswith(literal_column('y')),
"x LIKE concat('%%', y)",
checkparams={},
dialect=mysql.dialect()
)
def test_endswith_text_mysql(self):
self.assert_compile(
column('x').endswith(text('y')),
"x LIKE concat('%%', y)",
checkparams={},
dialect=mysql.dialect()
)
class CustomOpTest(fixtures.TestBase):
def test_is_comparison(self):
c = column('x')
c2 = column('y')
op1 = c.op('$', is_comparison=True)(c2).operator
op2 = c.op('$', is_comparison=False)(c2).operator
assert operators.is_comparison(op1)
assert not operators.is_comparison(op2)
class TupleTypingTest(fixtures.TestBase):
def _assert_types(self, expr):
eq_(expr.clauses[0].type._type_affinity, Integer)
eq_(expr.clauses[1].type._type_affinity, String)
eq_(expr.clauses[2].type._type_affinity, LargeBinary()._type_affinity)
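# the plain Python tuples on the right-hand side below are coerced
# element-wise to the tuple_() column types on the left (Integer, String,
# LargeBinary), which is what _assert_types verifies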
def test_type_coercion_on_eq(self):
a, b, c = column(
'a', Integer), column(
'b', String), column(
'c', LargeBinary)
t1 = tuple_(a, b, c)
expr = t1 == (3, 'hi', 'there')
self._assert_types(expr.right)
def test_type_coercion_on_in(self):
a, b, c = column(
'a', Integer), column(
'b', String), column(
'c', LargeBinary)
t1 = tuple_(a, b, c)
expr = t1.in_([(3, 'hi', 'there'), (4, 'Q', 'P')])
eq_(len(expr.right.clauses), 2)
for elem in expr.right.clauses:
self._assert_types(elem)
| [
"[email protected]"
] | |
7d3a565b843d3a511283b8290b2e3e98f9f02a74 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/soisson.py | 2f90d49960b18e683a39c2e7e75ccc653b9bb91e | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 178 | py | ii = [('WilbRLW4.py', 1), ('CarlTFR.py', 6), ('CookGHP2.py', 1), ('KiddJAE.py', 1), ('ClarGE.py', 2), ('BuckWGM.py', 2), ('WadeJEB.py', 1), ('GodwWLN.py', 1), ('BuckWGM2.py', 1)] | [
"[email protected]"
] | |
598aa5789fc89d20614a949df27117f073692147 | b2c780661aec8076a0b6d00bf8ea0d443a117df6 | /Popularity/DCAFPilot/test/utils_t.py | b5af29934995578af40c4def334385a5c2d302eb | [] | no_license | maitdaoud/DMWMAnalytics | 894fa2afb8d83a5275f0abd61b74f4f839150cb0 | fec7ef3e5240973db96ba53179940950002adbd8 | refs/heads/master | 2020-04-11T03:33:43.164136 | 2017-04-01T14:07:42 | 2017-04-01T14:07:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 975 | py | #!/usr/bin/env python
#pylint: disable-msg=C0301,C0103
"""
Unit tests for the DCAF.utils.utils helper functions
"""
import unittest

from DCAF.utils.utils import popdb_date, ndays

class testUtils(unittest.TestCase):
    """
    A test class for the DCAF.utils.utils helpers
    """
    def setUp(self):
        "set up (no external fixtures required)"
        pass

    def tearDown(self):
        "Perform clean-up"
        pass

    def test_popdb_date(self):
        "Test that popdb_date normalizes YYYYMMDD to YYYY-M-D and is idempotent"
        result = popdb_date('20140105')
        expect = '2014-1-5'
        self.assertEqual(expect, result)
        result = popdb_date(expect)
        self.assertEqual(expect, result)

    def test_ndays(self):
        "Test that ndays returns the number of days between two YYYYMMDD dates"
        time1, time2 = '20141120', '20141124'
        result = ndays(time1, time2)
        expect = 4
        self.assertEqual(expect, result)
#
# main
#
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
3c699961c03db0286e4b397de0a722d189504754 | 30e2a85fc560165a16813b0486a862317c7a486a | /datastruct_algorithm/jan.py | bb5cbcfb654440320b08cce91cc4251879eb8dfd | [] | no_license | muryliang/python_prac | 2f65b6fdb86c3b3a44f0c6452a154cd497eb2d01 | 0301e8f523a2e31e417fd99a968ad8414e9a1e08 | refs/heads/master | 2021-01-21T11:03:48.397178 | 2017-09-18T04:13:27 | 2017-09-18T04:13:27 | 68,801,688 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,328 | py | import time
import sys

def perform(a, b, goal, failset, trueset):
    """Depth-first search over jug states: a is the current volume in jug A
    (capacity lima), b the volume in jug B (capacity limb); failset is the
    list of states already visited, trueset collects the winning moves as
    the recursion unwinds (deepest move first)."""
    # time.sleep(1)
    # print(a, b)
    global lima
    global limb
    res = False
    if a == goal or b == goal or a + b == goal:
        return True
    # pour A into B
    if res is False and a > 0 and b < limb:
        ares = max(a - (limb-b), 0)
        bres = min(limb, b + a)
        if (ares, bres) not in failset:
            failset.append((ares, bres))
            res = perform(ares, bres, goal, failset, trueset)
            if res:
                trueset.append("rmove")
    # pour B into A
    if res is False and b > 0 and a < lima:
        ares = min(lima, a + b)
        bres = max(b - (lima-a), 0)
        if (ares, bres) not in failset:
            failset.append((ares, bres))
            res = perform(ares, bres, goal, failset, trueset)
            if res:
                trueset.append("lmove")
    # empty jug B
    if res is False and b > 0:
        ares = a
        bres = 0
        if (ares, bres) not in failset:
            failset.append((ares, bres))
            res = perform(ares, bres, goal, failset, trueset)
            if res:
                trueset.append("drop b")
    # empty jug A
    if res is False and a > 0:
        ares = 0
        bres = b
        if (ares, bres) not in failset:
            failset.append((ares, bres))
            res = perform(ares, bres, goal, failset, trueset)
            if res:
                trueset.append("drop a")
    # fill jug A
    if res is False and a < lima:
        ares = lima
        bres = b
        if (ares, bres) not in failset:
            failset.append((ares, bres))
            res = perform(ares, bres, goal, failset, trueset)
            if res:
                trueset.append("fill a")
    # fill jug B
    if res is False and b < limb:
        ares = a
        bres = limb
        if (ares, bres) not in failset:
            failset.append((ares, bres))
            res = perform(ares, bres, goal, failset, trueset)
            if res:
                trueset.append("fill b")
    # if res is False:
    #     print("nothing true, return")
    return res

failset = [(0, 0)]
trueset = list()
lima = int(sys.argv[1])
limb = int(sys.argv[2])
goal = int(sys.argv[3])
if perform(0, 0, goal, failset, trueset):
    print("success")
else:
    print("fail")
print(list(reversed(trueset)))
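# Usage sketch (the argument values below are hypothetical, not from the source):
#   python jan.py 3 5 4
# searches the 3- and 5-unit jug states for a way to measure 4 units and, on
# success, prints the moves in chronological order (trueset is built
# deepest-move-first, hence the reversed()).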
| [
"[email protected]"
] | |
3724941a22eb118782c4c142d7dc6097e8d37e35 | ad13583673551857615498b9605d9dcab63bb2c3 | /output/instances/nistData/atomic/integer/Schema+Instance/NISTXML-SV-IV-atomic-integer-fractionDigits-1-3.py | 32add0c922d5342c7b50eaabb85bc7ee39adc0d0 | [
"MIT"
] | permissive | tefra/xsdata-w3c-tests | 397180205a735b06170aa188f1f39451d2089815 | 081d0908382a0e0b29c8ee9caca6f1c0e36dd6db | refs/heads/main | 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 | MIT | 2023-07-25T14:19:04 | 2020-02-10T21:59:47 | Python | UTF-8 | Python | false | false | 297 | py | from output.models.nist_data.atomic.integer.schema_instance.nistschema_sv_iv_atomic_integer_fraction_digits_1_xsd.nistschema_sv_iv_atomic_integer_fraction_digits_1 import NistschemaSvIvAtomicIntegerFractionDigits1
obj = NistschemaSvIvAtomicIntegerFractionDigits1(
value=825606520242485152
)
| [
"[email protected]"
] | |
bb8c7aede0462de9cd8180f39a0e1b02e5216735 | d3c4848338fe8a36a307c955e8a96f32fc880019 | /tests/test_selenium_common.py | f430bf2ca7c458bc29d650063b205594ee3e569e | [
"MIT"
] | permissive | panarahc/product-database | d111555f5f801c18a7a46c7fd3a2173149d8acd3 | af48bc3e580e3bd7b499990bb7c51aabed242f71 | refs/heads/master | 2023-04-19T16:09:08.115666 | 2021-01-17T22:23:45 | 2021-01-17T22:23:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42,371 | py | """
Test suite for the selenium test cases
"""
import os
import pytest
import time
import re
from django.urls import reverse
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.select import Select
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
from tests import BaseSeleniumTest
@pytest.mark.online
@pytest.mark.selenium
class TestCommonFunctions(BaseSeleniumTest):
def test_login_only_mode(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
# open the homepage
browser.get(liveserver + reverse("productdb:home"))
expected_homepage_text = "This database contains information about network equipment like routers and " \
"switches from multiple vendors."
assert expected_homepage_text in browser.find_element_by_tag_name("body").text
# Login as superuser - verify, that the "continue without login" button is visible
browser.find_element_by_id("navbar_login").click()
time.sleep(3)
expected_login_continue_text = "continue without login"
assert expected_login_continue_text in browser.find_element_by_tag_name("body").text
# login as superuser
browser.find_element_by_id("username").send_keys(self.ADMIN_USERNAME)
browser.find_element_by_id("password").send_keys(self.ADMIN_PASSWORD)
browser.find_element_by_id("login_button").click()
time.sleep(3)
# change settings to login only mode and save settings
browser.find_element_by_id("navbar_admin").click()
browser.find_element_by_id("navbar_admin_settings").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Settings")
browser.find_element_by_id("id_login_only_mode").click()
browser.find_element_by_id("submit").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Settings saved successfully")
# go to the Product Database Homepage - it must be visible
browser.get(liveserver + reverse("productdb:home"))
self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_homepage_text)
# create the product list for the test case
test_pl_name = "LoginOnly Product List"
test_pl_description = "A sample description for the Product List."
test_pl_product_list_ids = "C2960X-STACK;CAB-ACE\nWS-C2960-24TT-L;WS-C2960-24TC-S"
test_pl_product_list_id = "C2960X-STACK"
browser.find_element_by_id("product_list_link").click()
WebDriverWait(browser, 10).until(EC.presence_of_element_located((
By.XPATH,
"id('product_list_table_wrapper')")
))
browser.find_element_by_xpath("//button[span='Add New']").click()
WebDriverWait(browser, 10).until(EC.presence_of_element_located((By.ID, "id_name")))
browser.find_element_by_id("id_name").send_keys(test_pl_name)
browser.find_element_by_id("id_description").send_keys(test_pl_description)
browser.find_element_by_id("id_string_product_list").send_keys(test_pl_product_list_ids)
browser.find_element_by_id("id_vendor").send_keys("C")
browser.find_element_by_id("submit").click()
WebDriverWait(browser, 10).until(EC.presence_of_element_located((
By.XPATH,
"id('product_list_table_wrapper')")
))
# logout - the login screen is visible
browser.find_element_by_id("navbar_loggedin").click()
browser.find_element_by_id("navbar_loggedin_logout").click()
expected_login_text = "Please enter your credentials below."
self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_login_text)
# go manually to the Product Database Homepage - you must be redirected to the login screen
browser.get(liveserver + reverse("productdb:home"))
self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_login_text)
# verify that the "continue without login" button is not visible
assert expected_login_continue_text not in browser.find_element_by_tag_name("body").text
# the product list must be reachable, even when in login only mode
pl = self.api_helper.get_product_list_by_name(liveserver, test_pl_name)
browser.get(liveserver + reverse("productdb:share-product_list", kwargs={"product_list_id": pl["id"]}))
# verify some basic attributes of the page
body = browser.find_element_by_tag_name("body").text
assert test_pl_name in body
assert test_pl_description in body
assert test_pl_product_list_id in body
assert "maintained by %s" % self.ADMIN_DISPLAY_NAME in body
assert "%s</a>" % test_pl_product_list_id not in body, \
"Link to Product Details should not be available"
# login as API user
browser.get(liveserver + reverse("productdb:home"))
browser.find_element_by_id("username").send_keys(self.API_USERNAME)
browser.find_element_by_id("password").send_keys(self.API_PASSWORD)
browser.find_element_by_id("login_button").click()
time.sleep(3)
# the Product Database Homepage must be visible
assert expected_homepage_text in browser.find_element_by_tag_name("body").text
# disable the login only mode
browser.find_element_by_id("navbar_loggedin").click()
browser.find_element_by_id("navbar_loggedin_logout").click()
browser.find_element_by_id("username").send_keys(self.ADMIN_USERNAME)
browser.find_element_by_id("password").send_keys(self.ADMIN_PASSWORD)
browser.find_element_by_id("login_button").click()
time.sleep(3)
browser.find_element_by_id("navbar_admin").click()
browser.find_element_by_id("navbar_admin_settings").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Settings")
assert "Settings" in browser.find_element_by_tag_name("body").text
browser.find_element_by_id("id_login_only_mode").click()
browser.find_element_by_id("submit").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Settings saved successfully")
# delete the new product list
browser.get(liveserver + reverse("productdb:list-product_lists"))
browser.find_element_by_xpath("id('product_list_table')/tbody/tr[1]/td[2]").click()
time.sleep(1)
browser.find_element_by_xpath("//button[span='Delete Selected']").click()
time.sleep(3)
body = browser.find_element_by_tag_name("body").text
assert "Delete Product List" in body
browser.find_element_by_name("really_delete").click()
browser.find_element_by_id("submit").click()
time.sleep(3)
# verify that the product list is deleted
body = browser.find_element_by_tag_name("body").text
assert test_pl_description not in body
assert "Product List %s successfully deleted." % test_pl_name in body
# end session
self.logout_user(browser)
def test_change_password(self, browser, liveserver):
"""
test change password procedure with a different user (part of the selenium_tests fixture)
"""
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
# login as the default API user
browser.get(liveserver + reverse("login"))
browser.find_element_by_id("username").send_keys("testpasswordchange")
browser.find_element_by_id("password").send_keys("api")
browser.find_element_by_id("login_button").click()
time.sleep(3)
# go to the change password dialog
browser.find_element_by_id("navbar_loggedin").click()
browser.find_element_by_id("navbar_loggedin_change_password").click()
time.sleep(3)
assert "Old password" in browser.find_element_by_tag_name("body").text
        # change the password to api1234
browser.find_element_by_id("id_old_password").send_keys("api")
browser.find_element_by_id("id_new_password1").send_keys("api1234")
browser.find_element_by_id("id_new_password2").send_keys("api1234")
browser.find_element_by_id("submit").click()
time.sleep(3)
assert "Password change successful" in browser.find_element_by_tag_name("body").text
# logout
browser.find_element_by_id("navbar_loggedin").click()
browser.find_element_by_id("navbar_loggedin_logout").click()
time.sleep(3)
expected_login_text = "Please enter your credentials below."
assert expected_login_text in browser.find_element_by_tag_name("body").text
# login with new password
browser.find_element_by_id("username").send_keys("testpasswordchange")
browser.find_element_by_id("password").send_keys("api1234")
browser.find_element_by_id("login_button").click()
time.sleep(3)
# the Product Database Homepage must be visible
expected_text = "This database contains information about network equipment like routers and " \
"switches from multiple vendors."
assert expected_text in browser.find_element_by_tag_name("body").text
# end session
self.logout_user(browser)
@pytest.mark.selenium
class TestUserProfile(BaseSeleniumTest):
def test_preferred_vendor_user_profile(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
browser.get(liveserver + reverse("productdb:home"))
# verify the vendor selection if the user is not logged in
browser.find_element_by_id("nav_browse").click()
browser.find_element_by_id("nav_browse_all_vendor_products").click()
assert "Browse Products by Vendor" in browser.find_element_by_class_name("page-header").text, \
"Should view the Browse Product by Vendor page"
# login
browser.find_element_by_id("navbar_login").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Please enter your credentials below.")
homepage_message = "Browse Products by Vendor"
self.login_user(browser, self.API_USERNAME, self.API_PASSWORD, homepage_message)
# verify the selected default vendor
pref_vendor_select = browser.find_element_by_id("vendor_selection")
assert "Cisco Systems" in pref_vendor_select.text, "selected by default"
# view the edit settings page
browser.find_element_by_id("navbar_loggedin").click()
browser.find_element_by_id("navbar_loggedin_user_profile").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Edit User Profile")
# verify that the vendor with the ID 1 is selected
pref_vendor_select = browser.find_element_by_id("id_preferred_vendor")
assert "Cisco Systems" in pref_vendor_select.text
pref_vendor_select = Select(pref_vendor_select)
# change the vendor selection
changed_vendor_name = "Juniper Networks"
pref_vendor_select.select_by_visible_text(changed_vendor_name)
browser.find_element_by_id("submit").send_keys(Keys.ENTER)
# redirect to the Browse Products by Vendor
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Browse Products by Vendor")
# verify that the new default vendor is selected
pref_vendor_select = browser.find_element_by_id("vendor_selection")
assert changed_vendor_name in pref_vendor_select.text
# end session
self.logout_user(browser)
def test_email_change_in_user_profile(self, browser, liveserver):
"""
use separate user from the selenium_tests fixture
"""
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
browser.get(liveserver + reverse("productdb:home"))
# login
browser.find_element_by_id("navbar_login").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Please enter your credentials below.")
homepage_message = "This database contains information about network equipment like routers and switches " \
"from multiple vendors."
self.login_user(browser, "testuserprofilemail", self.API_PASSWORD, homepage_message)
# view the edit settings page
browser.find_element_by_id("navbar_loggedin").click()
browser.find_element_by_id("navbar_loggedin_user_profile").click()
assert "[email protected]" in browser.find_element_by_id("id_email").get_attribute('value')
# change email
new_email = "[email protected]"
browser.find_element_by_id("id_email").clear()
browser.find_element_by_id("id_email").send_keys(new_email)
browser.find_element_by_id("submit").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, homepage_message)
# verify redirect to homepage
assert "User Profile successful updated" in browser.find_element_by_tag_name("body").text, \
"Should view a message that the user profile was saved"
# verify new value in email address
browser.find_element_by_id("navbar_loggedin").click()
browser.find_element_by_id("navbar_loggedin_user_profile").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Edit User Profile")
assert new_email in browser.find_element_by_id("id_email").get_attribute('value'), \
"Show view the correct email address of the user (%s)" % new_email
# end session
self.logout_user(browser)
def test_search_option_in_user_profile(self, browser, liveserver):
"""
use separate user from the selenium_tests fixture
"""
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
search_term = "WS-C2960X-24T(D|S)"
browser.get(liveserver + reverse("productdb:home"))
# login
homepage_message = "This database contains information about network equipment like routers and switches " \
"from multiple vendors."
browser.find_element_by_id("navbar_login").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Please enter your credentials below.")
self.login_user(browser, "testregexsession", self.API_PASSWORD, homepage_message)
# go to the all products view
expected_content = "On this page, you can view all products that are stored in the database."
browser.find_element_by_id("nav_browse").click()
browser.find_element_by_id("nav_browse_all_products").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_content)
# try to search for the product
browser.find_element_by_id("column_search_Product ID").send_keys(search_term)
self.wait_for_text_to_be_displayed_in_body_tag(browser, "No matching records found")
# enable the regular expression search feature in the user profile
browser.find_element_by_id("navbar_loggedin").click()
browser.find_element_by_id("navbar_loggedin_user_profile").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Contact eMail:")
expected_content = "On this page, you can view all products that are stored in the database."
browser.find_element_by_id("id_regex_search").click()
browser.find_element_by_id("submit").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_content)
browser.find_element_by_id("column_search_Product ID").send_keys(search_term)
time.sleep(3)
assert "WS-C2960X-24TS" in browser.find_element_by_tag_name("body").text, \
"Should show no results (regular expression is used but by default not enabled)"
assert "WS-C2960X-24TD" in browser.find_element_by_tag_name("body").text, \
"Should show no results (regular expression is used but by default not enabled)"
# end session
self.logout_user(browser)
@pytest.mark.selenium
class TestProductLists(BaseSeleniumTest):
def test_product_list(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
add_button_xpath = "//button[span='Add New']"
edit_button_xpath = "//button[span='Edit Selected']"
delete_button_xpath = "//button[span='Delete Selected']"
test_pl_name = "Test Product List"
test_pl_description = "A sample description for the Product List."
test_pl_product_list_ids = "C2960X-STACK;CAB-ACE\nWS-C2960-24TT-L;WS-C2960-24TC-S"
test_pl_product_list_id = "C2960X-STACK"
# open the homepage
browser.get(liveserver + reverse("productdb:home"))
# go to product list view
browser.find_element_by_id("nav_browse").click()
browser.find_element_by_id("nav_browse_all_product_lists").click()
time.sleep(3)
# verify that the add, edit and delete button is not visible
body = browser.find_element_by_tag_name("body").text
assert "Add New" not in body
assert "Edit Selected" not in body
assert "Delete Selected" not in body
# login to the page as admin user
browser.find_element_by_id("navbar_login").click()
time.sleep(3)
self.login_user(browser, self.ADMIN_USERNAME, self.ADMIN_PASSWORD, "All Product Lists")
# verify that the add, edit and delete buttons are visible
body = browser.find_element_by_tag_name("body").text
assert "Add New" in body
assert "Edit Selected" in body
assert "Delete Selected" in body
# create a new product list
browser.find_element_by_xpath(add_button_xpath).click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Add Product List")
browser.find_element_by_id("id_name").send_keys(test_pl_name)
browser.find_element_by_id("id_description").send_keys(test_pl_description)
browser.find_element_by_id("id_string_product_list").send_keys(test_pl_product_list_ids)
browser.find_element_by_id("id_vendor").send_keys("C")
browser.find_element_by_id("submit").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Lists")
assert test_pl_name in browser.find_element_by_tag_name("body").text
# view the newly created product list
browser.find_element_by_link_text(test_pl_name).click()
time.sleep(3)
body = browser.find_element_by_tag_name("body").text
assert test_pl_name in body
assert test_pl_description in body
assert test_pl_product_list_id in body
assert "maintained by %s" % self.ADMIN_DISPLAY_NAME in body
assert browser.find_element_by_link_text(test_pl_product_list_id) is not None, \
"Link to Product Details should be available"
# go back to the product list overview
browser.find_element_by_id("_back").click()
# edit the new product list
browser.find_element_by_xpath("id('product_list_table')/tbody/tr[1]/td[2]").click()
time.sleep(3)
browser.find_element_by_xpath(edit_button_xpath).click()
time.sleep(3)
browser.find_element_by_id("id_description").send_keys(" EDITED")
test_pl_description += " EDITED"
browser.find_element_by_id("submit").click()
time.sleep(3)
body = browser.find_element_by_tag_name("body").text
assert test_pl_description in body
# delete the new product list
browser.find_element_by_xpath("id('product_list_table')/tbody/tr[1]/td[2]").click()
time.sleep(1)
browser.find_element_by_xpath(delete_button_xpath).click()
time.sleep(3)
body = browser.find_element_by_tag_name("body").text
assert "Delete Product List" in body
browser.find_element_by_name("really_delete").click()
browser.find_element_by_id("submit").click()
time.sleep(3)
# verify that the product list is deleted
body = browser.find_element_by_tag_name("body").text
assert test_pl_description not in body
assert "Product List %s successfully deleted." % test_pl_name in body
@pytest.mark.selenium
class TestProductDatabaseViews(BaseSeleniumTest):
def test_search_on_homepage(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
# navigate to the homepage
browser.get(liveserver + reverse("productdb:home"))
browser.find_element_by_id("search_text_field").send_keys("WS-C2960X-24")
browser.find_element_by_id("submit_search").click()
# verify page by page title
assert "All Products" in browser.find_element_by_tag_name("body").text
time.sleep(2)
# test table content
expected_table_content = """Vendor Product ID Description List Price Lifecycle State"""
contain_table_rows = [
"Cisco Systems WS-C2960X-24PD-L Catalyst 2960-X 24 GigE PoE 370W, 2 x 10G SFP+, LAN Base 4595.00 USD",
"Cisco Systems WS-C2960X-24PS-L Catalyst 2960-X 24 GigE PoE 370W, 4 x 1G SFP, LAN Base 3195.00 USD",
]
not_contain_table_rows = [
"Juniper Networks"
]
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
for r in contain_table_rows:
assert r in table.text
for r in not_contain_table_rows:
assert r not in table.text
def test_product_group_view(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
# navigate to the homepage
browser.get(liveserver + reverse("productdb:home"))
# go to the "All Product Groups" view
browser.find_element_by_id("nav_browse").click()
browser.find_element_by_id("nav_browse_all_product_groups").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "All Product Groups")
# test table content
expected_table_content = """Vendor\nName"""
table_rows = [
'Cisco Systems Catalyst 3850',
'Cisco Systems Catalyst 2960X',
'Cisco Systems Catalyst 2960',
'Juniper Networks EX2200',
]
table = browser.find_element_by_id('product_group_table')
self.wait_for_text_to_be_displayed_in_body_tag(browser, expected_table_content)
for r in table_rows:
assert r in table.text
# search product group by vendor column
table_rows = [
'Juniper Networks EX2200',
]
browser.find_element_by_id("column_search_Vendor").send_keys("Juni")
table = browser.find_element_by_id('product_group_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
browser.find_element_by_id("column_search_Vendor").clear()
        # search product group by name column
table_rows = [
'Cisco Systems Catalyst 3850',
'Cisco Systems Catalyst 2960X',
'Cisco Systems Catalyst 2960',
]
browser.find_element_by_id("column_search_Name").send_keys("yst")
time.sleep(2)
table = browser.find_element_by_id('product_group_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
browser.find_element_by_id("column_search_Name").clear()
time.sleep(2)
# click on the "Catalyst 2960X" link
browser.find_element_by_partial_link_text("Catalyst 2960X").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Catalyst 2960X Product Group details")
# verify table content
expected_table_content = """Product ID\nDescription\nList Price Lifecycle State"""
table_rows = [
'C2960X-STACK',
'CAB-ACE',
'CAB-STK-E-0.5M',
]
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
        # search the product table by description column
table_rows = [
'WS-C2960X-24PD-L',
'WS-C2960X-24TD-L',
]
browser.find_element_by_id("column_search_Description").send_keys("2 x")
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
browser.find_element_by_id("column_search_Description").clear()
time.sleep(2)
# open detail page
browser.find_element_by_partial_link_text("C2960X-STACK").click()
detail_link = browser.current_url
self.wait_for_text_to_be_displayed_in_body_tag(browser, "C2960X-STACK Product details")
# verify that the "Internal Product ID" is not visible (because not set)
assert "Internal Product ID" not in browser.find_element_by_tag_name("body").text
# add an internal product ID and verify that it is visible
test_internal_product_id = "123456789-abcdef"
p = self.api_helper.update_product(liveserver_url=liveserver, product_id="C2960X-STACK",
internal_product_id=test_internal_product_id)
browser.get(liveserver + reverse("productdb:product-detail", kwargs={"product_id": p["id"]}))
page_text = browser.find_element_by_tag_name("body").text
assert "Internal Product ID" in page_text
assert test_internal_product_id in page_text
# end session
self.logout_user(browser)
def test_add_notification_message(self, browser, liveserver):
# go to the Product Database Homepage
browser.get(liveserver + reverse("productdb:home"))
browser.find_element_by_id("navbar_login").click()
time.sleep(3)
expected_homepage_text = "This database contains information about network equipment like routers and " \
"switches from multiple vendors."
self.login_user(
browser,
expected_content=expected_homepage_text,
username=self.ADMIN_USERNAME,
password=self.ADMIN_PASSWORD
)
# add a new notification message
browser.find_element_by_id("navbar_admin").click()
browser.find_element_by_id("navbar_admin_notification_message").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "Add Notification Message")
# add content
title = "My message title"
summary_message = "summary message"
detailed_message = "detailed message"
browser.find_element_by_id("id_title").send_keys(title)
browser.find_element_by_id("id_summary_message").send_keys(summary_message)
browser.find_element_by_id("id_detailed_message").send_keys(detailed_message)
browser.find_element_by_id("submit").click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, title)
assert summary_message in browser.find_element_by_tag_name("body").text
# end session
self.logout_user(browser)
def test_browse_products_view(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
expected_cisco_row = "C2960X-STACK Catalyst 2960-X FlexStack Plus Stacking Module 1195.00 USD"
expected_juniper_row = "EX-SFP-1GE-LX SFP 1000Base-LX Gigabit Ethernet Optics, 1310nm for " \
"10km transmission on SMF 1000.00 USD"
default_vendor = "Cisco Systems"
# a user hits the browse product list url
browser.get(liveserver + reverse("productdb:browse_vendor_products"))
time.sleep(5)
# check that the user sees a table
page_text = browser.find_element_by_tag_name('body').text
assert "Showing 1 to" in page_text
# the user sees a selection field, where the value "Cisco Systems" is selected
pl_selection = browser.find_element_by_id("vendor_selection")
assert default_vendor in pl_selection.text
        # the table has four buttons: Copy, CSV, Excel and PDF
dt_buttons = browser.find_element_by_class_name("dt-buttons")
assert "PDF" == dt_buttons.find_element_by_xpath("//button[span='PDF']").text
assert "Copy" == dt_buttons.find_element_by_xpath("//button[span='Copy']").text
assert "CSV" == dt_buttons.find_element_by_xpath("//button[span='CSV']").text
assert "Excel" == dt_buttons.find_element_by_xpath("//button[span='Excel']").text
        # the table shows 10 entries from the list (below the table, there is a string "Showing 1 to 10 of \d+ entries")
dt_wrapper = browser.find_element_by_id("product_table_info")
assert re.match(r"Showing 1 to \d+ of \d+ entries", dt_wrapper.text) is not None
        # the table contains the element "C2960X-STACK"
table = browser.find_element_by_id('product_table')
rows = table.find_elements_by_tag_name('tr')
assert expected_cisco_row in [row.text for row in rows]
# navigate to a detail view
link = browser.find_element_by_link_text("PWR-C1-350WAC")
browser.execute_script("return arguments[0].scrollIntoView();", link)
time.sleep(1)
test_product_id = "WS-C2960-24LT-L"
browser.find_element_by_link_text(test_product_id).click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "%s Product details" % test_product_id)
# reopen the browse vendor products table
browser.get(liveserver + reverse("productdb:browse_vendor_products"))
time.sleep(5)
# the user sees a selection field, where the value "Cisco Systems" is selected
pl_selection = browser.find_element_by_id("vendor_selection")
assert default_vendor in pl_selection.text
pl_selection = Select(pl_selection)
# the user chooses the list named "Juniper Networks" and press the button "view product list"
pl_selection.select_by_visible_text("Juniper Networks")
browser.find_element_by_id("submit").send_keys(Keys.ENTER)
self.wait_for_text_to_be_displayed_in_body_tag(browser, "EX-SFP-1GE-LX")
        # the page reloads and the table now contains the element "EX-SFP-1GE-LX"
table = browser.find_element_by_id('product_table')
rows = table.find_elements_by_tag_name('tr')
        match = False
        for i in range(0, 3):
            match = expected_juniper_row in [row.text for row in rows]
            if match:
                break
            time.sleep(3)
            # refresh the row snapshot before the next retry
            rows = table.find_elements_by_tag_name('tr')
        if not match:
            pytest.fail("Element not found")
def test_browse_products_view_csv_export(self, browser, liveserver, test_download_dir):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
# a user hits the browse product list url
browser.get(liveserver + reverse("productdb:browse_vendor_products"))
# the user sees a selection field, where the value "Cisco Systems" is selected
vendor_name = "Cisco Systems"
pl_selection = browser.find_element_by_id("vendor_selection")
assert vendor_name in pl_selection.text
# the user hits the button CSV
dt_buttons = browser.find_element_by_class_name("dt-buttons")
dt_buttons.find_element_by_xpath("//button[span='CSV']").click()
# the file should download automatically (firefox is configured this way)
time.sleep(2)
# verify that the file is a CSV formatted field (with ";" as delimiter)
file = os.path.join(test_download_dir, "export products - %s.csv" % vendor_name)
with open(file, "r+", encoding="utf-8") as f:
assert "Product ID;Description;List Price;Lifecycle State\n" == f.readline()
def test_search_function_on_browse_vendor_products_view(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
# a user hits the browse product list url
browser.get(liveserver + reverse("productdb:browse_vendor_products"))
time.sleep(5)
# he enters a search term in the search box
search_term = "WS-C2960X-24P"
search_xpath = '//div[@class="col-sm-4"]/div[@id="product_table_filter"]/label/input[@type="search"]'
search = browser.find_element_by_xpath(search_xpath)
search.send_keys(search_term)
time.sleep(3)
# show product groups
dt_buttons = browser.find_element_by_class_name("dt-buttons")
dt_buttons.find_element_by_xpath("//button[span='show additional columns ']").click()
browser.find_element_by_link_text("Internal Product ID").click()
browser.find_element_by_link_text("Product Group").click()
# the table performs the search function and a defined amount of rows is displayed
expected_table_content = "Product ID Product Group Description " \
"List Price Lifecycle State Internal Product ID"
table_rows = [
"WS-C2960X-24PD-L Catalyst 2960X Catalyst 2960-X 24 GigE PoE 370W, 2 x 10G SFP+, "
"LAN Base 4595.00 USD 2960x-24pd-l",
"WS-C2960X-24PS-L Catalyst 2960X Catalyst 2960-X 24 GigE PoE 370W, 4 x 1G SFP, "
"LAN Base 3195.00 USD 2960x-24ps-l"
]
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
browser.find_element_by_xpath(search_xpath).clear()
time.sleep(1)
# search product by column (contains)
browser.find_element_by_id("column_search_Product ID").send_keys("WS-C2960X-24P")
time.sleep(2)
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
browser.find_element_by_id("column_search_Product ID").clear()
        # search product group by column (contains)
browser.find_element_by_id("column_search_Product Group").send_keys("2960X")
time.sleep(2)
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
browser.find_element_by_id("column_search_Product Group").clear()
# search description by column
browser.find_element_by_id("column_search_Description").send_keys("10G SFP")
time.sleep(2)
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
assert table_rows[0] in table.text
browser.find_element_by_id("column_search_Description").clear()
        # search by list price column
browser.find_element_by_id("column_search_List Price").send_keys("3195")
time.sleep(2)
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
        assert table_rows[1] in table.text
browser.find_element_by_id("column_search_List Price").clear()
def test_browse_all_products_view(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
expected_cisco_row = "Cisco Systems C2960X-STACK Catalyst 2960-X FlexStack Plus Stacking Module 1195.00 USD"
expected_juniper_row = "Juniper Networks EX-SFP-1GE-LX SFP 1000Base-LX Gigabit Ethernet Optics, 1310nm for " \
"10km transmission on SMF 1000.00 USD"
# a user hits the browse product list url
browser.get(liveserver + reverse("productdb:all_products"))
# check that the user sees a table
time.sleep(5)
page_text = browser.find_element_by_tag_name('body').text
assert "Showing 1 to" in page_text
        # the table has four buttons: Copy, CSV, Excel and PDF
dt_buttons = browser.find_element_by_class_name("dt-buttons")
assert "PDF" == dt_buttons.find_element_by_xpath("//button[span='PDF']").text
assert "Copy" == dt_buttons.find_element_by_xpath("//button[span='Copy']").text
assert "CSV" == dt_buttons.find_element_by_xpath("//button[span='CSV']").text
assert "Excel" == dt_buttons.find_element_by_xpath("//button[span='Excel']").text
        # the table shows 10 entries from the list (below the table, there is a string "Showing 1 to 10 of \d+ entries")
dt_wrapper = browser.find_element_by_id("product_table_info")
assert re.match(r"Showing 1 to \d+ of \d+ entries", dt_wrapper.text) is not None
        # the table contains the element "C2960X-STACK"
table = browser.find_element_by_id('product_table')
rows = table.find_elements_by_tag_name('tr')
assert expected_cisco_row in [row.text for row in rows]
        # the table should also contain the element "EX-SFP-1GE-LX"
table = browser.find_element_by_id('product_table')
rows = table.find_elements_by_tag_name('tr')
        match = False
        for i in range(0, 3):
            match = expected_juniper_row in [row.text for row in rows]
            if match:
                break
            time.sleep(3)
            # refresh the row snapshot before the next retry
            rows = table.find_elements_by_tag_name('tr')
        if not match:
            pytest.fail("Element not found")
# navigate to a detail view
test_product_id = "GLC-LH-SMD="
browser.find_element_by_link_text(test_product_id).click()
self.wait_for_text_to_be_displayed_in_body_tag(browser, "%s Product details" % test_product_id)
def test_browse_all_products_view_csv_export(self, browser, liveserver, test_download_dir):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
# a user hits the browse product list url
browser.get(liveserver + reverse("productdb:all_products"))
# the user hits the button CSV
dt_buttons = browser.find_element_by_class_name("dt-buttons")
dt_buttons.find_element_by_xpath("//button[span='CSV']").click()
# the file should download automatically (firefox is configured this way)
time.sleep(2)
# verify that the file is a CSV formatted field (with ";" as delimiter)
file = os.path.join(test_download_dir, "export products.csv")
with open(file, "r+", encoding="utf-8") as f:
assert "Vendor;Product ID;Description;List Price;Lifecycle State\n" == f.readline()
def test_search_function_on_all_products_view(self, browser, liveserver):
self.api_helper.drop_all_data(liveserver)
self.api_helper.load_base_test_data(liveserver)
# a user hits the browse product list url
browser.get(liveserver + reverse("productdb:all_products"))
# he enters a search term in the search box
search_term = "WS-C2960X-24P"
search_xpath = '//div[@class="col-sm-4"]/div[@id="product_table_filter"]/label/input[@type="search"]'
search = browser.find_element_by_xpath(search_xpath)
search.send_keys(search_term)
time.sleep(3)
# the table performs the search function and a defined amount of rows is displayed
expected_table_content = """Vendor Product ID Description List Price Lifecycle State"""
table_rows = [
'WS-C2960X-24PD-L Catalyst 2960-X 24 GigE PoE 370W, 2 x 10G SFP+, LAN Base 4595.00 USD',
'WS-C2960X-24PS-L Catalyst 2960-X 24 GigE PoE 370W, 4 x 1G SFP, LAN Base 3195.00 USD',
]
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
browser.find_element_by_xpath(search_xpath).clear()
time.sleep(1)
# search vendor by column
browser.find_element_by_id("column_search_Vendor").send_keys("Cisco")
time.sleep(2)
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
browser.find_element_by_id("column_search_Vendor").clear()
# search product by column
browser.find_element_by_id("column_search_Product ID").send_keys("WS-C2960X-24P")
time.sleep(2)
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
for r in table_rows:
assert r in table.text
browser.find_element_by_id("column_search_Product ID").clear()
# search description by column
browser.find_element_by_id("column_search_Description").send_keys("10G SFP")
time.sleep(2)
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
assert table_rows[0] in table.text
browser.find_element_by_id("column_search_Description").clear()
        # search by list price column
browser.find_element_by_id("column_search_List Price").send_keys("3195")
time.sleep(2)
table = browser.find_element_by_id('product_table')
assert expected_table_content in table.text
        assert table_rows[1] in table.text
browser.find_element_by_id("column_search_List Price").clear()
| [
"[email protected]"
] | |
cca9f2e5ed6c7cd9fe744913449f05e61d1ed854 | 8a47ab47a101d4b44dd056c92a1763d5fac94f75 | /力扣/简单练习/300-最长上升子序列.py | edecfbee733ea3c1f051716235583aa67c1a5524 | [] | no_license | Clint-cc/Leecode | d5528aa7550a13a5bcf2f3913be2d5db2b5299f3 | 8befe73ab3eca636944800e0be27c179c45e1dbf | refs/heads/master | 2020-09-14T07:35:41.382377 | 2020-07-01T01:27:18 | 2020-07-01T01:27:18 | 223,066,742 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,663 | py | # !D:/Code/python
# -*- coding:utf-8 -*-
# @Author : Clint
# @Question : Given an unsorted array of integers, find the length of the longest increasing subsequence.
def lengthOfLIS(nums):
'''
    Idea: scan the array; while the next element is >= the current one, increment count;
    otherwise compare count with max_count and reset. Finally return max_count.
    Trap: this greedy scan only finds the longest *contiguous* non-decreasing run; for the
    input [10,9,2,5,3,7,101,18] the expected output is 4: the subsequence [2,3,7,101].
:param nums:
:return:
'''
count = 1
max_count = 1
for i in range(len(nums) - 1):
if nums[i + 1] >= nums[i]:
count += 1
else:
if count > max_count:
max_count = count
count = 1
else:
count = 1
if max_count < count:
max_count = count
return max_count
# Dynamic programming: dp[i] = length of the longest increasing subsequence ending at nums[i]; O(n^2)
def lengthOfLIS(nums):
if not nums:
return 0
dp = [1] * len(nums)
for i in range(len(nums)):
for j in range(i):
            if nums[j] < nums[i]:  # For a non-strictly increasing subsequence, change '<' to '<=' here.
dp[i] = max(dp[i], dp[j] + 1)
return max(dp)
# Binary search (patience method): d[k] holds the smallest possible tail of an increasing subsequence of length k+1; O(n log n)
def lengthOfLIS(nums):
d = []
for n in nums:
if not d or n > d[-1]:
d.append(n)
else:
l, r = 0, len(d) - 1
loc = r
while l <= r:
mid = (l + r) // 2
if d[mid] >= n:
loc = mid
r = mid - 1
else:
l = mid + 1
d[loc] = n
return len(d)
print(lengthOfLIS([1, 2, 5, 3, 7, 11, 18]))
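
# Sanity check: the three variants above all share the name lengthOfLIS, so only the
# final (binary-search) definition is live here; it handles non-contiguous subsequences.
print(lengthOfLIS([10, 9, 2, 5, 3, 7, 101, 18]))  # expected: 4 ([2, 3, 7, 101])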
| [
"[email protected]"
] | |
3039c444c18f0b492a472774de7ddcf70fefc723 | 01dd174a3a7d26226564711e32711f137513663f | /pyscf/grad/uks.py | cb2d2d1dd98fc530fe5f2ab8a3bcd9d5ad9f1214 | [
"Apache-2.0"
] | permissive | cherishyli/pyscf | 00cb09c873edc8890be8501414678cdfa54b177e | 468a4bfc4ce067eb7dab6f9289d71122b219609e | refs/heads/master | 2020-04-18T11:40:00.398066 | 2019-01-24T23:07:36 | 2019-01-24T23:07:36 | 167,508,739 | 1 | 0 | Apache-2.0 | 2019-01-25T08:00:12 | 2019-01-25T08:00:12 | null | UTF-8 | Python | false | false | 10,924 | py | #!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <[email protected]>
#
'''Non-relativistic UKS analytical nuclear gradients'''
import time
import numpy
import scipy.linalg
from pyscf import lib
from pyscf.lib import logger
from pyscf.grad import rhf as rhf_grad
from pyscf.grad import rks as rks_grad
from pyscf.grad import uhf as uhf_grad
from pyscf.dft import numint, gen_grid
from pyscf import __config__
def get_veff(ks_grad, mol=None, dm=None):
'''Coulomb + XC functional
'''
if mol is None: mol = ks_grad.mol
if dm is None: dm = ks_grad.base.make_rdm1()
t0 = (time.clock(), time.time())
mf = ks_grad.base
ni = mf._numint
if ks_grad.grids is not None:
grids = ks_grad.grids
else:
grids = mf.grids
if grids.coords is None:
grids.build(with_non0tab=True)
if mf.nlc != '':
raise NotImplementedError
#enabling range-separated hybrids
omega, alpha, hyb = ni.rsh_and_hybrid_coeff(mf.xc, spin=mol.spin)
mem_now = lib.current_memory()[0]
max_memory = max(2000, ks_grad.max_memory*.9-mem_now)
if ks_grad.grid_response:
exc, vxc = get_vxc_full_response(ni, mol, grids, mf.xc, dm,
max_memory=max_memory,
verbose=ks_grad.verbose)
logger.debug1(ks_grad, 'sum(grids response) %s', exc.sum(axis=0))
else:
exc, vxc = get_vxc(ni, mol, grids, mf.xc, dm,
max_memory=max_memory, verbose=ks_grad.verbose)
t0 = logger.timer(ks_grad, 'vxc', *t0)
if abs(hyb) < 1e-10:
vj = ks_grad.get_j(mol, dm)
vxc += vj[0] + vj[1]
else:
vj, vk = ks_grad.get_jk(mol, dm)
vk *= hyb
if abs(omega) > 1e-10: # For range separated Coulomb operator
with mol.with_range_coulomb(omega):
vk += ks_grad.get_k(mol, dm) * (alpha - hyb)
vxc += vj[0] + vj[1] - vk
return lib.tag_array(vxc, exc1_grid=exc)
def get_vxc(ni, mol, grids, xc_code, dms, relativity=0, hermi=1,
max_memory=2000, verbose=None):
xctype = ni._xc_type(xc_code)
make_rho, nset, nao = ni._gen_rho_evaluator(mol, dms, hermi)
ao_loc = mol.ao_loc_nr()
vmat = numpy.zeros((2,3,nao,nao))
if xctype == 'LDA':
ao_deriv = 1
for ao, mask, weight, coords \
in ni.block_loop(mol, grids, nao, ao_deriv, max_memory):
rho_a = make_rho(0, ao[0], mask, 'LDA')
rho_b = make_rho(1, ao[0], mask, 'LDA')
vxc = ni.eval_xc(xc_code, (rho_a,rho_b), 1, relativity, 1, verbose)[1]
vrho = vxc[0]
aow = numpy.einsum('pi,p->pi', ao[0], weight*vrho[:,0])
rks_grad._d1_dot_(vmat[0], mol, ao[1:4], aow, mask, ao_loc, True)
aow = numpy.einsum('pi,p->pi', ao[0], weight*vrho[:,1])
rks_grad._d1_dot_(vmat[1], mol, ao[1:4], aow, mask, ao_loc, True)
rho = vxc = vrho = aow = None
elif xctype == 'GGA':
ao_deriv = 2
for ao, mask, weight, coords \
in ni.block_loop(mol, grids, nao, ao_deriv, max_memory):
rho_a = make_rho(0, ao[:4], mask, 'GGA')
rho_b = make_rho(1, ao[:4], mask, 'GGA')
vxc = ni.eval_xc(xc_code, (rho_a,rho_b), 1, relativity, 1, verbose)[1]
wva, wvb = numint._uks_gga_wv0((rho_a,rho_b), vxc, weight)
rks_grad._gga_grad_sum_(vmat[0], mol, ao, wva, mask, ao_loc)
rks_grad._gga_grad_sum_(vmat[1], mol, ao, wvb, mask, ao_loc)
rho_a = rho_b = vxc = wva = wvb = None
elif xctype == 'NLC':
raise NotImplementedError('NLC')
else:
raise NotImplementedError('meta-GGA')
exc = numpy.zeros((mol.natm,3))
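    # exc is only a zero-filled per-atom placeholder here; the actual per-atom XC
    # energy derivatives are computed in get_vxc_full_response (grid_response=True).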
# - sign because nabla_X = -nabla_x
return exc, -vmat
def get_vxc_full_response(ni, mol, grids, xc_code, dms, relativity=0, hermi=1,
max_memory=2000, verbose=None):
'''Full response including the response of the grids'''
xctype = ni._xc_type(xc_code)
make_rho, nset, nao = ni._gen_rho_evaluator(mol, dms, hermi)
ao_loc = mol.ao_loc_nr()
aoslices = mol.aoslice_by_atom()
excsum = 0
vmat = numpy.zeros((2,3,nao,nao))
if xctype == 'LDA':
ao_deriv = 1
for atm_id, (coords, weight, weight1) \
in enumerate(rks_grad.grids_response_cc(grids)):
ngrids = weight.size
sh0, sh1 = aoslices[atm_id][:2]
mask = gen_grid.make_mask(mol, coords)
ao = ni.eval_ao(mol, coords, deriv=ao_deriv, non0tab=mask)
rho_a = make_rho(0, ao[0], mask, 'LDA')
rho_b = make_rho(1, ao[0], mask, 'LDA')
exc, vxc = ni.eval_xc(xc_code, (rho_a,rho_b), 1, relativity, 1, verbose)[:2]
vrho = vxc[0]
vtmp = numpy.zeros((3,nao,nao))
aow = numpy.einsum('pi,p->pi', ao[0], weight*vrho[:,0])
rks_grad._d1_dot_(vtmp, mol, ao[1:4], aow, mask, ao_loc, True)
vmat[0] += vtmp
excsum += numpy.einsum('r,r,nxr->nx', exc, rho_a+rho_b, weight1)
excsum[atm_id] += numpy.einsum('xij,ji->x', vtmp, dms[0]) * 2
vtmp = numpy.zeros((3,nao,nao))
aow = numpy.einsum('pi,p->pi', ao[0], weight*vrho[:,1])
rks_grad._d1_dot_(vtmp, mol, ao[1:4], aow, mask, ao_loc, True)
vmat[1] += vtmp
excsum[atm_id] += numpy.einsum('xij,ji->x', vtmp, dms[1]) * 2
rho = vxc = vrho = aow = None
elif xctype == 'GGA':
ao_deriv = 2
for atm_id, (coords, weight, weight1) \
in enumerate(rks_grad.grids_response_cc(grids)):
ngrids = weight.size
sh0, sh1 = aoslices[atm_id][:2]
mask = gen_grid.make_mask(mol, coords)
ao = ni.eval_ao(mol, coords, deriv=ao_deriv, non0tab=mask)
rho_a = make_rho(0, ao[:4], mask, 'GGA')
rho_b = make_rho(1, ao[:4], mask, 'GGA')
exc, vxc = ni.eval_xc(xc_code, (rho_a,rho_b), 1, relativity, 1, verbose)[:2]
wva, wvb = numint._uks_gga_wv0((rho_a,rho_b), vxc, weight)
vtmp = numpy.zeros((3,nao,nao))
rks_grad._gga_grad_sum_(vtmp, mol, ao, wva, mask, ao_loc)
vmat[0] += vtmp
excsum += numpy.einsum('r,r,nxr->nx', exc, rho_a[0]+rho_b[0], weight1)
excsum[atm_id] += numpy.einsum('xij,ji->x', vtmp, dms[0]) * 2
vtmp = numpy.zeros((3,nao,nao))
rks_grad._gga_grad_sum_(vtmp, mol, ao, wvb, mask, ao_loc)
vmat[1] += vtmp
excsum[atm_id] += numpy.einsum('xij,ji->x', vtmp, dms[1]) * 2
rho_a = rho_b = vxc = wva = wvb = None
elif xctype == 'NLC':
raise NotImplementedError('NLC')
else:
raise NotImplementedError('meta-GGA')
# - sign because nabla_X = -nabla_x
return excsum, -vmat
class Gradients(uhf_grad.Gradients):
grid_response = getattr(__config__, 'grad_uks_Gradients_grid_response', False)
def __init__(self, mf):
uhf_grad.Gradients.__init__(self, mf)
self.grids = None
self.grid_response = False
self._keys = self._keys.union(['grid_response', 'grids'])
def dump_flags(self):
uhf_grad.Gradients.dump_flags(self)
logger.info(self, 'grid_response = %s', self.grid_response)
return self
get_veff = get_veff
def extra_force(self, atom_id, envs):
'''Hook for extra contributions in analytical gradients.
Contributions like the response of auxiliary basis in density fitting
method, the grid response in DFT numerical integration can be put in
this function.
'''
if self.grid_response:
vhf = envs['vhf']
log = envs['log']
log.debug('grids response for atom %d %s',
atom_id, vhf.exc1_grid[atom_id])
return vhf.exc1_grid[atom_id]
else:
return 0
Grad = Gradients
if __name__ == '__main__':
from pyscf import gto
from pyscf import dft
mol = gto.Mole()
mol.atom = [
['O' , (0. , 0. , 0.)],
[1 , (0. , -0.757 , 0.587)],
[1 , (0. , 0.757 , 0.587)] ]
mol.basis = '631g'
mol.charge = 1
mol.spin = 1
mol.build()
mf = dft.UKS(mol)
mf.conv_tol = 1e-12
#mf.grids.atom_grid = (20,86)
e0 = mf.scf()
g = Gradients(mf)
print(lib.finger(g.kernel()) - -0.12090786243525126)
#[[-5.23195019e-16 -5.70291415e-16 5.32918387e-02]
# [ 1.33417513e-16 6.75277008e-02 -2.66519852e-02]
# [ 1.72274651e-16 -6.75277008e-02 -2.66519852e-02]]
g.grid_response = True
print(lib.finger(g.kernel()) - -0.12091122429043633)
#[[-2.95956939e-16 -4.22275612e-16 5.32998759e-02]
# [ 1.34532051e-16 6.75279140e-02 -2.66499379e-02]
# [ 1.68146089e-16 -6.75279140e-02 -2.66499379e-02]]
mf.xc = 'b88,p86'
e0 = mf.scf()
g = Gradients(mf)
print(lib.finger(g.kernel()) - -0.11509739136150157)
#[[ 2.58483362e-16 5.82369026e-16 5.17616036e-02]
# [-5.46977470e-17 6.39273304e-02 -2.58849008e-02]
# [ 5.58302713e-17 -6.39273304e-02 -2.58849008e-02]]
g.grid_response = True
print(lib.finger(g.kernel()) - -0.11507986316077731)
mf.xc = 'b3lypg'
e0 = mf.scf()
g = Gradients(mf)
print(lib.finger(g.kernel()) - -0.10202554999695367)
#[[ 6.47874920e-16 -2.75292214e-16 3.97215970e-02]
# [-6.60278148e-17 5.87909340e-02 -1.98650384e-02]
# [ 6.75500259e-18 -5.87909340e-02 -1.98650384e-02]]
mol = gto.Mole()
mol.atom = [
['H' , (0. , 0. , 1.804)],
['F' , (0. , 0. , 0. )], ]
mol.unit = 'B'
mol.basis = '631g'
mol.charge = -1
mol.spin = 1
mol.build()
mf = dft.UKS(mol)
mf.conv_tol = 1e-14
mf.kernel()
print(lib.finger(Gradients(mf).kernel()) - 0.10365160440876001)
# sum over z direction non-zero, due to meshgrid response
# H -0.0000000000 0.0000000000 -0.1481125370
# F -0.0000000000 0.0000000000 0.1481164667
mf = dft.UKS(mol)
mf.grids.prune = None
mf.grids.level = 6
mf.conv_tol = 1e-14
mf.kernel()
print(lib.finger(Gradients(mf).kernel()) - 0.10365040148752827)
# H 0.0000000000 0.0000000000 -0.1481124925
# F -0.0000000000 0.0000000000 0.1481122913
| [
"[email protected]"
] | |
119da14a29035eb8a5b1c9ba0c64dc7cb316c170 | fab39aa4d1317bb43bc11ce39a3bb53295ad92da | /nncf/tensorflow/graph/pattern_operations.py | 23435d263c3de7adf57353e47709a005e220e0df | [
"Apache-2.0"
] | permissive | dupeljan/nncf | 8cdce27f25f01ce8e611f15e1dc3036fb8548d6e | 0abfd7103ca212888a946ba4d0fbdb9d436fdaff | refs/heads/develop | 2023-06-22T00:10:46.611884 | 2021-07-22T10:32:11 | 2021-07-22T10:32:11 | 388,719,455 | 0 | 0 | Apache-2.0 | 2021-07-23T07:46:15 | 2021-07-23T07:43:43 | null | UTF-8 | Python | false | false | 3,416 | py | """
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from nncf.common.graph.patterns import merge_two_types_of_operations
from nncf.tensorflow.graph.metatypes.common import ELEMENTWISE_LAYER_METATYPES
from nncf.tensorflow.graph.metatypes.common import GENERAL_CONV_LAYER_METATYPES
from nncf.tensorflow.graph.metatypes.common import LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_ONE_INPUT
from nncf.tensorflow.graph.metatypes.common import LINEAR_LAYER_METATYPES
LINEAR_OPERATIONS = {'type': list(
{
*{layer_name for m in GENERAL_CONV_LAYER_METATYPES for layer_name in m.get_all_aliases()},
*{layer_name for m in LINEAR_LAYER_METATYPES for layer_name in m.get_all_aliases()},
}
),
'label': 'LINEAR'
}
ELEMENTWISE_OPERATIONS = {'type': list(set(
layer_name for m in ELEMENTWISE_LAYER_METATYPES for layer_name in m.get_all_aliases()
)),
'label': 'ELEMENTWISE'
}
QUANTIZATION_AGNOSTIC_OPERATIONS = {
'type': list(set(
layer_name for m in LAYER_METATYPES_AGNOSTIC_TO_DATA_PRECISION_WITH_ONE_INPUT for layer_name in m.get_all_aliases()
)),
'label': 'ELEMENTWISE'
}
BATCH_NORMALIZATION_OPERATIONS = {'type': ['BatchNormalization',
'SyncBatchNormalization',],
'label': 'BATCH_NORMALIZATION'
}
KERAS_ACTIVATIONS_OPERATIONS = {
'type': ['ReLU',
'ThresholdedReLU',
'ELU',
'PReLU',
'LeakyReLU',
'Activation'],
'label': 'KERAS_ACTIVATIONS'
}
TF_ACTIVATIONS_OPERATIONS = {
'type': ['Relu'],
'label': 'TF_ACTIVATIONS'
}
ATOMIC_ACTIVATIONS_OPERATIONS = merge_two_types_of_operations(KERAS_ACTIVATIONS_OPERATIONS,
TF_ACTIVATIONS_OPERATIONS,
'ATOMIC_ACTIVATIONS')
POOLING_OPERATIONS = {'type': ['AveragePooling2D',
'AveragePooling3D',
'GlobalAveragePooling2D',
'GlobalAveragePooling3D'],
'label': 'POOLING'}
SINGLE_OPS = merge_two_types_of_operations(POOLING_OPERATIONS,
{
'type': [
'Average',
'LayerNormalization',
'UpSampling2D'
]
}, label='SINGLE_OPS')
ARITHMETIC_OPERATIONS = {'type': ['__iadd__',
'__add__',
'__mul__',
'__rmul__'],
'label': 'ARITHMETIC'}
| [
"[email protected]"
] | |
976828ea55563b1986da76957c19a1fc536486b2 | 6364bb727b623f06f6998941299c49e7fcb1d437 | /msgraph-cli-extensions/src/userscontacts/azext_userscontacts/vendored_sdks/userscontacts/aio/__init__.py | 03db4e735a0c8c4b412b41f6a92f232c27276d81 | [
"MIT"
] | permissive | kanakanaidu/msgraph-cli | 1d6cd640f4e10f4bdf476d44d12a7c48987b1a97 | b3b87f40148fb691a4c331f523ca91f8a5cc9224 | refs/heads/main | 2022-12-25T08:08:26.716914 | 2020-09-23T14:29:13 | 2020-09-23T14:29:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 545 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._users_contacts_async import UsersContacts
__all__ = ['UsersContacts']
| [
"[email protected]"
] | |
d4b3c37168303b568f64ff5fef401bc1cc1264b2 | 3400394303380c2510b17b95839dd4095abc55a4 | /src/py310/lesson02/comments.py | a4dca2ef7c776bd871c81c1adcdd13adb12c2fce | [
"MIT"
] | permissive | IBRAR21/py310_sp2021 | daf53b76decf060d72201a3db66f0f7c697876a7 | 584e37b9d96654c1241fc787d157c292301d5bf7 | refs/heads/master | 2023-05-30T16:43:09.614565 | 2021-06-09T21:41:14 | 2021-06-09T21:41:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,828 | py | # --------------------------------------------------------------------------------- #
# AQUABUTTON wxPython IMPLEMENTATION
#
# Andrea Gavana, @ 07 October 2008
# Latest Revision: 24 Nov 2011, 22.00 GMT
#
#
# TODO List
#
# 1) Anything to do?
#
#
# For all kind of problems, requests of enhancements and bug reports, please
# write to me at:
#
# [email protected]
# [email protected]
#
# Or, obviously, to the wxPython mailing list!!!
#
#
# End Of Comments
# --------------------------------------------------------------------------------- #
"""
:class:`AquaButton` is another custom-drawn button class which *approximatively* mimics
the behaviour of Aqua buttons on the Mac.
Description
===========
:class:`AquaButton` is another custom-drawn button class which *approximatively* mimics
the behaviour of Aqua buttons on the Mac. At the moment this class supports:
* Bubble and shadow effects;
* Customizable background, foreground and hover colours;
* Rounded-corners buttons;
* Text-only or image+text buttons;
* Pulse effect on gaining focus.
And a lot more. Check the demo for an almost complete review of the functionalities.
Usage
=====
Sample usage::
import wx
import wx.lib.agw.aquabutton as AB
app = wx.App(0)
frame = wx.Frame(None, -1, "AquaButton Test")
mainPanel = wx.Panel(frame)
mainPanel.SetBackgroundColour(wx.WHITE)
# Initialize AquaButton 1 (with image)
bitmap = wx.Bitmap("my_button_bitmap.png", wx.BITMAP_TYPE_PNG)
btn1 = AB.AquaButton(mainPanel, -1, bitmap, "AquaButton")
# Initialize AquaButton 2 (no image)
btn2 = AB.AquaButton(mainPanel, -1, None, "Hello World!")
frame.Show()
app.MainLoop()
Supported Platforms
===================
AquaButton has been tested on the following platforms:
* Windows (Windows XP);
* Linux Ubuntu (10.10).
Window Styles
=============
`No particular window styles are available for this class.`
Events Processing
=================
This class processes the following events:
================= ==================================================
Event Name Description
================= ==================================================
``wx.EVT_BUTTON`` Process a `wxEVT_COMMAND_BUTTON_CLICKED` event, when the button is clicked.
================= ==================================================
License And Version
===================
:class:`AquaButton` control is distributed under the wxPython license.
Latest Revision: Andrea Gavana @ 22 Nov 2011, 22.00 GMT
Version 0.4
"""
x = x + 1 # allow for border
BORDER = 1
x = x + BORDER
def allow_for_border(coordinate):
return coordinate + 1
y = allow_for_border(y)
def calc(num1, num2):
# calc product 2 numbers
return num1 + num2
def calculate_product(left, right):
return left * right
| [
"[email protected]"
] | |
3b4b65765a6275e2b4fed60d9412aac3f7fb9665 | d12b59b33df5c467abf081d48e043dac70cc5a9c | /ixnetwork_restpy/testplatform/sessions/ixnetwork/quicktest/passcriteria_985f11fda90dc3b8dac84a4a881b8740.py | 6920d6cfe8478b76037b42d0c156e50d2daa5519 | [
"MIT"
] | permissive | ajbalogh/ixnetwork_restpy | 59ce20b88c1f99f95a980ff01106bda8f4ad5a0f | 60a107e84fd8c1a32e24500259738e11740069fd | refs/heads/master | 2023-04-02T22:01:51.088515 | 2021-04-09T18:39:28 | 2021-04-09T18:39:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,492 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class PassCriteria(Base):
"""This applies the Pass Criteria to each trial in the test and determines whether the trial passed or failed.
The PassCriteria class encapsulates a required passCriteria resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'passCriteria'
_SDM_ATT_MAP = {
'EnablePassFail': 'enablePassFail',
}
def __init__(self, parent):
super(PassCriteria, self).__init__(parent)
@property
def EnablePassFail(self):
"""
Returns
-------
- bool: If true, the pass fail criteria is set.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnablePassFail'])
@EnablePassFail.setter
def EnablePassFail(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnablePassFail'], value)
def update(self, EnablePassFail=None):
"""Updates passCriteria resource on the server.
Args
----
- EnablePassFail (bool): If true, the pass fail criteria is set.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def Apply(self):
"""Executes the apply operation on the server.
Applies the specified Quick Test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('apply', payload=payload, response_object=None)
def ApplyAsync(self):
"""Executes the applyAsync operation on the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('applyAsync', payload=payload, response_object=None)
def ApplyAsyncResult(self):
"""Executes the applyAsyncResult operation on the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('applyAsyncResult', payload=payload, response_object=None)
def ApplyITWizardConfiguration(self):
"""Executes the applyITWizardConfiguration operation on the server.
Applies the specified Quick Test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('applyITWizardConfiguration', payload=payload, response_object=None)
def GenerateReport(self):
"""Executes the generateReport operation on the server.
Generate a PDF report for the last succesfull test run.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('generateReport', payload=payload, response_object=None)
def Run(self, *args, **kwargs):
"""Executes the run operation on the server.
Starts the specified Quick Test and waits for its execution to finish.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
run(InputParameters=string)list
-------------------------------
- InputParameters (str): The input arguments of the test.
- Returns list(str): This method is synchronous and returns the result of the test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('run', payload=payload, response_object=None)
def Start(self, *args, **kwargs):
"""Executes the start operation on the server.
Starts the specified Quick Test.
The IxNetwork model allows for multiple method Signatures with the same name while python does not.
start(InputParameters=string)
-----------------------------
- InputParameters (str): The input arguments of the test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]
for item in kwargs.items(): payload[item[0]] = item[1]
return self._execute('start', payload=payload, response_object=None)
def Stop(self):
"""Executes the stop operation on the server.
Stops the currently running Quick Test.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('stop', payload=payload, response_object=None)
def WaitForTest(self):
"""Executes the waitForTest operation on the server.
Waits for the execution of the specified Quick Test to be completed.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('waitForTest', payload=payload, response_object=None)
| [
"[email protected]"
] | |
ec356e53c4d259f06b48074389ec9b57fb66f575 | 199522cb43b4e2c7e3bf034a0e604794258562b1 | /0x03-python-data_structures/7-add_tuple.py | 96d715528f3d23cdf3d725a9838247a97a8e4635 | [] | no_license | jormao/holbertonschool-higher_level_programming | a0fd92f2332f678e6fe496057c04f2995d24a4ac | 360b3a7294e9e0eadcadb57d4c48c22369c05111 | refs/heads/master | 2020-09-29T01:36:20.094209 | 2020-05-15T03:27:06 | 2020-05-15T03:27:06 | 226,915,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | #!/usr/bin/python3
def add_tuple(tuple_a=(), tuple_b=()):
if len(tuple_a) != 2:
if len(tuple_a) == 1:
tuple_a = (tuple_a[0], 0)
if len(tuple_a) == 0:
tuple_a = (0, 0)
if len(tuple_b) != 2:
if len(tuple_b) == 1:
tuple_b = (tuple_b[0], 0)
if len(tuple_b) == 0:
tuple_b = (0, 0)
tuple_c = ((tuple_a[0] + tuple_b[0]), (tuple_a[1] + tuple_b[1]))
return (tuple_c)
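

# Example usage (illustrative):
# add_tuple((1, 2), (3, 5)) -> (4, 7)
# add_tuple((1,), ()) -> (1, 0)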
| [
"[email protected]"
] | |
796a852c4ccdd0bc598e0b373567c854094d0cfd | 45fb509bf21ac003a40fd404d7c0cc995e741672 | /perceptron_algorithm/perceptron_algo_2nd_method.py | 59807adb1a2c854110b8644f2b103f49899851f4 | [] | no_license | FarahAgha/MachineLearning | 0d17511f7495190dfd2368554428208c7d0eadf7 | cf385135e016a63fb16bd326586fcd8ecb3c4355 | refs/heads/master | 2021-01-04T01:03:08.810401 | 2020-03-15T18:42:16 | 2020-03-15T18:42:16 | 240,314,331 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,011 | py | # Perceptron Algorithm perceptron_algo_2nd_method.py
# See https://medium.com/@thomascountz/19-line-line-by-line-python-perceptron-b6f113b161f3 for details.
import numpy as np
class Perceptron(object):
def __init__(self, no_of_inputs, threshold=100, learning_rate=0.01):
self.threshold = threshold
self.learning_rate = learning_rate
self.weights = np.zeros(no_of_inputs + 1)
def predict(self, inputs):
summation = np.dot(inputs, self.weights[1:]) + self.weights[0]
if summation > 0:
activation = 1
else:
activation = 0
return activation
def train(self, training_inputs, labels):
for _ in range(self.threshold):
for inputs, label in zip(training_inputs, labels):
prediction = self.predict(inputs)
self.weights[1:] += self.learning_rate * (label - prediction) * inputs
                self.weights[0] += self.learning_rate * (label - prediction)
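

if __name__ == "__main__":
    # Usage sketch: train the perceptron on the AND gate, the same toy
    # example used in the Medium post referenced above.
    training_inputs = [np.array([0, 0]), np.array([0, 1]),
                       np.array([1, 0]), np.array([1, 1])]
    labels = np.array([0, 0, 0, 1])

    perceptron = Perceptron(no_of_inputs=2)
    perceptron.train(training_inputs, labels)

    print(perceptron.predict(np.array([1, 1])))  # expected: 1
    print(perceptron.predict(np.array([0, 1])))  # expected: 0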
"[email protected]"
] | |
f9dd6d91e8aaee9919ed20cb74c14fc6f2d22c8b | 44c81d8cc9c148c93cf9a77faec345693059c973 | /fetch.py | 568adf1e9271c6ebe976f93a3b0c8306a2ea428a | [] | no_license | neoatlantis/currency-data | 26566a5131b814f324153db451ae9f879fda9b72 | c19bc94d6d6ba6706f625e94e176b77bee455b04 | refs/heads/master | 2020-06-10T19:02:58.973856 | 2016-12-08T06:35:46 | 2016-12-08T06:35:46 | 75,902,576 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,231 | py | #!/usr/bin/env python
import os
import time
import requests
import shelve
import sys
BASEPATH = os.path.realpath(os.path.dirname(sys.argv[0]))
filepath = lambda *i: os.path.join(BASEPATH, *i)
# check for api key
try:
apikeyFilepath = filepath('apikey')
apikey = open(apikeyFilepath).read().strip()
except:
print "Put your API key at `openexchangerates.org` into file `apikey`."
sys.exit(1)
# check for database
db = shelve.open(filepath('currencies.db'), flag='c')
latest = 0
for key in db:
timestamp = float(key)
if timestamp > latest:
latest = timestamp
if time.time() - latest < 3000 and 'force' not in sys.argv:
print "You are requesting too frequent. Abandoned to prevent API",
print "exhaustion. Use `force` in command line to force a request."
db.close()
sys.exit(2)
# fetch url
url = "https://openexchangerates.org/api/latest.json?app_id=%s" % apikey
try:
req = requests.get(url)
if req.status_code != 200: raise
json = req.json()
json = {
'rates': json['rates'],
'timestamp': json['timestamp']
}
except:
print "Failed fetching newest data. Abort."
sys.exit(3)
print json
db[str(time.time())] = json
db.close()
sys.exit(0)
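
# Reading the archive back later (sketch; keys are str(time.time())):
#
#     db = shelve.open('currencies.db')
#     for key in sorted(db, key=float):
#         print key, db[key]['rates'].get('EUR')
#     db.close()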
# ==== /winding_helix.py  (repo: tthtlc/sansagraphics) ====
# Pygame/PyopenGL example by Bastiaan Zapf, Apr 2009
### From http://python-opengl-examples.blogspot.sg/
#
# Draw an helix, wiggle it pleasantly
#
# Keywords: Alpha Blending, Textures, Animation, Double Buffer
from OpenGL.GL import *
from OpenGL.GLU import *
from math import * # trigonometry
import pygame # just to get a display
# get an OpenGL surface
pygame.init()
pygame.display.set_mode((800,600), pygame.OPENGL|pygame.DOUBLEBUF)
# How to catch errors here?
done = False
t=0
while not done:
    # poll the event queue so the window stays responsive and can be closed
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            done = True
    t = t + 1
# for fun comment out these two lines
glClearColor(0.0, 0.0, 0.0, 1.0)
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT)
# Get a perspective at the helix
glMatrixMode(GL_PROJECTION);
glLoadIdentity()
gluPerspective(90,1,0.01,1000)
gluLookAt(sin(t/200.0)*3,sin(t/500.0)*3,cos(t/200.0)*3,0,0,0,0,1,0)
# Draw the helix (this ought to be a display list call)
glMatrixMode(GL_MODELVIEW)
# get a texture (this ought not to be inside the inner loop)
texture=glGenTextures( 1 )
glBindTexture( GL_TEXTURE_2D, texture );
glTexEnvf( GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE );
# set sane defaults for a plethora of potentially uninitialized
# variables
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,
GL_REPEAT);
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,
GL_REPEAT );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
# a texture
#pulse = sin(t/30)*0.5+0.5 # try this one
pulse = 0
texdata=[[[0.0,0,1,1],
[0.0,0,0,0],
[0.0,1,0,1],
[0.0,0,0,0]],
[[0.0,0,0,0],
[pulse,pulse,pulse,1],
[pulse,pulse,pulse,1],
[0.0,0,0,0]],
[[0.0,1,0,1],
[1,pulse,pulse,1],
[pulse,pulse,0,1],
[0.0,0,0,0]],
[[0.0,0,0,0],
[0.0,0,0,0],
[0.0,0,0,0],
[0.0,0,0,0]]];
glTexImage2Df(GL_TEXTURE_2D, 0,4,0,GL_RGBA,
texdata)
glEnable(GL_BLEND);
glBlendFunc (GL_SRC_ALPHA, GL_ONE); # XXX Why GL_ONE?
# alternatively:
# glEnable(GL_DEPTH_TEST);
glEnable( GL_TEXTURE_2D );
# use the texture
glBindTexture( GL_TEXTURE_2D, texture );
# vertices & texture data
glBegin(GL_TRIANGLE_STRIP);
#pulse2 = 0.5
for i in range(0,100):
r=5.0 # try other values - integers as well
R=10.0
d=1 # try other values
j=i
#pulse2 += 0.5
if (i%3==0):
glTexCoord2f(0,i);
glVertex3f( cos(i/r)*cos(j/R) + (-2.5+i*0.05)*sin(j/R), (-2.5+i*0.05)*cos(j/R) - cos(i/r)*sin(j/R), sin(i/r));
elif (i%3==1):
glTexCoord2f(1,i);
glVertex3f( cos(i/r + 3.14/2)*cos(j/R) + (-2.5+i*0.05)*sin(j/R), (-2.5+i*0.05)*cos(j/R) - cos(i/r)*sin(j/R), sin(i/r + 3.14/1));
else:
glTexCoord2f(2,i);
glVertex3f( cos(i/r + 3.14/1)*cos(j/R) + (-2.5+i*0.05)*sin(j/R), (-2.5+i*0.05)*cos(j/R) - cos(i/r)*sin(j/R), sin(i/r+3.14/1));
# glVertex3f( cos(i/r+3.14)*pulse2, -2.5+i*0.05+d+pulse2*1, sin(i/r+3.14)*pulse2);
glEnd();
glFlush()
glDeleteTextures(texture)
pygame.display.flip()
# ==== /sources/scipy-scipy-414c1ab/scipy/io/tests/test_wavfile.py  (repo: georgiee/lip-sync-lpc) ====
import os
import tempfile
import warnings
import numpy as np
from numpy.testing import assert_equal, assert_, assert_raises, assert_array_equal
from numpy.testing.utils import WarningManager
from scipy.io import wavfile
def datafile(fn):
return os.path.join(os.path.dirname(__file__), 'data', fn)
def test_read_1():
warn_ctx = WarningManager()
warn_ctx.__enter__()
try:
warnings.simplefilter('ignore', wavfile.WavFileWarning)
rate, data = wavfile.read(datafile('test-44100-le-1ch-4bytes.wav'))
finally:
warn_ctx.__exit__()
assert_equal(rate, 44100)
assert_(np.issubdtype(data.dtype, np.int32))
assert_equal(data.shape, (4410,))
def test_read_2():
rate, data = wavfile.read(datafile('test-8000-le-2ch-1byteu.wav'))
assert_equal(rate, 8000)
assert_(np.issubdtype(data.dtype, np.uint8))
assert_equal(data.shape, (800, 2))
def test_read_fail():
fp = open(datafile('example_1.nc'))
assert_raises(ValueError, wavfile.read, fp)
fp.close()
def _check_roundtrip(rate, dtype, channels):
fd, tmpfile = tempfile.mkstemp(suffix='.wav')
try:
os.close(fd)
data = np.random.rand(100, channels)
if channels == 1:
data = data[:,0]
data = (data*128).astype(dtype)
wavfile.write(tmpfile, rate, data)
rate2, data2 = wavfile.read(tmpfile)
assert_equal(rate, rate2)
assert_(data2.dtype.byteorder in ('<', '=', '|'), msg=data2.dtype)
assert_array_equal(data, data2)
finally:
os.unlink(tmpfile)
def test_write_roundtrip():
for signed in ('i', 'u'):
for size in (1, 2, 4, 8):
if size == 1 and signed == 'i':
# signed 8-bit integer PCM is not allowed
continue
for endianness in ('>', '<'):
if size == 1 and endianness == '<':
continue
for rate in (8000, 32000):
for channels in (1, 2, 5):
dt = np.dtype('%s%s%d' % (endianness, signed, size))
yield _check_roundtrip, rate, dt, channels
# ==== /python3网络爬虫开发实战/数据存储/MySQL实验/删除数据2.py  (repo: lj1064201288/dell_python) ====
import pymysql
db = pymysql.connect(host="localhost", user='root', password='123456', port=3306, db='django')
cursor = db.cursor()
table = "friends"
age = "age > 30"
sql = 'DELETE FROM {table} WHERE {age}'.format(table=table, age=age)
try:
cursor.execute(sql)
print("Successful...")
db.commit()
except:
print("Failed...")
db.rollback()
finally:
    db.close()
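
# Safer variant (sketch): let the driver escape values instead of formatting
# them into the SQL string yourself. Identifiers such as the table name still
# cannot be parameterized this way:
#
#     sql = 'DELETE FROM friends WHERE age > %s'
#     cursor.execute(sql, (30,))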
"[email protected]"
] | |
92ed6a36ac6f7be76144f403a841125f2a79c943 | 633c18a9e1931f937f7f91f05ce9749a4ac169f6 | /work_with_pythest/tests/test_math.py | 05d5b8bf6daeef827b40a6d56148b1075e179af4 | [] | no_license | borko81/python_scripts | fb3ff79377f19233e18d20f4f150735cdbe52c29 | 4e8ed38550f3b90bc00c07605d7e92822b079206 | refs/heads/master | 2022-07-07T19:26:52.467714 | 2022-06-24T15:46:57 | 2022-06-24T15:46:57 | 224,904,971 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | import pytest
def test_one_plus_one():
assert 1 + 1 == 2
def test_one_plust_two():
a = 1
b = 2
c = 3
assert a + b == c
def test_division_by_zero():
with pytest.raises(ZeroDivisionError) as e:
num = 1 / 0
assert 'division' in str(e.value)
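

@pytest.mark.parametrize("a, b, expected", [(1, 1, 2), (1, 2, 3), (2, 3, 5)])
def test_addition_parametrized(a, b, expected):
    # parametrized variant (sketch) of the hand-written additions above
    assert a + b == expected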
# ==== /pizza/orders/admin.py  (repo: selbieh/Pizza) ====
from django.contrib import admin
from .models import orderPizzaItem, order
admin.site.register([orderPizzaItem, order])
"[email protected]"
] | |
f2ebf591f742eb1433a9072d3c9826170e1cb8cd | 2f73a3d4daac2aa2c38c3443b4f5555c49faa1c8 | /Data.py | d8e917bf4fa96358299cdd241123799362a03919 | [] | no_license | 18021009/project | 656b6c8f9a0120c1185493d04405660895db93e9 | 0133f412e50e3dadd13bd0028832babf846070e5 | refs/heads/main | 2023-05-07T17:08:41.529766 | 2021-06-01T04:06:38 | 2021-06-01T04:06:38 | 372,696,937 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,718 | py | from math import nan
from os import name
from Station import station
import numpy as np
import datetime
import pandas as pd
from Map import map
from Point import point
# standardize the date strings in data.csv and write the result to college.csv
# ds = pd.read_csv('data.csv')
def changeToDate(output_file):
ds = pd.read_csv('data.csv')
day_delta = datetime.timedelta(days=1)
start_date = datetime.date(2019, 1, 1)
end_date = datetime.date(2020, 1, 1)
for i in range((end_date - start_date).days):
day = start_date + i*day_delta
        # strftime has no portable "no leading zero" flag, so mark the month/day
        # fields with X and strip "X0"/"X": 01/05/2019 -> 1/5/2019
        _day = day.strftime('X%m/X%d/%Y').replace('X0','X').replace('X','')
ds['time'] = ds['time'].replace({_day: day})
ds.to_csv(output_file, index=False)
def buffer_data(input_file, buffer):
dataStation = pd.read_csv(input_file)
dataStation['wind_speed'] = nan
dataStation['temperature'] = nan
dataStation['satellite_NO2'] = nan
dataStation["road_density"] = nan
dataStation["relative_humidity"] = nan
dataStation["pressure"] = nan
dataStation["population_density"] = nan
dataStation["pblh"] = nan
dataStation["NDVI"] = nan
dataStation["dpt"] = nan
dataStationArray = dataStation.values
dataStation = pd.DataFrame(dataStationArray, columns=['time', 'lat', 'long', 'NO2', 'name', 'wind_speed' + str(buffer), 'temperature' + str(buffer), 'satellite_NO2' + str(buffer), 'road_density' + str(buffer), 'relative_humidity' + str(buffer), 'pressure' + str(buffer), 'population_density' + str(buffer), 'pblh' + str(buffer), 'NDVI' + str(buffer), 'dpt' + str(buffer)])
dataStation.to_csv(input_file, float_format='{:f}'.format, index=False)
changeToDate('buffer_1_data.csv')
buffer_data('buffer_1_data.csv', 1)
changeToDate('buffer_2_data.csv')
buffer_data('buffer_2_data.csv', 2)
changeToDate('buffer_3_data.csv')
buffer_data('buffer_3_data.csv', 3)
# a = pd.read_csv("buffer_1_data.csv")
# b = pd.read_csv("buffer_2_data.csv")
# merged = a.merge(b, on=['time', 'lat', 'long', 'name'], how='inner')
# merged.to_csv('merge.csv', index=False)
# c = pd.read_csv("merge.csv")
# d = pd.read_csv("buffer_3_data.csv")
# merged = c.merge(d, on=['time', 'lat', 'long', 'name'], how='inner')
# merged.to_csv('merge.csv', index=False)
# buffer_radius
# _buffer_radius = 1
# dataStation = pd.read_csv('college.csv')
# dataStation['wind_speed'] = -999.0
# dataStation["road_dens"] = -999.0
# dataStation["pp_dens"] = -999.0
# dataStation["earth_no2"] = -999.0
# dataStationArray = dataStation.values
# # add wind speed to dataStationArray
# start_date = datetime.date(2019, 1, 1)
# end_date = datetime.date(2020, 1, 1)
# day_delta = datetime.timedelta(days=1)
# for i in range((end_date - start_date).days):
# fileName = "WSPDCombine_"
# day = start_date + i*day_delta
# file = "map/wind_speed/" + fileName + day.strftime('%Y%m%d') + ".tif"
# _map = map()
# _map.setMap(file)
# for data in dataStationArray:
# if((data[0] == day.strftime('%Y-%m-%d'))):
# _point = point(data[2], data[1])
# _point.set_position_on_matrix(_map)
# _station = station(_point, _buffer_radius)
# _station.setBufferValue(_map)
# data[5] = np.float64(_station.bufferValue)
# # add road to college.csv
# _map = map()
# _map.setMap('map/road_density/road_dens.tif')
# for data in dataStationArray:
# _point = point(data[2], data[1])
# _point.set_position_on_matrix(_map)
# _station = station(_point, _buffer_radius)
# _station.setBufferValue(_map)
# data[6] = _station.bufferValue
# # add population_density
# _map = map()
# _map.setMap('map/population_density/ppd.tif')
# for data in dataStationArray:
# _point = point(data[2], data[1])
# _point.set_position_on_matrix(_map)
# _station = station(_point, _buffer_radius)
# _station.setBufferValue(_map)
# data[7] = _station.bufferValue
# # add earth_no2
# for i in range((end_date - start_date).days):
# fileName = "NO2_"
# day = start_date + i*day_delta
# file = "map/NO2/" + fileName + day.strftime('%Y%m%d') + ".tif"
# _map = map()
# _map.setMap(file)
# for data in dataStationArray:
# if((data[0] == day.strftime('%Y-%m-%d'))):
# _point = point(data[2], data[1])
# _point.set_position_on_matrix(_map)
# _station = station(_point, _buffer_radius)
# _station.setBufferValue(_map)
# data[8] = _station.bufferValue
# newDataStation = pd.DataFrame(dataStationArray, columns=['time', 'lat', 'long', 'NO2', 'name', 'wind_speed', 'road_dens', 'pp_dens', 'earth_no2'])
# newDataStation.to_csv('college_2.csv', float_format='{:f}'.format, index=False)
# ==== /visas/admin.py  (repo: BoughezalaMohamedAimen/Amine) ====
from django.contrib import admin
from .models import *
# Register your models here.
admin.site.register(Visa)
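
# Registration with a ModelAdmin subclass (sketch; replaces the plain
# register call above and allows list columns, filters, etc.):
#
#     @admin.register(Visa)
#     class VisaAdmin(admin.ModelAdmin):
#         list_display = ("id",)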
# ==== /aliyun-python-sdk-config/aliyunsdkconfig/request/v20190108/GetSupportedResourceTypesRequest.py  (repo: hetw/aliyun-openapi-python-sdk, Apache-2.0) ====
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkconfig.endpoint import endpoint_data
class GetSupportedResourceTypesRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Config', '2019-01-08', 'GetSupportedResourceTypes','Config')
self.set_method('GET')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
# ==== /Packs/GreyNoise/Integrations/GreyNoise/GreyNoise_test.py  (repo: EmersonElectricCo/content, MIT) ====
import pytest
import json
import GreyNoise
from test_data.input_data import ( # type: ignore
parse_code_and_body_data,
get_ip_reputation_score_data,
test_module_data,
ip_reputation_command_data,
ip_quick_check_command_data,
generate_advanced_query_data,
query_command_data,
get_ip_context_data_data,
stats_command_data,
riot_command_response_data
)
class DummyResponse:
"""
Dummy Response object of requests.response for unit testing.
"""
def __init__(self, headers, text, status_code):
self.headers = headers
self.text = text
self.status_code = status_code
def json(self):
"""
Dummy json method.
"""
return json.loads(self.text)
@pytest.mark.parametrize("input_data, expected_output", parse_code_and_body_data)
def test_parse_code_and_body(input_data, expected_output):
"""
Tests various combinations of error codes and messages.
"""
response = GreyNoise.parse_code_and_body(input_data)
assert response == expected_output
@pytest.mark.parametrize("input_data, expected_output", get_ip_reputation_score_data)
def test_get_ip_reputation_score(input_data, expected_output):
"""
Tests various combinations of GreyNoise classification data.
"""
response = GreyNoise.get_ip_reputation_score(input_data)
assert response == expected_output
@pytest.mark.parametrize("api_key, api_response, status_code, expected_output", test_module_data)
def test_test_module(api_key, api_response, status_code, expected_output, mocker):
"""
Tests test_module for GreyNoise integration.
"""
client = GreyNoise.Client(api_key, "dummy_server", 10, "proxy", False, "dummy_integration")
if isinstance(api_key, str) and api_key == "true_key":
mocker.patch('greynoise.GreyNoise._request', return_value=api_response)
response = GreyNoise.test_module(client)
assert response == expected_output
else:
dummy_response = DummyResponse({}, api_response, status_code)
mocker.patch('requests.Session.get', return_value=dummy_response)
with pytest.raises(Exception) as err:
_ = GreyNoise.test_module(client)
assert str(err.value) == expected_output
@pytest.mark.parametrize("args, test_scenario, api_response, status_code, expected_output", ip_reputation_command_data)
def test_ip_reputation_command(args, test_scenario, api_response, status_code, expected_output, mocker):
"""
    Tests various combinations of valid and invalid responses for the IPReputation command.
"""
client = GreyNoise.Client("true_api_key", "dummy_server", 10, "proxy", False, "dummy_integration")
dummy_response = DummyResponse(
{
"Content-Type": "application/json"
},
json.dumps(api_response),
status_code
)
if test_scenario == "positive":
mocker.patch('requests.Session.get', return_value=dummy_response)
response = GreyNoise.ip_reputation_command(client, args)
assert response[0].outputs == expected_output
else:
mocker.patch('requests.Session.get', return_value=dummy_response)
with pytest.raises(Exception) as err:
_ = GreyNoise.ip_reputation_command(client, args)
assert str(err.value) == expected_output
@pytest.mark.parametrize("args, test_scenario, api_response, status_code, expected_output", ip_quick_check_command_data)
def test_ip_quick_check_command(args, test_scenario, api_response, status_code, expected_output, mocker):
"""
Tests various combinations of valid and invalid responses for ip-quick-check command.
"""
client = GreyNoise.Client("true_api_key", "dummy_server", 10, "proxy", False, "dummy_integration")
dummy_response = DummyResponse(
{
"Content-Type": "application/json"
},
json.dumps(api_response),
status_code
)
if test_scenario == "positive":
mocker.patch('requests.Session.get', return_value=dummy_response)
response = GreyNoise.ip_quick_check_command(client, args)
assert response.outputs == expected_output
elif test_scenario == "negative" and status_code == 200:
mocker.patch('requests.Session.get', return_value=dummy_response)
response = GreyNoise.ip_quick_check_command(client, args)
with open('test_data/quick_check.md') as f:
expected_hr = f.read()
assert response.readable_output == expected_hr
elif test_scenario == "negative":
mocker.patch('requests.Session.get', return_value=dummy_response)
with pytest.raises(Exception) as err:
_ = GreyNoise.ip_quick_check_command(client, args)
assert str(err.value) == expected_output
elif test_scenario == "custom":
mocker.patch('greynoise.GreyNoise.quick', return_value=api_response)
with pytest.raises(Exception) as err:
_ = GreyNoise.ip_quick_check_command(client, args)
assert str(err.value) == expected_output
@pytest.mark.parametrize("args, expected_output", generate_advanced_query_data)
def test_generate_advanced_query(args, expected_output):
"""
Tests various combinations of command arguments to generate GreyNoise advanced_query for query/stats command.
"""
response = GreyNoise.generate_advanced_query(args)
assert response == expected_output
@pytest.mark.parametrize("args, test_scenario, api_response, status_code, expected_output", query_command_data)
def test_query_command(args, test_scenario, api_response, status_code, expected_output, mocker):
"""
Tests various combinations of valid and invalid responses for query command.
"""
client = GreyNoise.Client("true_api_key", "dummy_server", 10, "proxy", False, "dummy_integration")
dummy_response = DummyResponse(
{
"Content-Type": "application/json"
},
json.dumps(api_response),
status_code
)
mocker.patch('requests.Session.get', return_value=dummy_response)
if test_scenario == "positive":
response = GreyNoise.query_command(client, args)
assert response.outputs[GreyNoise.QUERY_OUTPUT_PREFIX['IP']] == expected_output['data']
else:
with pytest.raises(Exception) as err:
_ = GreyNoise.query_command(client, args)
assert str(err.value) == expected_output
@pytest.mark.parametrize("args, test_scenario, api_response, status_code, expected_output", stats_command_data)
def test_stats_command(args, test_scenario, api_response, status_code, expected_output, mocker):
"""
Tests various combinations of valid and invalid responses for stats command.
"""
client = GreyNoise.Client("true_api_key", "dummy_server", 10, "proxy", False, "dummy_integration")
dummy_response = DummyResponse(
{
"Content-Type": "application/json"
},
json.dumps(api_response),
status_code
)
mocker.patch('requests.Session.get', return_value=dummy_response)
if test_scenario == "positive":
response = GreyNoise.stats_command(client, args)
assert response.outputs == expected_output
else:
with pytest.raises(Exception) as err:
_ = GreyNoise.stats_command(client, args)
assert str(err.value) == expected_output
@pytest.mark.parametrize("input_data, expected_output", get_ip_context_data_data)
def test_get_ip_context_data(input_data, expected_output):
"""
    Tests various combinations for converting ip-context and query command responses from the SDK to human-readable format.
"""
response = GreyNoise.get_ip_context_data(input_data)
assert response == expected_output
@pytest.mark.parametrize("test_scenario, status_code, input_data, expected", riot_command_response_data)
def test_riot_command(mocker, test_scenario, status_code, input_data, expected):
"""
Test various inputs for riot command
"""
client = GreyNoise.Client(api_key="true_api_key", api_server="dummy_server", timeout=10,
proxy="proxy", use_cache=False, integration_name="dummy_integration")
dummy_response = DummyResponse(
{
"Content-Type": "application/json"
},
json.dumps(expected["raw_data"]),
status_code
)
mocker.patch('requests.Session.get', return_value=dummy_response)
if test_scenario == "positive":
response = GreyNoise.riot_command(client, input_data)
assert response.outputs == expected["raw_data"]
else:
with pytest.raises(Exception) as err:
_ = GreyNoise.riot_command(client, input_data)
assert str(err.value) == expected["error_message"].format(input_data["ip"])
# ==== /backend/lizz_mob_jul15_dev_7685/urls.py  (repo: crowdbotics-apps/lizz-mob-jul15-dev-7685) ====
"""lizz_mob_jul15_dev_7685 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "lizz mob jul15"
admin.site.site_title = "lizz mob jul15 Admin Portal"
admin.site.index_title = "lizz mob jul15 Admin"
# swagger
api_info = openapi.Info(
title="lizz mob jul15 API",
default_version="v1",
description="API documentation for lizz mob jul15 App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
# ==== /linebot/models/base.py  (repo: line/line-bot-sdk-python, Apache-2.0) ====
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""linebot.models.base module."""
import json
from .. import utils
class Base(object):
"""Base class of model.
Suitable for JSON base data.
"""
def __init__(self, **kwargs):
"""__init__ method.
:param kwargs:
"""
pass
def __str__(self):
"""__str__ method."""
return self.as_json_string()
def __repr__(self):
"""__repr__ method."""
return str(self)
def __eq__(self, other):
"""__eq__ method.
:param other:
"""
return other and self.as_json_dict() == other.as_json_dict()
def __ne__(self, other):
"""__ne__ method.
:param other:
"""
return not self.__eq__(other)
def as_json_string(self):
"""Return JSON string from this object.
:rtype: str
"""
return json.dumps(self.as_json_dict(), sort_keys=True)
def as_json_dict(self):
"""Return dictionary from this object.
:return: dict
"""
data = {}
for key, value in self.__dict__.items():
camel_key = utils.to_camel_case(key)
if isinstance(value, (list, tuple, set)):
data[camel_key] = list()
for item in value:
if hasattr(item, 'as_json_dict'):
data[camel_key].append(item.as_json_dict())
else:
data[camel_key].append(item)
elif hasattr(value, 'as_json_dict'):
data[camel_key] = value.as_json_dict()
elif value is not None:
data[camel_key] = value
return data
@classmethod
def new_from_json_dict(cls, data, use_raw_message=False):
"""Create a new instance from a dict.
:param data: JSON dict
:param bool use_raw_message: Using original Message key as attribute
"""
if use_raw_message:
return cls(use_raw_message=use_raw_message, **data)
new_data = {utils.to_snake_case(key): value
for key, value in data.items()}
return cls(**new_data)
@staticmethod
def get_or_new_from_json_dict(data, cls):
"""Get `cls` object w/ deserialization from json if needed.
If data is instance of cls, return data.
Else if data is instance of dict, create instance from dict.
Else, return None.
:param data:
:param cls:
:rtype: object
"""
if isinstance(data, cls):
return data
elif isinstance(data, dict):
return cls.new_from_json_dict(data)
return None
@staticmethod
def get_or_new_from_json_dict_with_types(
data, cls_map, type_key='type', use_raw_message=False
):
"""Get `cls` object w/ deserialization from json by using type key hint if needed.
If data is instance of one of cls, return data.
Else if data is instance of dict, create instance from dict.
Else, return None.
:param data:
:param cls_map:
:param type_key:
:rtype: object
:param bool use_raw_message: Using original Message key as attribute
"""
if isinstance(data, tuple(cls_map.values())):
return data
elif isinstance(data, dict):
type_val = data[type_key]
if type_val in cls_map:
return cls_map[type_val].new_from_json_dict(data, use_raw_message=use_raw_message)
return None
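

if __name__ == '__main__':
    # Usage sketch (hedged): `Profile` is a made-up subclass, only here to
    # show the camelCase <-> snake_case round trip. Running this file directly
    # would also require the package context for `from .. import utils`.
    class Profile(Base):
        def __init__(self, display_name=None, **kwargs):
            super(Profile, self).__init__(**kwargs)
            self.display_name = display_name

    profile = Profile.new_from_json_dict({'displayName': 'kong'})
    print(profile.display_name)    # kong
    print(profile.as_json_dict())  # {'displayName': 'kong'}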
# ==== /pynsxt/swagger_client/models/ns_service_group_list_result.py  (repo: darshanhuang1/pynsxt-1) ====
# coding: utf-8
"""
NSX API
VMware NSX REST API # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.list_result import ListResult # noqa: F401,E501
from swagger_client.models.ns_service_group import NSServiceGroup # noqa: F401,E501
from swagger_client.models.resource_link import ResourceLink # noqa: F401,E501
from swagger_client.models.self_resource_link import SelfResourceLink # noqa: F401,E501
class NSServiceGroupListResult(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'_self': 'SelfResourceLink',
'links': 'list[ResourceLink]',
'schema': 'str',
'cursor': 'str',
'sort_ascending': 'bool',
'sort_by': 'str',
'result_count': 'int',
'results': 'list[NSServiceGroup]'
}
attribute_map = {
'_self': '_self',
'links': '_links',
'schema': '_schema',
'cursor': 'cursor',
'sort_ascending': 'sort_ascending',
'sort_by': 'sort_by',
'result_count': 'result_count',
'results': 'results'
}
def __init__(self, _self=None, links=None, schema=None, cursor=None, sort_ascending=None, sort_by=None, result_count=None, results=None): # noqa: E501
"""NSServiceGroupListResult - a model defined in Swagger""" # noqa: E501
self.__self = None
self._links = None
self._schema = None
self._cursor = None
self._sort_ascending = None
self._sort_by = None
self._result_count = None
self._results = None
self.discriminator = None
if _self is not None:
self._self = _self
if links is not None:
self.links = links
if schema is not None:
self.schema = schema
if cursor is not None:
self.cursor = cursor
if sort_ascending is not None:
self.sort_ascending = sort_ascending
if sort_by is not None:
self.sort_by = sort_by
if result_count is not None:
self.result_count = result_count
self.results = results
@property
def _self(self):
"""Gets the _self of this NSServiceGroupListResult. # noqa: E501
:return: The _self of this NSServiceGroupListResult. # noqa: E501
:rtype: SelfResourceLink
"""
return self.__self
@_self.setter
def _self(self, _self):
"""Sets the _self of this NSServiceGroupListResult.
:param _self: The _self of this NSServiceGroupListResult. # noqa: E501
:type: SelfResourceLink
"""
self.__self = _self
@property
def links(self):
"""Gets the links of this NSServiceGroupListResult. # noqa: E501
The server will populate this field when returing the resource. Ignored on PUT and POST. # noqa: E501
:return: The links of this NSServiceGroupListResult. # noqa: E501
:rtype: list[ResourceLink]
"""
return self._links
@links.setter
def links(self, links):
"""Sets the links of this NSServiceGroupListResult.
The server will populate this field when returing the resource. Ignored on PUT and POST. # noqa: E501
:param links: The links of this NSServiceGroupListResult. # noqa: E501
:type: list[ResourceLink]
"""
self._links = links
@property
def schema(self):
"""Gets the schema of this NSServiceGroupListResult. # noqa: E501
:return: The schema of this NSServiceGroupListResult. # noqa: E501
:rtype: str
"""
return self._schema
@schema.setter
def schema(self, schema):
"""Sets the schema of this NSServiceGroupListResult.
:param schema: The schema of this NSServiceGroupListResult. # noqa: E501
:type: str
"""
self._schema = schema
@property
def cursor(self):
"""Gets the cursor of this NSServiceGroupListResult. # noqa: E501
Opaque cursor to be used for getting next page of records (supplied by current result page) # noqa: E501
:return: The cursor of this NSServiceGroupListResult. # noqa: E501
:rtype: str
"""
return self._cursor
@cursor.setter
def cursor(self, cursor):
"""Sets the cursor of this NSServiceGroupListResult.
Opaque cursor to be used for getting next page of records (supplied by current result page) # noqa: E501
:param cursor: The cursor of this NSServiceGroupListResult. # noqa: E501
:type: str
"""
self._cursor = cursor
@property
def sort_ascending(self):
"""Gets the sort_ascending of this NSServiceGroupListResult. # noqa: E501
:return: The sort_ascending of this NSServiceGroupListResult. # noqa: E501
:rtype: bool
"""
return self._sort_ascending
@sort_ascending.setter
def sort_ascending(self, sort_ascending):
"""Sets the sort_ascending of this NSServiceGroupListResult.
:param sort_ascending: The sort_ascending of this NSServiceGroupListResult. # noqa: E501
:type: bool
"""
self._sort_ascending = sort_ascending
@property
def sort_by(self):
"""Gets the sort_by of this NSServiceGroupListResult. # noqa: E501
Field by which records are sorted # noqa: E501
:return: The sort_by of this NSServiceGroupListResult. # noqa: E501
:rtype: str
"""
return self._sort_by
@sort_by.setter
def sort_by(self, sort_by):
"""Sets the sort_by of this NSServiceGroupListResult.
Field by which records are sorted # noqa: E501
:param sort_by: The sort_by of this NSServiceGroupListResult. # noqa: E501
:type: str
"""
self._sort_by = sort_by
@property
def result_count(self):
"""Gets the result_count of this NSServiceGroupListResult. # noqa: E501
Count of results found (across all pages), set only on first page # noqa: E501
:return: The result_count of this NSServiceGroupListResult. # noqa: E501
:rtype: int
"""
return self._result_count
@result_count.setter
def result_count(self, result_count):
"""Sets the result_count of this NSServiceGroupListResult.
Count of results found (across all pages), set only on first page # noqa: E501
:param result_count: The result_count of this NSServiceGroupListResult. # noqa: E501
:type: int
"""
self._result_count = result_count
@property
def results(self):
"""Gets the results of this NSServiceGroupListResult. # noqa: E501
Paged collection of NSServiceGroups # noqa: E501
:return: The results of this NSServiceGroupListResult. # noqa: E501
:rtype: list[NSServiceGroup]
"""
return self._results
@results.setter
def results(self, results):
"""Sets the results of this NSServiceGroupListResult.
Paged collection of NSServiceGroups # noqa: E501
:param results: The results of this NSServiceGroupListResult. # noqa: E501
:type: list[NSServiceGroup]
"""
if results is None:
raise ValueError("Invalid value for `results`, must not be `None`") # noqa: E501
self._results = results
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NSServiceGroupListResult):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
# ==== /Exps_7_v3/doc3d/Ablation4_ch016_ep003_7_10/Gather2_W_fixGood_C_change/train/pyr_4s/L6/step10_a.py  (repo: KongBOy/kong_model2) ====
#############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__) ### path of the step10_b.py currently being executed
code_exe_path_element = code_exe_path.split("\\") ### split the path; we'll need it shortly to find which level kong_model2 sits at
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2") ### find which level kong_model2 is at
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1]) ### locate the kong_model2 dir
import sys ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" code_dir:", code_dir)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer ### the -1 in the middle converts the length into an index
# print(" kong_to_py_layer:", kong_to_py_layer)
if (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:] ### [7:] originally stripped the "step1x_" prefix; meaningful names turned out fine to keep, so it was changed to 0
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] ### [5:] originally stripped the "mask_" prefix, which existed only because a Python module name cannot start with a digit; the automatic ordering turned out acceptable, so it was changed to 0
elif(kong_to_py_layer > 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print(" template_dir:", template_dir) ### for example: template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
exp_dir = template_dir
#############################################################################################################################################################################################################
from step06_a_datas_obj import *
from step09_4side_L6 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_modules = [module for module in sys.modules if "step09" in module]
for rm_module in rm_modules: del sys.modules[rm_module]
import Exps_7_v3.doc3d.Ablation4_ch016_ep003_7_10.W_w_M_to_C_pyr.pyr_4s.L6.step10_a as W_w_M_to_C_p20_pyr
from Exps_7_v3.doc3d.Ablation4_ch016_ep003_7_10.I_w_M_to_W_pyr.pyr_3s.L5.step10_a import ch032_1side_6__2side_5__3side_2__ep010 as I_w_M_to_W_p20_3s_L5_Good
#############################################################################################################################################################################################################
'''
exp_dir is the name of the folder one level "above" result_dir! Nesting exp_dir is fine too~
For example, with exp_dir = "6_mask_unet/your_chosen_name", every result_dir lives under:
6_mask_unet/your_chosen_name/result_a
6_mask_unet/your_chosen_name/result_b
6_mask_unet/your_chosen_name/...
'''
use_db_obj = type8_blender_kong_doc3d_v2
use_loss_obj = [mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Wz").copy(), mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Wy").copy(), mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Wx").copy(), mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Cx").copy(), mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Cy").copy()] ### the z, y, x order follows step07_b_0b_Multi_UNet
#############################################################
### build an empty Exp_builder so result_analyze can draw blank figures
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
#############################################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
ch032_1side_1__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s1__2s1__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_1__2side_1__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
ch032_1side_2__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_1__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s2__2s1__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_2__2side_1__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s2__2s2__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_2__2side_2__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s2__2s2__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_2__2side_2__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s2__2s2__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_2__2side_2__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
ch032_1side_3__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_1__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s1__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_1__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s2__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_2__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s2__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_2__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s2__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_2__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s3__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_3__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s3__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_3__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s3__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_3__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s3__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_3__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s3__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_3__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s3__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_3__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
ch032_1side_4__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_1__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s1__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_1__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s2__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_2__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s2__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_2__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s2__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_2__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s3__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_3__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s3__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_3__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s3__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_3__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s3__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_3__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s3__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_3__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s3__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_3__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
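# Illustrative sketch (hypothetical helper, not part of the training pipeline):
# the number of (2side, 3side, 4side) combinations enumerated for a given
# 1side value n (with 1 <= 4side <= 3side <= 2side <= n) is the tetrahedral
# number n*(n+1)*(n+2)/6, which reproduces the per-block "OK" counts in the
# comments: side4 -> 20, and likewise 35 and 56 for the side5/side6 blocks below.
def _pyramid_combo_count(n):
    """Count combinations with 1 <= 4side <= 3side <= 2side <= n."""
    return n * (n + 1) * (n + 2) // 6

assert [_pyramid_combo_count(n) for n in (4, 5, 6)] == [20, 35, 56]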
# 1 3 6 10 "15" 21 28 36 45 55
# side5 OK 35
ch032_1side_5__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_1__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s1__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_1__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s2__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_2__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s2__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_2__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s2__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_2__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s3__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_3__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s3__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_3__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s3__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_3__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s3__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_3__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s3__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_3__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s3__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_3__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s5__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_5_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s5__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_5_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s5__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_5_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s5__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_5_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s5__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_5_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
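# Minimal sketch (hypothetical, not used by the pipeline): every experiment in
# these blocks shares the same builder chain and differs only in its pyramid
# side indices, so one helper could assemble any of them. All names referenced
# here (Exp_builder, use_db_obj, use_loss_obj, exp_dir, code_exe_path, Range,
# W_w_M_to_C_p20_pyr, I_w_M_to_W_p20_3s_L5_Good, and the ch032_pyramid_*
# model builders) are assumed to be the ones already defined in this module.
def _build_ch032_pyramid_exp(s1, s2, s3, s4):
    kern = f"1side_{s1}__2side_{s2}__3side_{s3}_4side_{s4}"
    return (Exp_builder()
            .set_basic("train", use_db_obj,
                       globals()[f"ch032_pyramid_{kern}_and_1s6_2s6"],
                       use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path,
                       describe_end=f"ch032_1s{s1}__2s{s2}__3s{s3}__4s{s4}")
            .set_train_args(epochs=1)
            .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5,
                                 it_down_step="half", it_down_fq=900)
            .set_train_in_gt_use_range(use_in_range=Range(0, 1),
                                       use_gt_range=Range(0, 1))
            .set_multi_model_reload_exp_builders_dict(
                W_to_Cx_Cy=getattr(W_w_M_to_C_p20_pyr, f"ch032_{kern}"),
                I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good)
            .set_result_name(result_name=""))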
# 1 3 6 10 15 "21" 28 36 45 55
# side6 OK 56
ch032_1side_6__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_1__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s1__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_1__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s2__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_2__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s2__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_2__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s2__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_2__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s3__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_3__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s3__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_3__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s3__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_3__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s3__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_3__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s3__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_3__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s3__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_3__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s5__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_5_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s5__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_5_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s5__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_5_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s5__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_5_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s5__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_5_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s5__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_5_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s5__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_5_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s5__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_5_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s5__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_5_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s5__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_5_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s6__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_6_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s6__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_6_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s6__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_6_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s6__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_6_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s6__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_6_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s6__4s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_6_4side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
# Triangular numbers T(k) = k*(k+1)/2: 1 3 6 10 15 21 "28" 36 45 55 -> 2side=7 alone adds T(7) = 28 (3side, 4side) pairs
# side7 OK: the block below enumerates the full grid, sum(T(1)..T(7)) = 1+3+6+10+15+21+28 = 84 builders
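# A minimal sketch (hypothetical helper, not referenced by the definitions below) of the
# enumeration these one-liners follow: every (2side, 3side, 4side) triple with
# 1side >= 2side >= 3side >= 4side >= 1. The explicit per-experiment lines are kept as-is
# so that other modules can still address each builder by its full attribute name.
def _enumerate_pyramid_sides(max_side=7):
    """Yield (s2, s3, s4) with max_side >= s2 >= s3 >= s4 >= 1.

    For max_side = 7 this yields 1 + 3 + 6 + 10 + 15 + 21 + 28 = 84 triples,
    one per ch032_1side_7__* builder defined below.
    """
    for s2 in range(1, max_side + 1):
        for s3 in range(1, s2 + 1):
            for s4 in range(1, s3 + 1):
                yield s2, s3, s4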
ch032_1side_7__2side_1__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_1__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s1__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_1__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_2__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_2__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s2__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_2__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_2__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_2__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s2__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_2__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_2__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_2__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s2__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_2__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_3__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_3__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s3__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_3__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_3__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_3__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s3__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_3__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_3__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_3__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s3__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_3__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_3__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_3__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s3__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_3__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_3__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_3__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s3__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_3__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_3__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_3__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s3__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_3__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_5_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s5__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_5_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_5_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s5__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_5_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_5_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s5__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_5_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_5_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s5__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_5_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_5_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s5__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_5_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_5_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s5__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_5_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_5_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s5__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_5_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_5_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s5__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_5_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_5_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s5__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_5_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_5_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s5__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_5_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_6_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_6_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s6__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_6_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_6_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_6_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s6__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_6_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_6_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_6_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s6__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_6_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_6_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_6_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s6__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_6_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_6_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_6_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s6__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_6_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_6_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_6_4side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s6__4s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_6_4side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_1_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_1_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s1__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_1_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_2_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_2_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s2__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_2_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_2_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_2_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s2__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_2_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_3_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_3_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s3__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_3_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_3_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_3_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s3__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_3_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_3_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_3_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s3__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_3_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_4_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_4_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s4__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_4_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_4_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_4_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s4__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_4_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_4_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_4_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s4__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_4_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_4_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_4_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s4__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_4_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_5_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_5_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s5__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_5_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_5_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_5_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s5__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_5_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_5_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_5_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s5__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_5_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_5_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_5_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s5__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_5_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_5_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_5_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s5__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_5_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_6_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_6_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s6__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_6_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_6_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_6_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s6__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_6_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_6_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_6_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s6__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_6_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_6_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_6_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s6__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_6_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_6_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_6_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s6__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_6_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_6_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_6_4side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s6__4s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_6_4side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_7_4side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_7_4side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s7__4s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_7_4side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_7_4side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_7_4side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s7__4s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_7_4side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_7_4side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_7_4side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s7__4s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_7_4side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_7_4side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_7_4side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s7__4s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_7_4side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_7_4side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_7_4side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s7__4s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_7_4side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_7_4side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_7_4side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s7__4s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_7_4side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_7_4side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_7_4side_7_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s7__4s7") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_7_4side_7, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
#############################################################
if(__name__ == "__main__"):
print("build exps cost time:", time.time() - start_time)
if len(sys.argv) < 2:
############################################################################################################
        ### Press F5 directly, or run "python step10_b1_exp_obj_load_and_train_and_test.py" with nothing after it! That way it won't fall through to the code below, which is meant for step10_b_subprocess.py~~~
ch032_1side_1__2side_1__3side_1_4side_1.build().run()
# print('no argument')
sys.exit()
    ### The code below is for step10_b_subprocess.py; it is equivalent to running "python step10_b1_exp_obj_load_and_train_and_test.py some_exp.build().run()" from the command line
eval(sys.argv[1])
| [
"[email protected]"
] | |
241b062d29b2a2e895a396fb385dd2ffb44bab96 | 3ff9821b1984417a83a75c7d186da9228e13ead9 | /No_1410_HTML Entity Parser/by_re_replacement.py | c017682935944a4f3a73df684c4c097a91d80e6d | [
"MIT"
] | permissive | brianchiang-tw/leetcode | fd4df1917daef403c48cb5a3f5834579526ad0c2 | 6978acfb8cb767002cb953d02be68999845425f3 | refs/heads/master | 2023-06-11T00:44:01.423772 | 2023-06-01T03:52:00 | 2023-06-01T03:52:00 | 222,939,709 | 41 | 12 | null | null | null | null | UTF-8 | Python | false | false | 3,076 | py | '''
Description:
HTML entity parser is the parser that takes HTML code as input and replace all the entities of the special characters by the characters itself.
The special characters and their entities for HTML are:
Quotation Mark: the entity is " and symbol character is ".
Single Quote Mark: the entity is ' and symbol character is '.
Ampersand: the entity is & and symbol character is &.
Greater Than Sign: the entity is > and symbol character is >.
Less Than Sign: the entity is < and symbol character is <.
Slash: the entity is ⁄ and symbol character is /.
Given the input text string to the HTML parser, you have to implement the entity parser.
Return the text after replacing the entities by the special characters.
Example 1:
Input: text = "& is an HTML entity but &ambassador; is not."
Output: "& is an HTML entity but &ambassador; is not."
Explanation: The parser will replace the & entity by &
Example 2:
Input: text = "and I quote: "...""
Output: "and I quote: \"...\""
Example 3:
Input: text = "Stay home! Practice on Leetcode :)"
Output: "Stay home! Practice on Leetcode :)"
Example 4:
Input: text = "x > y && x < y is always false"
Output: "x > y && x < y is always false"
Example 5:
Input: text = "leetcode.com⁄problemset⁄all"
Output: "leetcode.com/problemset/all"
Constraints:
1 <= text.length <= 10^5
The string may contain any possible characters out of all the 256 ASCII characters.
'''
import re
class Solution:
def entityParser(self, text: str) -> str:
        html_symbol   = [ '&quot;', '&apos;', '&gt;', '&lt;', '&frasl;', '&amp;']
        formal_symbol = [ '"', "'", '>', '<', '/', '&']
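        # Note: '&amp;' is deliberately the last entry in the mapping; decoding
        # it earlier could splice a fresh '&' onto the following text and let a
        # second, unintended decoding happen (each entity must be parsed once).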
for html_sym, formal_sym in zip(html_symbol, formal_symbol):
text = re.sub( html_sym , formal_sym, text )
return text
# n : the character length of input, text.
## Time Complexity: O( n )
#
# The overhead in time is the cost of string replacement, which is of O( n ).
## Space Complexity: O( n )
#
# The overhead in space is the storage for output string, which is of O( n ).
from collections import namedtuple
TestEntry = namedtuple('TestEntry', 'text')
def test_bench():
test_data = [
        TestEntry( text = "&amp; is an HTML entity but &ambassador; is not." ),
        TestEntry( text = "and I quote: &quot;...&quot;" ),
        TestEntry( text = "Stay home! Practice on Leetcode :)" ),
        TestEntry( text = "x &gt; y &amp;&amp; x &lt; y is always false" ),
        TestEntry( text = "leetcode.com&frasl;problemset&frasl;all" ),
]
# expected output:
'''
& is an HTML entity but &ambassador; is not.
and I quote: "..."
Stay home! Practice on Leetcode :)
x > y && x < y is always false
leetcode.com/problemset/all
'''
for t in test_data:
print( Solution().entityParser( text = t.text) )
return
if __name__ == '__main__':
test_bench() | [
"[email protected]"
] | |
f76d5f3aec244f5d33fcd7e2887d2eb61bb5658a | 0b25b1c2ea3e3f05ea388e1105cd2fab50e7ba54 | /mysite/blog/forms.py | 68ba9afdb13950be95db2f366aa5aebf783e6d1c | [] | no_license | webclinic017/Django-project | f8337aeb296d12760143951635d0297c13313a50 | e757aef633c63aaf857afd1f274d42d16703ca0c | refs/heads/master | 2022-12-25T17:30:14.503627 | 2020-10-12T08:47:08 | 2020-10-12T08:47:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 397 | py | from django import forms
from .models import Comment
class EmailPostForm(forms.Form):
name = forms.CharField()
email = forms.EmailField()
to = forms.EmailField()
comments = forms.CharField(required=False,
widget=forms.Textarea)
class CommentForm(forms.ModelForm):
class Meta:
model = Comment
fields = ('name', 'email', 'body') | [
"[email protected]"
] | |
241ccb6c7c4ae0c34b892c9d317fbd849d3ea4ef | 7fba01da6426480612d7cef9ceb2e15f3df6d01c | /PYTHON/pythonDesafios/venv/lib/python3.9/site-packages/santos/santos.py | b862f2661c3fd15467cd45185f3ff2200ba50eaa | [
"MIT"
] | permissive | Santos1000/Curso-Python | f320fec1e7ced4c133ade69acaa798d431e14113 | 549223a1633f6f619c87554dd8078cf7841bb1df | refs/heads/main | 2023-05-26T12:01:23.868814 | 2021-05-26T13:22:58 | 2021-05-26T13:22:58 | 371,039,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,153 | py | __author__ = 'anderson'
# -*- coding: utf-8 -*-
from threading import Thread
from datetime import datetime
from exceptions import TaskException
import logging
log = logging.getLogger(__name__)
class ControlJobs:
__jobs = []
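    # Registry of running jobs; each entry is a {jobname: TaskScheduling} dict.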
def stop(self, jobname):
log.debug("Job name %s" % jobname)
log.debug(self.__jobs)
for idx, th in enumerate(self.__jobs):
if jobname in th:
th[jobname]._stop()
del self.__jobs[idx]
break
def addjob(self, job):
self.__jobs.append(job)
log.debug(self.__jobs)
stopjobs = ControlJobs()
class TaskScheduling(Thread):
"""
Os parâmetros aceitos são:
seconds, minutes, hour, time_of_the_day, day_of_the_week, day_of_the_month
Descrição:
O parâmetro seconds define que a função será executada repetidamente na frequência do valor passado em segundos
ex: seconds="20", será executado de 20 em 20 segundos
O parâmetro minutes define que a função será executada repetidamente na frequência do valor passado em minutos
ex: minutes="20", será executado de 20 em 20 minutos
O parâmetro hour define que a função será executada repetidamente na frequência do valor passado em horas
ex: hour="2", será executado de 2 em 2 horas
obs: Esses três parâmetros não podem ser combinados, nem entre e nem com os dois abaixo.
O parâmetro time_of_the_day define que a função será executada todo dia em um horário específico, que deve ser
passado no seguinte formato hh:mm:ss.(hh: 0..23 ; mm: 0..59, ss: 0..59)
ex: time_of_the_day="14:15:00", será executada todo dia às quartoze horas e quinze minutos
O parâmetro day_of_the_week define que a função será executada no dia da semana passado como valor.
Os valores possíveis são: Su(Sunday/Domingo), M(Monday/Segunda), Tu(Tuesday/Terça), W(Wednesday/Quarta),
Th(Thursday/Quinta), F(Friday/Sexta), Sa(Saturday/Sábado) em maiúsculo.
Tem que ser combinado com o parâmetro time_of_the_day para especificar a hora, minuto e segundo daquele
dia da semana.
ex: day_of_the_week="W" time_of_the_day="22:00:00", Será executado toda quarta às vinte e dua horas.
Exemplos de uso:
Basta decorar a função ou método da classe que se queira agendar.
@TaskScheduling(seconds="30")
def do_something(a):
print("Print do_something: %s" % a)
import time
time.sleep(6)
print("terminou do_something")
do_something()
*****************************************
class Teste(object):
@TaskScheduling(time_of_the_day="08:30:00")
def some_function(self, a):
print("Print some_function: %s" % a)
import time
print("Função some_function")
time.sleep(10)
print("terminou some_function")
obj = Teste()
obj.some_function("b")
"""
days = {"M": 0, "Tu": 1, "W": 2, "Th": 3, "F": 4, "Sa": 5, "Su": 6}
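    # Day indices match datetime.weekday(): Monday == 0 ... Sunday == 6.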
    # receives the decorator's parameters
def __init__(self, *arguments, **argumentsMap):
Thread.__init__(self)
self.args = arguments
self.argumentsMap = argumentsMap
self.threadname = argumentsMap["name"]
self.execute = False
log.debug("Arguments: %r:" % self.argumentsMap)
    # The actual decorator: it receives the decorated function; since this is a class, __call__ must be implemented
def __call__(self, function):
self.function = function
        # receives the decorated function's arguments
def task(*functionargs, **functionArgumentsMap):
self.functionargs = functionargs
self.functionArgumentsMap = functionArgumentsMap
stopjobs.addjob({self.threadname: self})
self.start()
return task
def run(self):
try:
log.debug("JOB RUNNING")
import time
self.execute = True
while self.execute:
interval = self.calculateInterval()
log.debug("Interval: %r in seconds" % interval)
time.sleep(interval)
self.function(*self.functionargs, **self.functionArgumentsMap)
except TaskException as t:
log.debug(t)
def _stop(self):
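        # Cooperative stop: clears the run-loop flag so the thread exits after
        # its current sleep/iteration instead of being killed outright.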
log.debug("STOP")
self.execute = False
return self.execute
def calculateInterval(self):
"""
        Determines, in seconds, how long to wait before the next task run.
        When the parameter that determines the next run time is time_of_the_day,
        the auxCalculate method is called to compute that delay.
        :return:
"""
if "day_of_the_week" in self.argumentsMap:
if "hour" in self.argumentsMap or "minutes" in self.argumentsMap or "seconds" in self.argumentsMap:
raise TaskException("Parametros extras que não combinam")
if "time_of_the_day" in self.argumentsMap:
return self.calculateDayOfTheWeek(self.argumentsMap["day_of_the_week"],
self.argumentsMap["time_of_the_day"])
else:
raise TaskException("Parâmetro time_of_the_day não está presente")
elif "time_of_the_day" in self.argumentsMap:
if "hour" in self.argumentsMap or "minutes" in self.argumentsMap or "seconds" in self.argumentsMap:
raise TaskException("Parametros extras que não combinam")
return self.auxCalculate(self.argumentsMap["time_of_the_day"])[0]
elif "hour" in self.argumentsMap:
if "seconds" in self.argumentsMap or "minutes" in self.argumentsMap:
raise TaskException("Parametros extras que não combinam")
return int(self.argumentsMap["hour"]) * 3600
elif "minutes" in self.argumentsMap:
if "seconds" in self.argumentsMap:
raise TaskException("Parametros extras que não combinam")
else:
return int(self.argumentsMap["minutes"]) * 60
elif "seconds" in self.argumentsMap:
log.debug("seconds")
return int(self.argumentsMap["seconds"])
else:
raise TaskException("Parâmetro(s): %r inválidos" % self.argumentsMap)
def calculateDayOfTheWeek(self, day_of_the_week, time_of_the_day):
entrada = day_of_the_week
weekday = datetime.now().weekday()
dif = self.days[entrada] - weekday
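        # dif > 0: the target weekday is still ahead in the current week;
        # dif < 0: it has already passed, so we wrap around to next week.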
sleep, diference = self.auxCalculate(time_of_the_day)
if self.days[entrada] == weekday:
if diference > 0:
return sleep
else:
                return sleep + (6 * (24*3600))  # 24 hours converted to seconds
elif self.days[entrada] > weekday:
if diference > 0:
return sleep + (dif * (24*3600))
else:
                # If the target is already the next day, just return sleep; the time until that day's scheduled hour is already computed.
if dif == 1:
return sleep
else:
                    return sleep + ((dif-1) * (24*3600))  # 24 hours converted to seconds
else:
            # number of days of difference
resp = 7 - abs(dif)
if diference > 0:
return sleep + (resp * (24*3600))
else:
#Se a entrada já é o dia seguinte, basta retornar o sleep pois já está calculada o tempo para o horário do outro dia.
if resp == 1:
return sleep
else:
                    return sleep + ((resp-1) * (24*3600))  # 24 hours converted to seconds
def auxCalculate(self, time_of_the_day):
"""
        Returns the time in seconds so that the task always runs at the chosen time of day.
:param time_of_the_day:
:return: sleep_time
"""
try:
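            # weights to convert an "hh:mm:ss" string into seconds via zip + sum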
times = [3600, 60, 1]
one_day_has = '24:00:00'.split(":")
time_day = sum([a*b for a, b in zip(times, [int(i) for i in one_day_has])])
aux_time = time_of_the_day.split(":")
time_want = sum([a*b for a, b in zip(times, [int(i) for i in aux_time])])
            # convert the current time to seconds
hjf = datetime.now().strftime("%H:%M:%S").split(":")
now = sum([a*b for a, b in zip(times, [int(i) for i in hjf])])
            # difference between the current time and the desired time, in seconds
diference = time_want - now
sleep_time = None
if diference < 0:
#só será executado no outro dia
sleep_time = time_day - (diference * (-1))
else:
#ainda será executado no mesmo dia
sleep_time = diference
except TaskException as t:
log.debug(t)
return sleep_time, diference
| [
"[email protected]"
] | |
91c2e382f455de622a8bfb58b1df4f5bbe6b01ff | e13a79dec2668c1870b3fea05f071fe872d400f0 | /pde/storage/tests/test_generic_storages.py | 474649dd328980f34d7df91ecac637408b9e3bd6 | [
"MIT"
] | permissive | yiweizhang1025/py-pde | b27cc0b058b50d6af921e1ea84bf59a5bb0ff370 | 3862a35505b9ce4d62557bc65dfedd40638a90f3 | refs/heads/master | 2023-03-14T17:21:07.004742 | 2021-03-15T15:33:47 | 2021-03-15T15:33:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,739 | py | """
.. codeauthor:: David Zwicker <[email protected]>
"""
import functools
import numpy as np
import pytest
from pde import DiffusionPDE, FileStorage, MemoryStorage, UnitGrid
from pde.fields import FieldCollection, ScalarField, Tensor2Field, VectorField
from pde.tools.misc import module_available
def test_storage_write(tmp_path):
""" test simple memory storage """
dim = 5
grid = UnitGrid([dim])
field = ScalarField(grid)
storage_classes = {"MemoryStorage": MemoryStorage}
if module_available("h5py"):
file_path = tmp_path / "test_storage_write.hdf5"
storage_classes["FileStorage"] = functools.partial(FileStorage, file_path)
for name, storage_cls in storage_classes.items():
storage = storage_cls(info={"a": 1})
storage.start_writing(field, info={"b": 2})
storage.append(field.copy(data=np.arange(dim)), 0)
storage.append(field.copy(data=np.arange(dim)), 1)
storage.end_writing()
assert not storage.has_collection
np.testing.assert_allclose(storage.times, np.arange(2))
for f in storage:
np.testing.assert_array_equal(f.data, np.arange(dim))
for i in range(2):
np.testing.assert_array_equal(storage[i].data, np.arange(dim))
assert {"a": 1, "b": 2}.items() <= storage.info.items()
storage = storage_cls()
storage.clear()
for i in range(3):
storage.start_writing(field)
storage.append(field.copy(data=np.arange(dim) + i), i)
storage.end_writing()
np.testing.assert_allclose(
storage.times, np.arange(3), err_msg="storage class: " + name
)
def test_storage_truncation(tmp_path):
""" test whether simple trackers can be used """
file = tmp_path / "test_storage_truncation.hdf5"
for truncate in [True, False]:
storages = [MemoryStorage()]
if module_available("h5py"):
storages.append(FileStorage(file))
tracker_list = [s.tracker(interval=0.01) for s in storages]
grid = UnitGrid([8, 8])
state = ScalarField.random_uniform(grid, 0.2, 0.3)
pde = DiffusionPDE()
pde.solve(state, t_range=0.1, dt=0.001, tracker=tracker_list)
if truncate:
for storage in storages:
storage.clear()
pde.solve(state, t_range=[0.1, 0.2], dt=0.001, tracker=tracker_list)
times = np.arange(0.1, 0.201, 0.01)
if not truncate:
times = np.r_[np.arange(0, 0.101, 0.01), times]
for storage in storages:
msg = f"truncate={truncate}, storage={storage}"
np.testing.assert_allclose(storage.times, times, err_msg=msg)
assert not storage.has_collection
def test_storing_extract_range(tmp_path):
""" test methods specific to FieldCollections in memory storage """
sf = ScalarField(UnitGrid([1]))
storage_classes = {"MemoryStorage": MemoryStorage}
if module_available("h5py"):
file_path = tmp_path / "test_storage_write.hdf5"
storage_classes["FileStorage"] = functools.partial(FileStorage, file_path)
for storage_cls in storage_classes.values():
# store some data
s1 = storage_cls()
s1.start_writing(sf)
s1.append(sf.copy(data=np.array([0])), 0)
s1.append(sf.copy(data=np.array([2])), 1)
s1.end_writing()
np.testing.assert_equal(s1[0].data, 0)
np.testing.assert_equal(s1[1].data, 2)
np.testing.assert_equal(s1[-1].data, 2)
np.testing.assert_equal(s1[-2].data, 0)
with pytest.raises(IndexError):
s1[2]
with pytest.raises(IndexError):
s1[-3]
# test extraction
s2 = s1.extract_time_range()
assert s2.times == list(s1.times)
np.testing.assert_allclose(s2.data, s1.data)
s3 = s1.extract_time_range(0.5)
assert s3.times == s1.times[:1]
np.testing.assert_allclose(s3.data, s1.data[:1])
s4 = s1.extract_time_range((0.5, 1.5))
assert s4.times == s1.times[1:]
np.testing.assert_allclose(s4.data, s1.data[1:])
def test_storing_collection(tmp_path):
""" test methods specific to FieldCollections in memory storage """
grid = UnitGrid([2, 2])
f1 = ScalarField.random_uniform(grid, 0.1, 0.4, label="a")
f2 = VectorField.random_uniform(grid, 0.1, 0.4, label="b")
f3 = Tensor2Field.random_uniform(grid, 0.1, 0.4, label="c")
fc = FieldCollection([f1, f2, f3])
storage_classes = {"MemoryStorage": MemoryStorage}
if module_available("h5py"):
file_path = tmp_path / "test_storage_write.hdf5"
storage_classes["FileStorage"] = functools.partial(FileStorage, file_path)
for storage_cls in storage_classes.values():
# store some data
storage = storage_cls()
storage.start_writing(fc)
storage.append(fc, 0)
storage.append(fc, 1)
storage.end_writing()
assert storage.has_collection
assert storage.extract_field(0)[0] == f1
assert storage.extract_field(1)[0] == f2
assert storage.extract_field(2)[0] == f3
assert storage.extract_field(0)[0].label == "a"
assert storage.extract_field(0, label="new label")[0].label == "new label"
assert storage.extract_field(0)[0].label == "a" # do not alter label
assert storage.extract_field("a")[0] == f1
assert storage.extract_field("b")[0] == f2
assert storage.extract_field("c")[0] == f3
with pytest.raises(ValueError):
storage.extract_field("nonsense")
def test_storage_apply(tmp_path):
""" test the apply function of StorageBase """
grid = UnitGrid([2])
field = ScalarField(grid)
storage_classes = {"None": None, "MemoryStorage": MemoryStorage}
if module_available("h5py"):
file_path = tmp_path / "test_storage_apply.hdf5"
storage_classes["FileStorage"] = functools.partial(FileStorage, file_path)
s1 = MemoryStorage()
s1.start_writing(field, info={"b": 2})
s1.append(field.copy(data=np.array([0, 1])), 0)
s1.append(field.copy(data=np.array([1, 2])), 1)
s1.end_writing()
for name, storage_cls in storage_classes.items():
out = None if storage_cls is None else storage_cls()
s2 = s1.apply(lambda x: x + 1, out=out)
assert storage_cls is None or s2 is out
assert len(s2) == 2
np.testing.assert_allclose(s2.times, s1.times)
assert s2[0] == ScalarField(grid, [1, 2]), name
assert s2[1] == ScalarField(grid, [2, 3]), name
# test empty storage
s1 = MemoryStorage()
s2 = s1.apply(lambda x: x + 1)
assert len(s2) == 0
def test_storage_copy(tmp_path):
""" test the copy function of StorageBase """
grid = UnitGrid([2])
field = ScalarField(grid)
storage_classes = {"None": None, "MemoryStorage": MemoryStorage}
if module_available("h5py"):
file_path = tmp_path / "test_storage_apply.hdf5"
storage_classes["FileStorage"] = functools.partial(FileStorage, file_path)
s1 = MemoryStorage()
s1.start_writing(field, info={"b": 2})
s1.append(field.copy(data=np.array([0, 1])), 0)
s1.append(field.copy(data=np.array([1, 2])), 1)
s1.end_writing()
for name, storage_cls in storage_classes.items():
out = None if storage_cls is None else storage_cls()
s2 = s1.copy(out=out)
assert storage_cls is None or s2 is out
assert len(s2) == 2
np.testing.assert_allclose(s2.times, s1.times)
assert s2[0] == s1[0], name
assert s2[1] == s1[1], name
# test empty storage
s1 = MemoryStorage()
s2 = s1.copy()
assert len(s2) == 0
| [
"[email protected]"
] | |
52f20985a5f0c10e33313979e29aaeaca9acc59f | d806dd4a6791382813d2136283a602207fb4b43c | /sirius/blueprints/api/remote_service/tula/test/script.py | c571a235f8d6348648f5a6cb22945332ad0645a8 | [] | no_license | MarsStirner/sirius | 5bbf2a03dafb7248db481e13aff63ff989fabbc2 | 8839460726cca080ca8549bacd3a498e519c8f96 | refs/heads/master | 2021-03-24T12:09:14.673193 | 2017-06-06T16:28:53 | 2017-06-06T16:28:53 | 96,042,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,263 | py | #! coding:utf-8
"""
@author: BARS Group
@date: 25.10.2016
"""
from sirius.app import app
from sirius.blueprints.api.local_service.risar.active.test.test_data import \
get_mr_appointment_data
from sirius.blueprints.api.local_service.risar.passive.test.request import \
send_event_remote, request_local
from sirius.blueprints.api.local_service.risar.passive.test.test_data import \
get_sch_ticket_data_required, get_send_to_mis_card_data, \
get_send_to_mis_first_ticket25_data, get_send_to_mis_measures_data, \
get_send_to_mis_epicrisis_data, get_send_to_mis_second_ticket25_data, \
get_send_to_mis_pc_ticket25_data, get_send_to_mis_first_checkup_data, \
get_send_to_mis_second_checkup_data, get_send_to_mis_pc_checkup_data
from sirius.blueprints.api.remote_service.tula.passive.checkup_first_ticket25.test.request import \
edit_checkup_first_ticket25
from sirius.blueprints.api.remote_service.tula.passive.checkup_first_ticket25.test.test_data import \
get_first_ticket25_data_more
from sirius.blueprints.api.remote_service.tula.passive.childbirth.test.request import \
create_childbirth, edit_childbirth
from sirius.blueprints.api.remote_service.tula.passive.childbirth.test.test_data import \
get_childbirth_data_required, get_childbirth_data_more
from sirius.blueprints.api.remote_service.tula.passive.client.test.request import \
create_client, edit_client
from sirius.blueprints.api.remote_service.tula.passive.client.test.test_data import \
get_client_data_required, get_client_data_more
from sirius.blueprints.api.remote_service.tula.passive.doctor.test.request import \
create_doctor, edit_doctor, delete_doctor
from sirius.blueprints.api.remote_service.tula.passive.doctor.test.test_data import \
get_doctor_data_required, get_doctor_data_more
from sirius.blueprints.api.remote_service.tula.passive.hospitalization.test.request import \
create_hospitalization, edit_hospitalization
from sirius.blueprints.api.remote_service.tula.passive.hospitalization.test.test_data import \
get_meas_hosp_data_required, get_meas_hosp_data_more
from sirius.blueprints.api.remote_service.tula.passive.organization.test.request import \
create_organization, edit_organization, delete_organization
from sirius.blueprints.api.remote_service.tula.passive.organization.test.test_data import \
get_organization_data_required, get_organization_data_more
from sirius.blueprints.api.remote_service.tula.passive.research.test.request import \
create_research, edit_research
from sirius.blueprints.api.remote_service.tula.passive.research.test.test_data import \
get_meas_research_data_required, get_meas_research_data_more
from sirius.blueprints.api.remote_service.tula.passive.specialists_checkup.test.request import \
create_sp_checkup, edit_sp_checkup
from sirius.blueprints.api.remote_service.tula.passive.specialists_checkup.test.test_data import \
get_sp_checkup_data_required, get_sp_checkup_data_more
from sirius.blueprints.api.test.connect import make_login, release_token
risar_session = None
sirius_session = (None, None)
class _TestTula:
def test_mr_auth(self):
global risar_session
if risar_session:
return
with app.app_context():
with make_login() as sess:
risar_session = sess
print 'test_risar_auth', sess
def test_full_cycle(self, testapp):
ext_org_id = org_id = 111
# mis_to_mr_organisation(testapp, ext_org_id)
ext_doctor_id = doctor_id = 112
# mis_to_mr_doctor(testapp, ext_org_id, ext_doctor_id)
ext_client_id = 113
# mis_to_mr_client(testapp, ext_client_id)
client_id = 110
        sch_ticket_id = 3928  # 09:00 23.11.16 Test User (obstetrician-gynecologist)
        # create an appointment in the web UI (http://10.1.2.13:6600/patients/search/)
# mr_to_mis_sch_ticket(testapp, org_id, doctor_id, client_id, sch_ticket_id)
# card_id = !mr_create_card(testapp, client_id)
        card_id = 468  # create a card in the web UI  # 690
ext_card_id = 222
# mr_to_mis_card(testapp, client_id, card_id)
# !mr_create_first_checkup(testapp, card_id)
        first_checkup_id = 4345  # create the first checkup in the web UI
        second_checkup_id = 0  # create the second checkup in the web UI
        pc_checkup_id = 0  # create the PC checkup in the web UI
# mr_to_mis_first_checkup(testapp, card_id, first_checkup_id)
# mr_to_mis_first_ticket25(testapp, card_id, first_checkup_id)
ext_first_checkup_id = 222
# mr_to_mis_second_ticket25(testapp, card_id, second_checkup_id)
# mr_to_mis_pc_ticket25(testapp, card_id, pc_checkup_id)
# mr_to_mis_first_checkup(testapp, card_id, first_checkup_id)
# mr_to_mis_second_checkup(testapp, card_id, second_checkup_id)
# mr_to_mis_pc_checkup(testapp, card_id, pc_checkup_id)
        # create referrals in the web UI - checkup, hospitalization, research studies
# mr_to_mis_measures(testapp, card_id)
# ch_event_measure_id = 6255
# res_event_measure_id = 6258
ext_ch_event_measure_id = 117
ext_res_event_measure_id = 118
ext_sp_checkup_id = 114
# mis_to_mr_meas_sp_checkup(testapp, ext_card_id, ext_org_id, ext_doctor_id,
# ext_ch_event_measure_id, ext_sp_checkup_id)
# ext_hosp_id = 115
# mis_to_mr_meas_hosp(testapp, card_id, ext_org_id, ext_doctor_id, ext_ch_event_measure_id, ext_hosp_id)
ext_research_id = 116
# mis_to_mr_meas_research(testapp, ext_card_id, ext_org_id, ext_doctor_id,
# ext_res_event_measure_id, ext_research_id)
# mis_to_mr_first_ticket25(testapp, ext_card_id, ext_org_id, ext_doctor_id, ext_first_checkup_id)
# mis_to_mr_second_ticket25
# mis_to_mr_pc_ticket25
# mis_to_mr_childbirth(testapp, ext_card_id, ext_org_id, ext_doctor_id)
# mr_to_mis_epicrisis(testapp, card_id)
def mis_to_mr_organisation(testapp, org_id):
# create_organization(testapp, risar_session, get_organization_data_required(org_id))
# delete_organization(testapp, risar_session, org_id)
edit_organization(testapp, risar_session, org_id, get_organization_data_more(org_id))
def mis_to_mr_doctor(testapp, org_id, doctor_id):
# create_doctor(testapp, risar_session, get_doctor_data_required(org_id, doctor_id))
# delete_doctor(testapp, risar_session, org_id, doctor_id)
edit_doctor(testapp, risar_session, org_id, doctor_id, get_doctor_data_more(org_id, doctor_id))
def mis_to_mr_client(testapp, client_id):
# create_client(testapp, risar_session, get_client_data_required(client_id))
edit_client(testapp, risar_session, client_id, get_client_data_more(client_id))
def mr_make_appointment(testapp, client_id, ticket_id, doctor_id):
is_delete = False
make_appointment(risar_session, get_mr_appointment_data(client_id, ticket_id, doctor_id, is_delete))
def mr_to_mis_sch_ticket(testapp, org_id, doctor_id, client_id, ticket_id):
is_delete = False
send_event_remote(testapp, risar_session, get_sch_ticket_data_required(
is_delete, client_id, ticket_id, org_id, doctor_id
))
# def mr_create_card(testapp, client_id, sch_client_ticket_id=None):
# res = create_card(risar_session, client_id, sch_client_ticket_id)
# card_id = res['result']['card_id']
# return card_id
def mr_to_mis_card(testapp, client_id, card_id):
is_create = False
request_local(testapp, risar_session, get_send_to_mis_card_data(client_id, card_id, is_create))
# def mr_create_first_checkup(testapp, card_id):
# res = create_first_checkup(risar_session, card_id, get_first_checkup_data_required())
# checkup_id = res['result']['checkup_id']
# return checkup_id
def mr_to_mis_first_ticket25(testapp, card_id, checkup_id):
is_create = True
request_local(testapp, risar_session, get_send_to_mis_first_ticket25_data(card_id, checkup_id, is_create))
def mr_to_mis_second_ticket25(testapp, card_id, checkup_id):
is_create = True
request_local(testapp, risar_session, get_send_to_mis_second_ticket25_data(card_id, checkup_id, is_create))
def mr_to_mis_pc_ticket25(testapp, card_id, checkup_id):
is_create = True
request_local(testapp, risar_session, get_send_to_mis_pc_ticket25_data(card_id, checkup_id, is_create))
def mr_to_mis_first_checkup(testapp, card_id, checkup_id):
is_create = True
request_local(testapp, risar_session, get_send_to_mis_first_checkup_data(card_id, checkup_id, is_create))
def mr_to_mis_second_checkup(testapp, card_id, checkup_id):
is_create = True
request_local(testapp, risar_session, get_send_to_mis_second_checkup_data(card_id, checkup_id, is_create))
def mr_to_mis_pc_checkup(testapp, card_id, checkup_id):
is_create = True
request_local(testapp, risar_session, get_send_to_mis_pc_checkup_data(card_id, checkup_id, is_create))
def mr_to_mis_measures(testapp, card_id):
is_create = True
request_local(testapp, risar_session, get_send_to_mis_measures_data(card_id, is_create))
def mis_to_mr_meas_sp_checkup(testapp, card_id, org_id, doctor_id, event_measure_id, sp_checkup_id):
create_sp_checkup(testapp, risar_session, card_id, get_sp_checkup_data_required(
org_id, doctor_id, event_measure_id, sp_checkup_id))
# edit_sp_checkup(testapp, risar_session, card_id, sp_checkup_id, get_sp_checkup_data_more(
# org_id, doctor_id, event_measure_id, sp_checkup_id))
def mis_to_mr_meas_hosp(testapp, card_id, org_id, doctor_id, event_measure_id, meas_hosp_id):
create_hospitalization(testapp, risar_session, card_id, get_meas_hosp_data_required(
org_id, doctor_id, event_measure_id, meas_hosp_id))
edit_hospitalization(testapp, risar_session, card_id, meas_hosp_id, get_meas_hosp_data_more(
org_id, doctor_id, event_measure_id, meas_hosp_id))
def mis_to_mr_meas_research(testapp, card_id, org_id, doctor_id, event_measure_id, meas_research_id):
create_research(testapp, risar_session, card_id, get_meas_research_data_required(
org_id, doctor_id, event_measure_id, meas_research_id))
# edit_research(testapp, risar_session, card_id, meas_research_id, get_meas_research_data_more(
# org_id, doctor_id, event_measure_id, meas_research_id))
def mis_to_mr_first_ticket25(testapp, card_id, org_id, doctor_id, checkup_id):
edit_checkup_first_ticket25(testapp, risar_session, card_id, checkup_id, get_first_ticket25_data_more(
org_id, doctor_id, checkup_id))
def mis_to_mr_childbirth(testapp, card_id, org_id, doctor_id):
# create_childbirth(testapp, risar_session, card_id, get_childbirth_data_required(org_id, doctor_id))
edit_childbirth(testapp, risar_session, card_id, get_childbirth_data_more(org_id, doctor_id))
def mr_to_mis_epicrisis(testapp, card_id):
is_create = False
request_local(testapp, risar_session, get_send_to_mis_epicrisis_data(card_id, is_create))
| [
"[email protected]"
] | |
7b1bd474762dbf9fa0ad77e916a9a288222c806a | 44494598f8edcee0319f3b4ef69b704fbf6d88f2 | /code/twurtle/src/TestDCMotorRobot.py | aad26a3b8a287a62bb2e513d1e4b4b865f1e0879 | [] | no_license | whaleygeek/pyws | 3cebd7e88b41e14d9c1e4dbb8148de63dadbdd57 | e60724646e49287f1e12af609f325ac228b31512 | refs/heads/master | 2021-01-02T09:01:47.644851 | 2014-09-02T19:47:20 | 2014-09-02T19:47:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | # This is mainly to test that the packaging has worked for robot correctly
import robot
r = robot.MotorRobot(robot.DCMotorDrive(a1=11, a2=12, b1=13, b2=14))
r.test()
| [
"[email protected]"
] | |
a5f5ad934ab6b4548d185c57b55e75a4fe701d2d | 75dcb56e318688499bdab789262839e7f58bd4f6 | /_algorithms_challenges/pybites/bitesofpy-master/!201-300/239/test_fizzbuzz.py | 374796ea04fb39da68675115964e7be47e23b93c | [] | no_license | syurskyi/Algorithms_and_Data_Structure | 9a1f358577e51e89c862d0f93f373b7f20ddd261 | 929dde1723fb2f54870c8a9badc80fc23e8400d3 | refs/heads/master | 2023-02-22T17:55:55.453535 | 2022-12-23T03:15:00 | 2022-12-23T03:15:00 | 226,243,987 | 4 | 1 | null | 2023-02-07T21:01:45 | 2019-12-06T04:14:10 | Jupyter Notebook | UTF-8 | Python | false | false | 483 | py | from fizzbuzz import fizzbuzz
# write one or more pytest functions below, they need to start with test_
def test_fizzbuzz_base():
assert fizzbuzz(1) == 1
assert fizzbuzz(2) == 2
def test_fizzbuzz_fizz():
assert fizzbuzz(3) == 'Fizz'
assert fizzbuzz(6) == 'Fizz'
def test_fizzbuzz_buzz():
assert fizzbuzz(5) == 'Buzz'
assert fizzbuzz(10) == 'Buzz'
def test_fizzbuzz_fizzbuzz():
assert fizzbuzz(15) == 'Fizz Buzz'
assert fizzbuzz(30) == 'Fizz Buzz'
| [
"[email protected]"
] | |
516909e27870935ab937ccd022e1ac2e00a7cc98 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/sieve-big-2404.py | 36bdabeba62a66987aa786e8dfdb76e27f414dcd | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,752 | py | # A resizable list of integers
class Vector(object):
items: [int] = None
size: int = 0
def __init__(self:"Vector"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector", idx: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector") -> int:
return self.size
# A resizable list of integers
class Vector2(object):
items: [int] = None
items2: [int] = None
size: int = 0
size2: int = 0
def __init__(self:"Vector2"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector2") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector2") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector2") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector2", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector2", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector2", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector2", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector2", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector2", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector2") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector2") -> int:
return self.size
# A resizable list of integers
class Vector3(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
def __init__(self:"Vector3"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector3") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector3") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector3") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector3", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector3", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector3", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector3", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector3", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector3") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector3") -> int:
return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector4", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector4", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector4", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector4", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector4") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector4") -> int:
return self.size
# A resizable list of integers
class Vector5(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
items5: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
size5: int = 0
def __init__(self:"Vector5"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector5") -> int:
return len(self.items)
# Returns current capacity
def capacity5(self:"Vector5") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity5(self:"Vector5") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector5", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector5", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
def append_all(self:"Vector5", new_items: [int]) -> object:
item:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
item:int = 0
item2:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
for item in new_items:
self.append(item)
# Appends many items to end of vector
def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
item:int = 0
item2:int = 0
item3:int = 0
item4:int = 0
item5:int = 0
for item in new_items:
self.append(item)
# Removes an item from the middle of vector
def remove_at(self:"Vector5", idx: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Removes an item from the middle of vector
def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
if idx < 0:
return
while idx < self.size - 1:
self.items[idx] = self.items[idx + 1]
idx = idx + 1
self.size = self.size - 1
# Retrieves an item at a given index
def get(self:"Vector5", idx: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get2(self:"Vector5", idx: int, idx2: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
return self.items[idx]
# Retrieves an item at a given index
def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
return self.items[idx]
# Retrieves the current size of the vector
def length(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length2(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length3(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length4(self:"Vector5") -> int:
return self.size
# Retrieves the current size of the vector
def length5(self:"Vector5") -> int:
return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
doubling_limit:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
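# Design note: doubling keeps append amortized O(1), since n appends trigger
# only O(log n) resizes instead of O(n); the price is up to twice the memory
# of the base class's one-slot growth, hence "more memory-consuming" above.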
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector2") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector3") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector4") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
doubling_limit:int = 1000
doubling_limit2:int = 1000
doubling_limit3:int = 1000
doubling_limit4:int = 1000
doubling_limit5:int = 1000
# Overriding to do fewer resizes
def increase_capacity(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity2(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity3(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity4(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Overriding to do fewer resizes
def increase_capacity5(self:"DoublingVector5") -> int:
if (self.capacity() <= self.doubling_limit // 2):
self.items = self.items + self.items
else:
# If doubling limit has been reached, fall back to
# standard capacity increases
self.items = self.items + [0]
return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
v:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
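# Makes a vector in the range [i, j); the extra parameters are unused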
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
v:Vector = None
v2:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
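# Makes a vector in the range [i, j); the extra parameters are unused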
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
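# Makes a vector in the range [i, j); the extra parameters are unused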
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
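# Makes a vector in the range [i, j); the extra parameters are unused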
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
v = DoublingVector()
while i < j:
v.append(i)
i = i + 1
return v
# Sieve of Eratosthenes (not really: multiples are filtered out by repeated
# scan-and-remove passes over the vector rather than by marking a bit array)
def sieve(v:Vector) -> object:
i:int = 0
j:int = 0
k:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
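# Same filtering pass as sieve above; the extra parameters and locals are unused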
def sieve2(v:Vector, v2:Vector) -> object:
i:int = 0
i2:int = 0
j:int = 0
j2:int = 0
k:int = 0
k2:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
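# Same filtering pass as sieve above; the extra parameters and locals are unused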
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
j:int = 0
j2:int = 0
j3:int = 0
k:int = 0
k2:int = 0
k3:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
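# Same filtering pass as sieve above; the extra parameters and locals are unused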
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
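# Same filtering pass as sieve above; the extra parameters and locals are unused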
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
j:int = 0
j2:int = 0
j3:int = 0
j4:int = 0
j5:int = 0
k:int = 0
k2:int = 0
k3:int = 0
k4:int = 0
k5:int = 0
while i < v.length():
k = v.get(i)
j = i + 1
while j < v.length():
if v.get(j) % k == 0:
v.remove_at(j)
else:
j = j + 1
i = i + 1
# Input parameter
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print
while i < v.length():
print(v.get(i))
i = i + 1
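# Illustrative addition (a sketch, not part of the original benchmark):
# reuse v to show the doubling growth strategy. Assuming the base Vector
# starts with a single slot, as in the classic ChocoPy vector benchmark,
# capacity goes 1 -> 2 -> 4 over these three appends.
v = DoublingVector()
v.append(2)
v.append(3)
v.append(5)
print(v.capacity())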
| ["[email protected]"] | |
ef9b5b666e8749d77a7b64d744affbcd8a64a543 | 963cac9e78c4b742f7e7800200de8d1582799955 | /test/veetou/parserTests.py | 797c7be4f0f217a2fd7bbe13910a3ec1cd8fde32 | [] | no_license | ptomulik/veetou | c79ceb3ca3d7ef7b261b2219489b6f0a7a83e1fa | b30be2a604f4426f832ec9805547ecd6cc9083fe | refs/heads/master | 2021-01-22T17:28:57.271251 | 2019-01-05T01:46:43 | 2020-05-04T16:23:44 | 85,016,513 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,386 | py | #!/usr/bin/env python3
# -*- coding: utf8 -*-
import unittest
import veetou.parser as parser
class Test__Parser(unittest.TestCase):
def test__functions_symbols__1(self):
self.assertIs(parser.dictmatcher , parser.functions_.dictmatcher)
self.assertIs(parser.fullmatch , parser.functions_.fullmatch)
self.assertIs(parser.fullmatchdict , parser.functions_.fullmatchdict)
self.assertIs(parser.ifullmatch , parser.functions_.ifullmatch)
self.assertIs(parser.imatch , parser.functions_.imatch)
self.assertIs(parser.imatcher , parser.functions_.imatcher)
self.assertIs(parser.match , parser.functions_.match)
self.assertIs(parser.matchdict , parser.functions_.matchdict)
self.assertIs(parser.matcher , parser.functions_.matcher)
self.assertIs(parser.permutexpr , parser.functions_.permutexpr)
self.assertIs(parser.reentrant , parser.functions_.reentrant)
self.assertIs(parser.scatter , parser.functions_.scatter)
self.assertIs(parser.search , parser.functions_.search)
self.assertIs(parser.searchpd , parser.functions_.searchpd)
self.assertIs(parser.skipemptylines , parser.functions_.skipemptylines)
def test__parsererror_symbols__1(self):
self.assertIs(parser.ParserError, parser.parsererror_.ParserError)
def test__parser_symbols__1(self):
self.assertIs(parser.Parser, parser.parser_.Parser)
self.assertIs(parser.RootParser, parser.parser_.RootParser)
def test__addressparser__1(self):
self.assertIs(parser.AddressParser, parser.addressparser_.AddressParser)
def test__contactparser__1(self):
self.assertIs(parser.ContactParser, parser.contactparser_.ContactParser)
def test__footerparser__1(self):
self.assertIs(parser.FooterParser, parser.footerparser_.FooterParser)
def test__headerparser__1(self):
self.assertIs(parser.HeaderParser, parser.headerparser_.HeaderParser)
def test__keymapparser__1(self):
self.assertIs(parser.KeyMapParser, parser.keymapparser_.KeyMapParser)
def test__pageparser__1(self):
self.assertIs(parser.PageParser, parser.pageparser_.PageParser)
def test__preambleparser__1(self):
self.assertIs(parser.PreambleParser, parser.preambleparser_.PreambleParser)
def test__reportparser__1(self):
self.assertIs(parser.ReportParser, parser.reportparser_.ReportParser)
def test__sheetparser__1(self):
self.assertIs(parser.SheetParser, parser.sheetparser_.SheetParser)
def test__summaryparser__1(self):
self.assertIs(parser.SummaryParser, parser.summaryparser_.SummaryParser)
def test__tableparser__1(self):
self.assertIs(parser.TableParser, parser.tableparser_.TableParser)
def test__tbodyparser__1(self):
self.assertIs(parser.TbodyParser, parser.tbodyparser_.TbodyParser)
def test__thparser__1(self):
self.assertIs(parser.ThParser, parser.thparser_.ThParser)
def test__trparser__1(self):
self.assertIs(parser.TrParser, parser.trparser_.TrParser)
if __name__ == '__main__':
unittest.main()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set syntax=python expandtab tabstop=4 shiftwidth=4:
| ["[email protected]"] | |
832a298328bc29b34d0110a3029f906ad483a34d | 37c3b81ad127c9e3cc26fa9168fda82460ca9bda | /Baekjoon/boj_20055_컨베이어 벨트 위의 로봇.py | dfdb3152402dc2cfac4c545e7cd087fba933dcf0 | [] | no_license | potomatoo/TIL | 5d85b69fdaed68966db7cfe2a565b7c64ed3e816 | 395dc190fa13e5ed036e1e3c7d9e0bc2e1ee4d6c | refs/heads/master | 2021-07-08T16:19:40.410097 | 2021-04-19T02:33:40 | 2021-04-19T02:33:40 | 238,872,774 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 751 | py | def work():
    global cnt
    while True:
        # Step 1: rotate the belt one cell, carrying robots with it; a
        # robot carried onto the drop-off cell (index N-1) falls off.
        board.rotate(1)
        robot.rotate(1)
        robot[N-1] = 0
        # Step 2: from the drop-off end backwards, advance each robot if
        # the next cell is empty and still has durability; each move wears
        # the destination cell down by one.
        for i in range(N-2, -1, -1):
            if robot[i] and not robot[i+1] and board[i+1] > 0:
                board[i+1] -= 1
                robot[i+1] = 1
                robot[i] = 0
        robot[N-1] = 0
        # Step 3: load a new robot onto cell 0 if it can still bear one.
        if not robot[0] and board[0] > 0:
            board[0] -= 1
            robot[0] = 1
        # Step 4: stop once at least K cells have zero durability left.
        flag = 0
        for i in range(len(board)):
            if board[i] == 0:
                flag += 1
        if flag >= K:
            break
        cnt += 1
from collections import deque

N, K = map(int, input().split())
board = deque(map(int, input().split()))  # durability of the 2N belt cells
cnt = 1  # current step number; printed once the stop condition is met
robot = deque([0] * len(board))  # occupancy flags; only indices 0..N-1 ever hold a robot
work()
print(cnt)
| ["[email protected]"] | |
9305c3a78026026cae6e03d11b5982d9cee7f094 | 0617c812e9bf58a2dbc1c1fef35e497b054ed7e4 | /venv/Lib/site-packages/pyrogram/raw/functions/stats/get_megagroup_stats.py | 320398dd3f9fb86f271aeb14aaca77b3bc298f8c | [] | no_license | howei5163/my_framework | 32cf510e19a371b6a3a7c80eab53f10a6952f7b2 | 492c9af4ceaebfe6e87df8425cb21534fbbb0c61 | refs/heads/main | 2023-01-27T14:33:56.159867 | 2020-12-07T10:19:33 | 2020-12-07T10:19:33 | 306,561,184 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,553 | py | # Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2020 Dan <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from pyrogram.raw.core.primitives import Int, Long, Int128, Int256, Bool, Bytes, String, Double, Vector
from pyrogram.raw.core import TLObject
from pyrogram import raw
from typing import List, Union, Any
# # # # # # # # # # # # # # # # # # # # # # # #
# !!! WARNING !!! #
# This is a generated file! #
# All changes made in this file will be lost! #
# # # # # # # # # # # # # # # # # # # # # # # #
class GetMegagroupStats(TLObject): # type: ignore
"""Telegram API method.
Details:
- Layer: ``117``
- ID: ``0xdcdf8607``
Parameters:
channel: :obj:`InputChannel <pyrogram.raw.base.InputChannel>`
dark (optional): ``bool``
Returns:
:obj:`stats.MegagroupStats <pyrogram.raw.base.stats.MegagroupStats>`
"""
__slots__: List[str] = ["channel", "dark"]
ID = 0xdcdf8607
QUALNAME = "pyrogram.raw.functions.stats.GetMegagroupStats"
def __init__(self, *, channel: "raw.base.InputChannel", dark: Union[None, bool] = None) -> None:
self.channel = channel # InputChannel
self.dark = dark # flags.0?true
@staticmethod
def read(data: BytesIO, *args: Any) -> "GetMegagroupStats":
flags = Int.read(data)
dark = True if flags & (1 << 0) else False
channel = TLObject.read(data)
return GetMegagroupStats(channel=channel, dark=dark)
def write(self) -> bytes:
data = BytesIO()
data.write(Int(self.ID, False))
flags = 0
flags |= (1 << 0) if self.dark is not None else 0
data.write(Int(flags))
data.write(self.channel.write())
return data.getvalue()
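# Usage sketch (an illustrative addition, not part of the generated file):
# with Pyrogram 1.x, raw TL functions like this one are dispatched through
# Client.send(); the session name and supergroup username are hypothetical.
#
#     from pyrogram import Client
#     from pyrogram.raw import functions
#
#     async def fetch_megagroup_stats():
#         async with Client("my_account") as app:
#             channel = await app.resolve_peer("hypothetical_supergroup")
#             return await app.send(
#                 functions.stats.GetMegagroupStats(channel=channel)
#             )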
| ["houwei5163"] | houwei5163
b645ed1a0ad19262304bef16a69381cbb05cbc2c | 4a211e279ec89239033c5fe2d6d8d3e49b48d369 | salvo/src/lib/job_control_loader.py | d179d460c0a59317e8ae1b4f4efaccb9bd07b088 | ["Apache-2.0"] | permissive | envoyproxy/envoy-perf | cfb1e8f7af806600f11ebc235c1a72939420b087 | d131bc2f1a7f8ae4f640da30fd30c027735d9788 | refs/heads/main | 2023-08-31T14:02:50.891888 | 2023-08-24T16:19:26 | 2023-08-24T16:19:26 | 94,845,161 | 109 | 29 | Apache-2.0 | 2023-08-24T16:19:28 | 2017-06-20T03:20:02 | Python | UTF-8 | Python | false | false | 3,111 | py | """This object abstracts the loading of json strings into protobuf objects."""
import json
import logging
import yaml
from google.protobuf import json_format
import api.control_pb2 as proto_control
log = logging.getLogger(__name__)
def _load_json_doc(filename: str) -> proto_control.JobControl:
"""Load a disk file as JSON.
This function reads the specified filename and parses the contents
as JSON.
Args:
filename: The file whose contents are to be read as JSON data
Returns:
A JobControl object populated with the contents from the
specified JSON file
"""
contents = None
log.debug(f"Opening JSON file {filename}")
try:
with open(filename, 'r') as json_doc:
contents = json_format.Parse(json_doc.read(), proto_control.JobControl())
except FileNotFoundError as file_not_found:
log.exception(f"Unable to load {filename}: {file_not_found}")
except json_format.Error as json_parse_error:
log.exception(f"Unable to parse JSON contents {filename}: {json_parse_error}")
return contents
def _load_yaml_doc(filename: str) -> proto_control.JobControl:
"""Load a disk file as YAML.
This function reads the specified filename and parses the contents
as YAML.
Args:
filename: The file whose contents are to be read as YAML data
Returns:
A JobControl object populated with the contents from the
specified YAML file
"""
log.debug(f"Opening YAML file {filename}")
contents = None
try:
with open(filename, 'r') as yaml_doc:
contents = yaml.safe_load(yaml_doc.read())
contents = json_format.Parse(json.dumps(contents), proto_control.JobControl())
  except FileNotFoundError as file_not_found:
    log.exception(f"Unable to load {filename}: {file_not_found}")
  except yaml.YAMLError as yaml_error:
    # Malformed YAML would otherwise escape as an uncaught exception;
    # catch it so this helper returns None on parse failure, matching
    # the JSON path's contract.
    log.exception(f"Unable to parse YAML contents {filename}: {yaml_error}")
  except json_format.Error as yaml_parse_error:
    log.exception(f"Unable to map YAML contents of {filename} onto JobControl: {yaml_parse_error}")
return contents
def load_control_doc(filename: str) -> proto_control.JobControl:
"""Return a JobControl object from the identified filename.
This function uses the extension of the specified file to read its
contents as YAML or JSON
Args:
filename: The file whose contents are to be read and parsed as
a Job Control object.
Returns:
A JobControl object populated with the contents from the
specified filename
"""
contents = None
# Try loading the contents based on the file extension
if filename.endswith('.json'):
log.debug(f"Loading JSON file {filename}")
return _load_json_doc(filename)
elif filename.endswith('.yaml'):
log.debug(f"Loading YAML file {filename}")
return _load_yaml_doc(filename)
else:
log.debug(f"Auto-detecting contents of {filename}")
# Attempt to autodetect the contents
try:
contents = _load_json_doc(filename)
except json_format.Error:
log.info(f"Parsing {filename} as JSON failed. Trying YAML")
if not contents:
try:
contents = _load_yaml_doc(filename)
except json_format.Error:
log.info(f"Parsing {filename} as YAML failed.")
return contents
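if __name__ == '__main__':
  # Minimal demonstration (an illustrative addition, not part of salvo's
  # library surface): parse the control document named on the command line
  # and print the resulting JobControl message. The default file name is
  # hypothetical.
  import sys
  logging.basicConfig(level=logging.DEBUG)
  target = sys.argv[1] if len(sys.argv) > 1 else 'job_control.yaml'
  print(load_control_doc(target))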
| ["[email protected]"] |