Dataset schema (one column per line: name, dtype, observed range):

blob_id: string, length 40
directory_id: string, length 40
path: string, length 3 to 616
content_id: string, length 40
detected_licenses: list, length 0 to 112
license_type: string, 2 classes
repo_name: string, length 5 to 115
snapshot_id: string, length 40
revision_id: string, length 40
branch_name: string, 777 classes
visit_date: timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38
revision_date: timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00
committer_date: timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06
github_id: int64, 4.92k to 681M, nullable
star_events_count: int64, 0 to 209k
fork_events_count: int64, 0 to 110k
gha_license_id: string, 22 classes
gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable
gha_created_at: timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable
gha_language: string, 149 classes
src_encoding: string, 26 classes
language: string, 1 class
is_vendor: bool, 2 classes
is_generated: bool, 2 classes
length_bytes: int64, 3 to 10.2M
extension: string, 188 classes
content: string, length 3 to 10.2M
authors: list, length 1 to 1
author_id: string, length 1 to 132
534a4e3a572af24eb566340df6d9ff2f6a779074
|
e1dd0997239951d4d459b1ba0229493512b0b331
|
/mds_py/mds-env/lib/python3.11/site-packages/cleo/io/inputs/token_parser.py
|
1b0f702287034dd015189b66f64297578fa1aa06
|
[] |
no_license
|
alexmy21/Octopus
|
bd17777cf66654c1e7959654f63ca82b716865b5
|
7844ec616376ec6cd9c1a8b73dbcad9c729557ae
|
refs/heads/master
| 2022-12-22T22:42:29.473433 | 2022-12-21T16:52:09 | 2022-12-21T16:52:09 | 61,543,002 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,937 |
py
|
from typing import List
from typing import Optional
class TokenParser(object):
"""
Parses tokens from a string passed to StringArgs.
"""
def __init__(self) -> None:
self._string = "" # type: str
self._cursor = 0 # type: int
self._current = None # type: Optional[str]
self._next_ = None # type: Optional[str]
def parse(self, string: str) -> List[str]:
self._string = string
self._cursor = 0
self._current = None
if len(string) > 0:
self._current = string[0]
self._next_ = None
if len(string) > 1:
self._next_ = string[1]
tokens = self._parse()
return tokens
def _parse(self) -> List[str]:
tokens = []
while self._is_valid():
if self._current.isspace():
# Skip spaces
self._next()
continue
if self._is_valid():
tokens.append(self._parse_token())
return tokens
def _is_valid(self) -> bool:
return self._current is not None
def _next(self) -> None:
"""
Advances the cursor to the next position.
"""
if not self._is_valid():
return
self._cursor += 1
self._current = self._next_
if self._cursor + 1 < len(self._string):
self._next_ = self._string[self._cursor + 1]
else:
self._next_ = None
def _parse_token(self) -> str:
token = ""
while self._is_valid():
if self._current.isspace():
self._next()
break
if self._current == "\\":
token += self._parse_escape_sequence()
elif self._current in ["'", '"']:
token += self._parse_quoted_string()
else:
token += self._current
self._next()
return token
def _parse_quoted_string(self) -> str:
string = ""
delimiter = self._current
# Skip first delimiter
self._next()
while self._is_valid():
if self._current == delimiter:
# Skip last delimiter
self._next()
break
if self._current == "\\":
string += self._parse_escape_sequence()
elif self._current == '"':
string += '"{}"'.format(self._parse_quoted_string())
elif self._current == "'":
string += "'{}'".format(self._parse_quoted_string())
else:
string += self._current
self._next()
return string
def _parse_escape_sequence(self) -> str:
if self._next_ in ['"', "'"]:
sequence = self._next_
else:
sequence = "\\" + self._next_
self._next()
self._next()
return sequence
|
[
"[email protected]"
] | |
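A quick sanity check of the parser above, assuming TokenParser is importable as defined; the input string is illustrative:

parser = TokenParser()
print(parser.parse("foo \"bar baz\" --opt='a b'"))
# expected: ['foo', 'bar baz', '--opt=a b']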
a4cab5589801580f49853730f9ebb47d44b78a6a
|
dc7dc1ab85403a4467044d4c0c936c17fff5225a
|
/fstmerge/examples/Fail2ban/rev579-732/base-trunk-579/server/datetemplate.py
|
a495df29c03ed39cdbb248484605dae12d27a53e
|
[] |
no_license
|
RoDaniel/featurehouse
|
d2dcb5f896bbce2c5154d0ba5622a908db4c5d99
|
df89ce54ddadfba742508aa2ff3ba919a4a598dc
|
refs/heads/master
| 2020-12-25T13:45:44.511719 | 2012-01-20T17:43:15 | 2012-01-20T17:43:15 | 1,919,462 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,020 |
py
|
__author__ = "Cyril Jaquier"
__version__ = "$Revision: 1.1 $"
__date__ = "$Date: 2010-07-25 12:46:34 $"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier"
__license__ = "GPL"
import re
class DateTemplate:
def __init__(self):
self.__name = ""
self.__regex = ""
self.__cRegex = None
self.__pattern = ""
self.__hits = 0
def setName(self, name):
self.__name = name
def getName(self):
return self.__name
def setRegex(self, regex):
self.__regex = regex.strip()
self.__cRegex = re.compile(regex)
def getRegex(self):
return self.__regex
def setPattern(self, pattern):
self.__pattern = pattern.strip()
def getPattern(self):
return self.__pattern
def isValid(self):
return self.__regex != "" and self.__pattern != ""
def incHits(self):
self.__hits = self.__hits + 1
def getHits(self):
return self.__hits
def matchDate(self, line):
dateMatch = self.__cRegex.search(line)
return dateMatch
    def getDate(self, line):
        raise Exception("getDate() is abstract")
|
[
"joliebig"
] |
joliebig
|
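A brief usage sketch for the template class above; the regex, pattern, and log line are hypothetical, and matchDate simply returns the re match object:

template = DateTemplate()
template.setName("ISO date")
template.setRegex(r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}")
template.setPattern("%Y-%m-%d %H:%M:%S")
match = template.matchDate("2010-07-25 12:46:34 fail2ban started")
if match:
    template.incHits()
    print(match.group())  # 2010-07-25 12:46:34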
d57b61b9f136e1ff89cbd1756e5f76c76a7fb80f
|
e43ff8f429a6938a4f16edc4b2c94976acbff157
|
/ABC/ABC 168/b.py
|
735be34191c1e1479524c1884243fefb41cf2ea4
|
[] |
no_license
|
Soule50431/AtCoder
|
4fcd6ab6b771d55c90dc62aedd75eb81fd067466
|
118ac5d03630ce143fb50153402eee38e988ae0c
|
refs/heads/master
| 2023-06-18T13:07:13.843361 | 2021-07-14T01:56:20 | 2021-07-14T01:56:20 | 357,827,698 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 89 |
py
|
k = int(input())
s = input()
if len(s) <= k:
print(s)
else:
print(s[:k] + "...")
|
[
"[email protected]"
] | |
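The script above truncates a string to k characters and appends "..." when it is too long. A self-contained restatement of the same rule, with hypothetical inputs:

def truncate(s, k):
    return s if len(s) <= k else s[:k] + "..."

assert truncate("nikoandsolstice", 3) == "nik..."
assert truncate("short", 20) == "short"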
9aa06aefad0eb809be1ca802dc63eaa0fc7b6b52
|
74a8503870c091dfc02d0772be71bd8eed508835
|
/data_refinitiv_host_1_qa/subscribe_data_FVc1.py
|
6078f332f8515098d078e0135969d42bf96e70d7
|
[] |
no_license
|
xiaorensean/Aggregated-Exchanges-App
|
3c212c4dfc396276c4604194936b5ed11bbd099b
|
070bf23c057621910589881542673d0e02691c9e
|
refs/heads/master
| 2023-02-25T08:20:14.435706 | 2021-01-30T15:18:34 | 2021-01-30T15:18:34 | 255,150,488 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 17,473 |
py
|
import sys
import os
import time
import requests
import socket
import json
import websocket
import threading
import traceback
current_dir = os.path.dirname(os.path.abspath(__file__))
pkg_dir = os.path.dirname(current_dir)
sys.path.append(pkg_dir)
from influxdb_client.influxdb_client_host_1 import InfluxClientHost1
from influxdb_client.influxdb_client_host_2 import InfluxClientHost2
from influxdb_client.influxdb_client_qa_host_1 import InfluxClientHostQA1
from utility.error_logger_writer import logger
host_1 = InfluxClientHostQA1()
host_2 = InfluxClientHost2()
# Global Default Variables
app_id = '256'
#app_id = '2bd36ad5581c4ff0b9914641a40480316ab28105'
auth_url = 'https://api.refinitiv.com:443/auth/oauth2/v1/token'
discovery_url = 'https://api.refinitiv.com/streaming/pricing/v1/'
password = 'k-!\"*dSg6(%[<vntbMLDd\\*aF]=p\"AWcAyQ^Bf>\\'
position = ''
sts_token = ''
refresh_token = ''
user = 'GE-A-01669631-3-3373'
clientid = '2bd36ad5581c4ff0b9914641a40480316ab28105'
client_secret = ''
scope = 'trapi'
region = 'amer'
service = 'ELEKTRON_DD'
hostList = []
hotstandby = False
# Global Variables
session2 = None
original_expire_time = '0'
class WebSocketSession:
logged_in = False
session_name = ''
web_socket_app = None
web_socket_open = False
host = ''
disconnected_by_user = False
def __init__(self, name, host, symbol):
self.session_name = name
self.host = host
self.ric = symbol
def _send_data_request(self, ric_name):
""" Create and send simple Market Price request """
mp_req_json = {
"ID":2,
"Key":{
"Name":ric_name
},
}
self.web_socket_app.send(json.dumps(mp_req_json))
print("SENT on " + self.session_name + ":")
print(json.dumps(mp_req_json, sort_keys=True, indent=2, separators=(',', ':')))
def _send_login_request(self, auth_token, is_refresh_token):
"""
Send login request with authentication token.
Used both for the initial login and subsequent reissues to update the authentication token
"""
login_json = {
'ID': 1,
'Domain': 'Login',
'Key': {
'NameType': 'AuthnToken',
'Elements': {
'ApplicationId': '',
'Position': '',
'AuthenticationToken': ''
}
}
}
login_json['Key']['Elements']['ApplicationId'] = app_id
login_json['Key']['Elements']['Position'] = position
login_json['Key']['Elements']['AuthenticationToken'] = auth_token
# If the token is a refresh token, this is not our first login attempt.
if is_refresh_token:
login_json['Refresh'] = False
self.web_socket_app.send(json.dumps(login_json))
print("SENT on " + self.session_name + ":")
print(json.dumps(login_json, sort_keys=True, indent=2, separators=(',', ':')))
def _process_login_response(self, message_json):
""" Send item request """
if message_json['State']['Stream'] != "Open" or message_json['State']['Data'] != "Ok":
print("Login failed.")
sys.exit(1)
self.logged_in = True
self._send_data_request(self.ric)
def _process_message(self, message_json):
""" Parse at high level and output JSON of message """
message_type = message_json['Type']
if message_type == "Refresh":
if 'Domain' in message_json:
message_domain = message_json['Domain']
if message_domain == "Login":
self._process_login_response(message_json)
elif message_type == "Ping":
pong_json = {'Type': 'Pong'}
self.web_socket_app.send(json.dumps(pong_json))
print("SENT on " + self.session_name + ":")
print(json.dumps(pong_json, sort_keys=True, indent=2, separators=(',', ':')))
def _write_market_data(self,data, dt):
ticker = self.ric
if "=" in ticker:
ticker = ticker.replace("=","_")
else:
pass
measurement = "refinitiv_" + dt + "_" + ticker
fields = data
# write everything as float
for key, value in fields.items():
if type(value) == int:
fields[key] = float(value)
fields.update({"is_api_return_timestamp": True})
dbtime = False
tags = {}
tags.update({"symbol":self.ric})
host_1.write_points_to_measurement(measurement,dbtime,tags,fields)
def _write_vwap(self,data):
ticker = self.ric
if "=" in ticker:
ticker = ticker.replace("=", "_")
else:
pass
measurement = "refinitiv_Trade" + "_" + ticker + "_1m"
fields = {}
fields.update({"VWAP":data['VWAP']})
fields.update({"is_api_return_timestamp": True})
dbtime = False
tags = {}
tags.update({"symbol": self.ric})
host_1.write_points_to_measurement(measurement, dbtime, tags, fields)
# Callback events from WebSocketApp
def _on_message(self, message):
""" Called when message received, parse message into JSON for processing """
print("RECEIVED on " + self.session_name + ":")
message_json = json.loads(message)
data = json.dumps(message_json, sort_keys=True, indent=2, separators=(',', ':'))
for singleMsg in message_json:
print(singleMsg)
try:
#print(singleMsg['UpdateType'], singleMsg['Fields'])
data = singleMsg['Fields']
data_type = singleMsg['UpdateType']
if data_type == "Trade":
try:
self._write_vwap(data)
except KeyError:
pass
except:
error = traceback.format_exc()
print(error)
measurement = "refinitiv_Trade" + "_" + self.ric + "_1m"
logger(measurement, error, self.ric)
else:
pass
try:
self._write_market_data(data, data_type)
except:
error = traceback.format_exc()
print(error)
measurement = "refinitiv_" + data_type + "_" + self.ric
logger(measurement, error, self.ric)
except:
pass
self._process_message(singleMsg)
def _on_error(self, error):
""" Called when websocket error has occurred """
        print(str(error) + " for " + self.session_name)
def _on_close(self):
""" Called when websocket is closed """
self.web_socket_open = False
self.logged_in = False
print("WebSocket Closed for " + self.session_name)
if not self.disconnected_by_user:
print("Reconnect to the endpoint for " + self.session_name + " after 3 seconds... ")
time.sleep(3)
self.connect()
def _on_open(self):
""" Called when handshake is complete and websocket is open, send login """
print("WebSocket successfully connected for " + self.session_name + "!")
self.web_socket_open = True
self._send_login_request(sts_token, False)
# Operations
def connect(self):
# Start websocket handshake
ws_address = "wss://{}/WebSocket".format(self.host)
print("Connecting to WebSocket " + ws_address + " for " + self.session_name + "...")
self.web_socket_app = websocket.WebSocketApp(ws_address, on_message=self._on_message,
on_error=self._on_error,
on_close=self._on_close,
subprotocols=['tr_json2'])
self.web_socket_app.on_open = self._on_open
# Event loop
wst = threading.Thread(target=self.web_socket_app.run_forever, kwargs={'sslopt': {'check_hostname': False}})
wst.start()
def disconnect(self):
print("Closing the WebSocket connection for " + self.session_name)
self.disconnected_by_user = True
if self.web_socket_open:
self.web_socket_app.close()
def refresh_token(self):
if self.logged_in:
print("Refreshing the access token for " + self.session_name)
self._send_login_request(sts_token, True)
def query_service_discovery(url=None):
if url is None:
url = discovery_url
print("Sending EDP-GW service discovery request to " + url)
try:
r = requests.get(url, headers={"Authorization": "Bearer " + sts_token}, params={"transport": "websocket"}, allow_redirects=False)
except requests.exceptions.RequestException as e:
print('EDP-GW service discovery exception failure:', e)
return False
if r.status_code == 200:
# Authentication was successful. Deserialize the response.
response_json = r.json()
print("EDP-GW Service discovery succeeded. RECEIVED:")
print(json.dumps(response_json, sort_keys=True, indent=2, separators=(',', ':')))
for index in range(len(response_json['services'])):
if region == "amer":
if not response_json['services'][index]['location'][0].startswith("us-"):
continue
elif region == "emea":
if not response_json['services'][index]['location'][0].startswith("eu-"):
continue
elif region == "apac":
if not response_json['services'][index]['location'][0].startswith("ap-"):
continue
if not hotstandby:
if len(response_json['services'][index]['location']) == 2:
hostList.append(response_json['services'][index]['endpoint'] + ":" +
str(response_json['services'][index]['port']))
break
else:
if len(response_json['services'][index]['location']) == 1:
hostList.append(response_json['services'][index]['endpoint'] + ":" +
str(response_json['services'][index]['port']))
if hotstandby:
if len(hostList) < 2:
print("hotstandby support requires at least two hosts")
sys.exit(1)
else:
if len(hostList) == 0:
print("No host found from EDP service discovery")
sys.exit(1)
return True
    elif r.status_code in (301, 302, 303, 307, 308):
# Perform URL redirect
print('EDP-GW service discovery HTTP code:', r.status_code, r.reason)
new_host = r.headers['Location']
if new_host is not None:
print('Perform URL redirect to ', new_host)
return query_service_discovery(new_host)
return False
    elif r.status_code in (403, 451):
# Stop trying with the request
print('EDP-GW service discovery HTTP code:', r.status_code, r.reason)
print('Stop trying with the request')
return False
else:
# Retry the service discovery request
print('EDP-GW service discovery HTTP code:', r.status_code, r.reason)
print('Retry the service discovery request')
return query_service_discovery()
def get_sts_token(current_refresh_token, url=None):
"""
Retrieves an authentication token.
:param current_refresh_token: Refresh token retrieved from a previous authentication, used to retrieve a
subsequent access token. If not provided (i.e. on the initial authentication), the password is used.
"""
if url is None:
url = auth_url
if not current_refresh_token: # First time through, send password
if url.startswith('https'):
data = {'username': user, 'password': password, 'grant_type': 'password', 'takeExclusiveSignOnControl': True,
'scope': scope}
else:
data = {'username': user, 'password': password, 'client_id': clientid, 'grant_type': 'password', 'takeExclusiveSignOnControl': True,
'scope': scope}
print("Sending authentication request with password to", url, "...")
else: # Use the given refresh token
if url.startswith('https'):
data = {'username': user, 'refresh_token': current_refresh_token, 'grant_type': 'refresh_token'}
else:
data = {'username': user, 'client_id': clientid, 'refresh_token': current_refresh_token, 'grant_type': 'refresh_token'}
print("Sending authentication request with refresh token to", url, "...")
try:
if url.startswith('https'):
# Request with auth for https protocol
r = requests.post(url,
headers={'Accept': 'application/json'},
data=data,
auth=(clientid, client_secret),
verify=True,
allow_redirects=False)
else:
# Request without auth for non https protocol (e.g. http)
r = requests.post(url,
headers={'Accept': 'application/json'},
data=data,
verify=True,
allow_redirects=False)
except requests.exceptions.RequestException as e:
print('EDP-GW authentication exception failure:', e)
return None, None, None
if r.status_code == 200:
auth_json = r.json()
print("EDP-GW Authentication succeeded. RECEIVED:")
print(json.dumps(auth_json, sort_keys=True, indent=2, separators=(',', ':')))
return auth_json['access_token'], auth_json['refresh_token'], auth_json['expires_in']
    elif r.status_code in (301, 302, 307, 308):
# Perform URL redirect
print('EDP-GW authentication HTTP code:', r.status_code, r.reason)
new_host = r.headers['Location']
if new_host is not None:
print('Perform URL redirect to ', new_host)
return get_sts_token(current_refresh_token, new_host)
return None, None, None
    elif r.status_code in (400, 401):
# Retry with username and password
print('EDP-GW authentication HTTP code:', r.status_code, r.reason)
if current_refresh_token:
# Refresh token may have expired. Try using our password.
print('Retry with username and password')
return get_sts_token(None)
return None, None, None
    elif r.status_code in (403, 451):
# Stop retrying with the request
print('EDP-GW authentication HTTP code:', r.status_code, r.reason)
print('Stop retrying with the request')
return None, None, None
else:
# Retry the request to the API gateway
print('EDP-GW authentication HTTP code:', r.status_code, r.reason)
print('Retry the request to the API gateway')
return get_sts_token(current_refresh_token)
if __name__ == "__main__":
ric = "FVc1"
if position == '':
# Populate position if possible
try:
position_host = socket.gethostname()
position = socket.gethostbyname(position_host) + "/" + position_host
except socket.gaierror:
position = "127.0.0.1/net"
sts_token, refresh_token, expire_time = get_sts_token(None)
if not sts_token:
sys.exit(1)
original_expire_time = expire_time
# Query VIPs from EDP service discovery
if not query_service_discovery():
print("Failed to retrieve endpoints from EDP Service Discovery. Exiting...")
sys.exit(1)
# Start websocket handshake; create two sessions when the hotstandby parameter is specified.
session1 = WebSocketSession("session1", hostList[0], ric)
session1.connect()
if hotstandby:
session2 = WebSocketSession("session2", hostList[1],ric)
session2.connect()
try:
while True:
            # Keep using the current token until 90% of its lifetime has elapsed, then refresh it.
time.sleep(int(float(expire_time) * 0.90))
sts_token, refresh_token, expire_time = get_sts_token(refresh_token)
if not sts_token:
sys.exit(1)
if int(expire_time) != int(original_expire_time):
print('expire time changed from ' + str(original_expire_time) + ' sec to ' + str(expire_time) + ' sec; retry with password')
sts_token, refresh_token, expire_time = get_sts_token(None)
if not sts_token:
sys.exit(1)
original_expire_time = expire_time
# Update token.
session1.refresh_token()
if hotstandby:
session2.refresh_token()
except KeyboardInterrupt:
session1.disconnect()
if hotstandby:
session2.disconnect()
|
[
"[email protected]"
] | |
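The main loop above sleeps for 90% of the access token's lifetime before refreshing it. A minimal sketch of that cadence in isolation; get_token and apply_token are stand-ins for get_sts_token and the sessions' refresh_token calls:

import time

def refresh_loop(get_token, apply_token):
    token, refresh, expires_in = get_token(None)
    while token:
        # sleep for 90% of the token lifetime, then re-authenticate
        time.sleep(int(float(expires_in) * 0.90))
        token, refresh, expires_in = get_token(refresh)
        if token:
            apply_token(token)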
e6ff9ac5d8d9fb92b4548b2d325b3f5ba5aa2e09
|
dfc827bf144be6edf735a8b59b000d8216e4bb00
|
/CODE/experimentcode/FundamentalCode/SerrecentFD/Run.py
|
84b4f975d1b28d247108e897c317804838aed81d
|
[] |
no_license
|
jordanpitt3141/ALL
|
c5f55e2642d4c18b63b4226ddf7c8ca492c8163c
|
3f35c9d8e422e9088fe096a267efda2031ba0123
|
refs/heads/master
| 2020-07-12T16:26:59.684440 | 2019-05-08T04:12:26 | 2019-05-08T04:12:26 | 94,275,573 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 17,738 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 22 10:09:21 2015
@author: jordan
"""
from Serre2FDC import *
from scipy import *
import csv
import os
from numpy.linalg import norm
def copyarraytoC(a):
n = len(a)
b = mallocPy(n)
for i in range(n):
writetomem(b,i,a[i])
return b
def copyarrayfromC(a,n):
b = [0]*n
for i in range(n):
b[i] = readfrommem(a,i)
return b
def makevar(sx,ex,dx,st,et,dt):
x = arange(sx, ex, dx)
t = arange(st, et, dt)
return x,t
def dambreak(x,hf,hc,hl,dx):
n = len(x)
h = zeros(n)
u = zeros(n)
for i in range(n):
if (x[i] < hc):
h[i] = hf
else:
h[i] = hl
return h,u
def dambreaksmooth(x,x0,base,eta0,diffuse,dx):
from numpy import tanh
n = len(x)
h = zeros(n)
u = zeros(n)
for i in range(n):
h[i] = base + 0.5*eta0*(1 + tanh(diffuse*(x0 -x[i])))
return h,u
def sech2 (x):
a = 2./(exp(x) + exp(-x))
return a*a
def soliton (x,t,g,a0,a1):
c = sqrt(g*(a0 + a1))
phi = x - c*t;
k = sqrt(3.0*a1) / (2.0*a0 *sqrt(a0 + a1))
return a0 + a1*sech2(k*phi)
def solitoninit(n,a0,a1,g,x,t0,dx):
h = zeros(n)
u = zeros(n)
c = sqrt(g*(a0 + a1))
for i in range(n):
h[i] = soliton(x[i],t0,g,a0,a1)
u[i] = c* ((h[i] - a0) / h[i])
return h,u
def soliton2interactinit(n,a0,a11,solbeg1,solend1,direction1,a12,solbeg2,solend2,direction2,g,x,t0,dx):
h = zeros(n)
u = zeros(n)
c1 = sqrt(g*(a0 + a11))
c2 = sqrt(g*(a0 + a11))
for i in range(n):
if (x[i] > solbeg1 and x[i] < solend1):
h[i] = soliton(abs(x[i] - 0.5*(solbeg1 + solend1)),t0,g,a0,a11)
u[i] = direction1*c1*( (h[i] - a0) / h[i] )
elif (x[i] > solbeg2 and x[i] < solend2):
h[i] = soliton(abs(x[i] - 0.5*(solbeg2 + solend2)),t0,g,a0,a12)
u[i] = direction2*c2* ((h[i] - a0) / h[i])
else:
h[i] = a0
u[i] = 0.0
return h,u
def experiment1(x,h0,h1,b,dx):
    # raised block of height h0 over -2*b < x < 0 on a background of height h1
    # (b was an unbound name in this function; it is taken as a parameter here)
    n = len(x)
    u = zeros(n)
    h = ones(n)*h1
    for i in range(n):
        if (x[i] < 0 and x[i] > -2*b):
            h[i] = h0
    return h,u
"""
#solitonaccuracy
wdir = "../../../data/raw/FDreredo/FDcent/"
if not os.path.exists(wdir):
os.makedirs(wdir)
s = wdir + "savenorms.txt"
with open(s,'a') as file1:
writefile = csv.writer(file1, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile.writerow(['dx','Normalised L1-norm Difference Height', ' Normalised L1-norm Difference Velocity', 'Eval Error'])
for k in range(6,21):
dx = 100.0 / (2**k)
Cr = 0.5
g = 9.81
a0 = 1.0
a1 = 0.7
l = 1.0 / (sqrt(g*(a0 + a1)))
dt = Cr*l*dx
startx = -250.0
endx = 250.0 + dx
startt = 0
endt = 50 + dt
x,t = makevar(startx,endx,dx,startt,endt,dt)
n = len(x)
gap = max(5,int(0.5/dt))
h,u = solitoninit(n,a0,a1,g,x,0.0,dx)
ph,pu = solitoninit(n,a0,a1,g,x,-dt,dx)
nBC = 3
niBC = nBC
nBCs = 4
u0 = zeros(nBCs)
u1 = zeros(nBCs)
h0 = a0*ones(nBCs)
h1 = a0*ones(nBCs)
h_c = copyarraytoC(h)
u_c = copyarraytoC(u)
pubc_c = copyarraytoC(concatenate([u0[-nBC:],pu,u1[:nBC]]))
phbc_c = copyarraytoC(concatenate([h0[-nBC:],ph,h1[:nBC]]))
h0_c = copyarraytoC(h0)
h1_c = copyarraytoC(h1)
u0_c = copyarraytoC(u0)
u1_c = copyarraytoC(u1)
xbeg = arange(startx - niBC*dx,startx,dx)
xend = arange(endx + dx,endx + (niBC+1)*dx)
xbc = concatenate([xbeg,x,xend])
xbc_c = copyarraytoC(xbc)
hbc_c = mallocPy(n + 2*niBC)
ubc_c = mallocPy(n + 2*niBC)
Evals = []
conc(h0_c , h_c,h1_c,niBC,n ,niBC , hbc_c)
conc(u0_c , u_c,u1_c,niBC,n ,niBC , ubc_c)
Evali = HankEnergyall(xbc_c,hbc_c,ubc_c,g,n + 2*niBC,niBC,dx)
for i in range(1,len(t)):
evolvewrap(u_c, h_c, pubc_c,phbc_c, h0_c, h1_c,u0_c, u1_c,g,dx,dt,nBC, n,nBCs)
print (t[i])
conc(h0_c , h_c,h1_c,niBC,n ,niBC , hbc_c)
conc(u0_c , u_c,u1_c,niBC,n ,niBC , ubc_c)
Evalf = HankEnergyall(xbc_c,hbc_c,ubc_c,g,n + 2*niBC,niBC,dx)
u = copyarrayfromC(u_c,n)
h = copyarrayfromC(h_c,n)
he,ue = solitoninit(n,a0,a1,g,x,t[i],dx)
if not os.path.exists(wdir+ str(k) + "/"):
os.makedirs(wdir+ str(k) + "/")
s = wdir+ str(k) + "/" + "outlast.txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['dx' ,'dt','time','Evali','Evalf','Eval Error','cell midpoint', 'height(m)', 'u(m/s)',"he","ue"])
for j in range(n):
writefile2.writerow([str(dx),str(dt),str(t[i]), str(Evali), str(Evalf), str(abs(Evali - Evalf)/ abs(Evali)), str(x[j]), str(h[j]) , str(u[j]), str(he[j]),str(ue[j])])
normhdiffi = norm(h - he,ord=1) / norm(he,ord=1)
normudiffi = norm(u -ue,ord=1) / norm(ue,ord=1)
s = wdir + "savenorms.txt"
with open(s,'a') as file1:
writefile = csv.writer(file1, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile.writerow([str(dx),str(normhdiffi), str(normudiffi), str(abs(Evali - Evalf)/ abs(Evali))])
deallocPy(u_c)
deallocPy(h_c)
deallocPy(h0_c)
deallocPy(h1_c)
deallocPy(u0_c)
deallocPy(u1_c)
"""
"""
##Soliton
wdir = "../../data/t/"
dx = 0.1
l = 0.01
dt = l*dx
startx = -500.0
endx = 1500.0 + dx
startt = 0.0
endt = 10 + dt
g = 9.81
x,t = makevar(startx,endx,dx,startt,endt,dt)
n = len(x)
gap = 10**100#max(5,int(0.5/dt))
a0 = 10.0
a1 = 1.0
h,u = solitoninit(n,a0,a1,g,x,0.0,dx)
ph,pu = solitoninit(n,a0,a1,g,x,-dt,dx)
nBC = 3
nBCs = 4
u0 = zeros(nBCs)
u1 = zeros(nBCs)
h0 = a0*ones(nBCs)
h1 = a0*ones(nBCs)
h_c = copyarraytoC(h)
u_c = copyarraytoC(u)
pubc_c = copyarraytoC(concatenate([u0[-nBC:],pu,u1[:nBC]]))
phbc_c = copyarraytoC(concatenate([h0[-nBC:],ph,h1[:nBC]]))
h0_c = copyarraytoC(h0)
h1_c = copyarraytoC(h1)
u0_c = copyarraytoC(u0)
u1_c = copyarraytoC(u1)
for i in range(1,len(t)):
if(i % gap == 0 or i ==1):
u = copyarrayfromC(u_c,n)
h = copyarrayfromC(h_c,n)
he,ue = solitoninit(n,10.0,1.0,g,x,t[i],dx)
s = wdir + "saveoutputts" + str(i) + ".txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['dx' ,'dt','time','cell midpoint', 'height(m)', 'u(m/s)',"he","ue"])
for j in range(n):
writefile2.writerow([str(dx),str(dt),str(t[i]), str(x[j]), str(h[j]) , str(u[j]), str(he[j]),str(ue[j])])
evolvewrap(u_c, h_c, pubc_c,phbc_c , h0_c, h1_c,u0_c, u1_c,g,dx,dt,nBC, n,nBCs)
print (t[i])
u = copyarrayfromC(u_c,n)
h = copyarrayfromC(h_c,n)
he,ue = solitoninit(n,a0,a1,g,x,t[i],dx)
s = wdir + "outlast.txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['dx' ,'dt','time','cell midpoint', 'height(m)', 'u(m/s)',"he","ue"])
for j in range(n):
writefile2.writerow([str(dx),str(dt),str(t[i]), str(x[j]), str(h[j]) , str(u[j]), str(he[j]),str(ue[j])])
deallocPy(u_c)
deallocPy(h_c)
deallocPy(h0_c)
deallocPy(h1_c)
deallocPy(u0_c)
deallocPy(u1_c)
"""
"""
##Soliton Collision
wdir = "../../../data/raw/Cserre/solitonothers/collDMcopyhh/FDc/"
if not os.path.exists(wdir):
os.makedirs(wdir)
dx = 0.01
a0 = 1.0
a11 = 0.96
solbeg1 = 100.0
solend1 = 200.0
direction1 = 1.0
a12 = 0.96
solbeg2 = 200.0
solend2 = 300.0
direction2 = -1.0
Cr = 0.5
#g = 9.81
g = 1.0
#l = Cr / (sqrt(g*1.5*(a0 + a11 + a12)))
dt = 0.1*dx
startx = 0.0
endx = 400.0
startt = 0.0
endt = 150 + dt
x,t = makevar(startx,endx,dx,startt,endt,dt)
n = len(x)
t0 = 0
gap = int(0.5/dt)
h,u = soliton2interactinit(n,a0,a11,solbeg1,solend1,direction1,a12,solbeg2,solend2,direction2,g,x,t0,dx)
ph,pu = soliton2interactinit(n,a0,a11,solbeg1,solend1,direction1,a12,solbeg2,solend2,direction2,g,x,-dt,dx)
nBC = 3
nBCs = 4
u0 = zeros(nBCs)
u1 = zeros(nBCs)
h0 = a0*ones(nBCs)
h1 = a0*ones(nBCs)
h_c = copyarraytoC(h)
u_c = copyarraytoC(u)
pubc_c = copyarraytoC(concatenate([u0[-nBC:],pu,u1[:nBC]]))
phbc_c = copyarraytoC(concatenate([h0[-nBC:],ph,h1[:nBC]]))
h0_c = copyarraytoC(h0)
h1_c = copyarraytoC(h1)
u0_c = copyarraytoC(u0)
u1_c = copyarraytoC(u1)
for i in range(1,len(t)):
if(i % gap == 0 or i ==1):
u = copyarrayfromC(u_c,n)
h = copyarrayfromC(h_c,n)
s = wdir + "saveoutputts" + str(i) + ".txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['dx' ,'dt','time','cell midpoint', 'height(m)', 'u(m/s)'])
for j in range(n):
writefile2.writerow([str(dx),str(dt),str(t[i]), str(x[j]), str(h[j]) , str(u[j])])
evolvewrap(u_c, h_c, pubc_c,phbc_c , h0_c, h1_c,u0_c, u1_c,g,dx,dt,nBC, n,nBCs)
print (t[i])
u = copyarrayfromC(u_c,n)
h = copyarrayfromC(h_c,n)
s = wdir + "saveoutputtslast.txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['dx' ,'dt','time','cell midpoint', 'height(m)', 'u(m/s)'])
for j in range(n):
writefile2.writerow([str(dx),str(dt),str(t[i]), str(x[j]), str(h[j]) , str(u[j])])
deallocPy(u_c)
deallocPy(h_c)
deallocPy(h0_c)
deallocPy(h1_c)
deallocPy(u0_c)
deallocPy(u1_c)
"""
"""
## DAM BREAK Smooth ##########################
wdir = "../../data/raw/longtimedambreakDTfix/FDc/"
if not os.path.exists(wdir):
os.makedirs(wdir)
dx = 10.0 /(2**9)
l = 0.01
dt = l*dx*dx
startx = -900
endx = 1800.0 + dx
startt = 0.0
endt = 300 + dt
g = 9.81
x,t = makevar(startx,endx,dx,startt,endt,dt)
n = len(x)
bot = 0.0
hf = 1.8
hl = 1.0
gap = int(0.5/dt)
diffuse = 10
base = hl
eta0 = hf - hl
x0 = 500
h,u = dambreaksmooth(x,x0,base,eta0,diffuse,dx)
nBC = 3
nBCs = 4
u0 = zeros(nBCs)
u1 = zeros(nBCs)
h0 = hf*ones(nBCs)
h1 = hl*ones(nBCs)
h_c = copyarraytoC(h)
u_c = copyarraytoC(u)
pubc_c = copyarraytoC(concatenate([u0[-nBC:],u,u1[:nBC]]))
phbc_c = copyarraytoC(concatenate([u0[-nBC:],h,u1[:nBC]]))
h0_c = copyarraytoC(h0)
h1_c = copyarraytoC(h1)
u0_c = copyarraytoC(u0)
u1_c = copyarraytoC(u1)
aplus = []
aplusx = []
aplust = []
for i in range(1,len(t)):
if(i % gap == 0 or i ==1):
u = copyarrayfromC(u_c,n)
h = copyarrayfromC(h_c,n)
mi = n - 2
for mi in range(n-1,-1,-1):
if(h[mi -1] < h[mi]) and (h[mi] > 1.1 ):
break
aplus.append(h[mi])
aplusx.append(x[mi])
aplust.append(t[i])
s = wdir + "saveoutputts" + str(i) + ".txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['dx' ,'dt','time','cell midpoint', 'height(m)', 'u(m/s)'])
for j in range(n):
writefile2.writerow([str(dx),str(dt),str(t[i]), str(x[j]), str(h[j]) , str(u[j])])
evolvewrap(u_c, h_c, pubc_c,phbc_c, h0_c, h1_c,u0_c, u1_c,g,dx,dt,nBC, n,nBCs)
print (t[i])
u = copyarrayfromC(u_c,n)
h = copyarrayfromC(h_c,n)
mi = n - 2
for mi in range(n-1,-1,-1):
if(h[mi -1] < h[mi]) and (h[mi] > 1.1 ):
break
aplus.append(h[mi])
aplusx.append(x[mi])
aplust.append(t[i])
s = wdir + "outlast.txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['dx' ,'dt','time','cell midpoint', 'height(m)', 'u(m/s)'])
for j in range(n):
writefile2.writerow([str(dx),str(dt),str(t[i]), str(x[j]), str(h[j]) , str(u[j])])
s = wdir + "aplus.txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['x' ,'t','aplus' ,"Grim"])
for j in range(len(aplus)):
writefile2.writerow([str(aplusx[j]),str(aplust[j]),str(aplus[j]),str(0.739976603390100695296990254)])
deallocPy(u_c)
deallocPy(h_c)
deallocPy(h0_c)
deallocPy(h1_c)
deallocPy(u0_c)
deallocPy(u1_c)
"""
"""
#big smooth NEW
#TEST
#diffuses = [0.01,0.025,0.05,0.075,0.1,0.25,0.5,0.75,1.0,2.5,5.0,7.5,10.0,25.0,50.0,75.0,100.0,250.0,500.0,750.0,1000.0]
diffuses = [2]
wdirb = "../../data/bigsmoothTEST/FDc/"
for ll in range(9,10):
for k in range(len(diffuses)):
wdir = wdirb + str(ll) + "/" + str(k) + "/"
if not os.path.exists(wdir):
os.makedirs(wdir)
dx = 10.0 / (2**ll)
Cr = 0.5
g = 9.81
hf = 1.3
l = 1.0 / sqrt(g*hf)
dt = l*dx
startx = 0.0
endx = 1200.0 + dx
startt = 0.0
endt = 150.0+(dt*0.9)
x,t = makevar(startx,endx,dx,startt,endt,dt)
n = len(x)
bot = 0.0
hl = 1.0
gap = max(1,int(0.02/dt))
diffuse = diffuses[k]
base = hl
eta0 = hf - hl
x0 = 600
h,u = dambreaksmooth(x,x0,base,eta0,diffuse,dx)
nBC = 3
nBCs = 4
u0 = zeros(nBCs)
u1 = zeros(nBCs)
h0 = hf*ones(nBCs)
h1 = hl*ones(nBCs)
h_c = copyarraytoC(h)
u_c = copyarraytoC(u)
pubc_c = copyarraytoC(concatenate([u0[-nBC:],u,u1[:nBC]]))
phbc_c = copyarraytoC(concatenate([h0[-nBC:],h,h1[:nBC]]))
h0_c = copyarraytoC(h0)
h1_c = copyarraytoC(h1)
u0_c = copyarraytoC(u0)
u1_c = copyarraytoC(u1)
for i in range(1,len(t)):
evolvewrap(u_c, h_c, pubc_c,phbc_c, h0_c, h1_c,u0_c, u1_c,g,dx,dt,nBC, n,nBCs)
print (t[i])
u = copyarrayfromC(u_c,n)
h = copyarrayfromC(h_c,n)
s = wdir + "outlast.txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['dx' ,'dt','time','cell midpoint', 'height(m)', 'u(m/s)', 'diffuse'])
for j in range(n):
writefile2.writerow([str(dx),str(dt),str(t[i]), str(x[j]), str(h[j]) , str(u[j]), str(diffuse)])
deallocPy(u_c)
deallocPy(h_c)
deallocPy(h0_c)
deallocPy(h1_c)
deallocPy(u0_c)
deallocPy(u1_c)
"""
#big smooth targeted
difflist = [12]
deltaxa = [12,13,14]
dxlist = [deltaxa,deltaxa,deltaxa,deltaxa,deltaxa]
diffuses = [0.01,0.025,0.05,0.075,0.1,0.25,0.5,0.75,1.0,2.5,5.0,7.5,10.0,25.0,50.0,75.0,100.0,250.0,500.0,750.0,1000.0]
wdirb = "../../data/bigsmoothtargettedNEW1/FDcent/"
for lk in range(len(difflist)):
for ll in dxlist[lk]:
wdir = wdirb + str(ll) + "/" + str(difflist[lk]) + "/"
if not os.path.exists(wdir):
os.makedirs(wdir)
dx = (10.0 / (2**ll))
l = 0.01
dt = l*dx
startx = 0.0
endx = 1000.0 + dx
startt = 0.0
endt = 30.0+(dt*0.9)
g = 9.81
x,t = makevar(startx,endx,dx,startt,endt,dt)
n = len(x)
bot = 0.0
hf = 1.8
hl = 1.0
gap = max(1,int(0.02/dt))
diffuse = diffuses[difflist[lk]]
base = hl
eta0 = hf - hl
x0 = 500
h,u = dambreaksmooth(x,x0,base,eta0,diffuse,dx)
nBC = 3
nBCs = 4
u0 = zeros(nBCs)
u1 = zeros(nBCs)
h0 = hf*ones(nBCs)
h1 = hl*ones(nBCs)
h_c = copyarraytoC(h)
u_c = copyarraytoC(u)
pubc_c = copyarraytoC(concatenate([u0[-nBC:],u,u1[:nBC]]))
phbc_c = copyarraytoC(concatenate([h0[-nBC:],h,h1[:nBC]]))
h0_c = copyarraytoC(h0)
h1_c = copyarraytoC(h1)
u0_c = copyarraytoC(u0)
u1_c = copyarraytoC(u1)
for i in range(1,len(t)):
evolvewrap(u_c, h_c, pubc_c,phbc_c, h0_c, h1_c,u0_c, u1_c,g,dx,dt,nBC, n,nBCs)
print (t[i])
u = copyarrayfromC(u_c,n)
h = copyarrayfromC(h_c,n)
s = wdir + "outlast.txt"
with open(s,'a') as file2:
writefile2 = csv.writer(file2, delimiter = ',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
writefile2.writerow(['dx' ,'dt','time','cell midpoint', 'height(m)', 'u(m/s)', 'diffuse'])
for j in range(n):
writefile2.writerow([str(dx),str(dt),str(t[i]), str(x[j]), str(h[j]) , str(u[j]), str(diffuse)])
deallocPy(u_c)
deallocPy(h_c)
deallocPy(h0_c)
deallocPy(h1_c)
deallocPy(u0_c)
deallocPy(u1_c)
|
[
"[email protected]"
] | |
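The soliton/sech2 pair above defines the solitary-wave initial condition used throughout the runs. A standalone numeric check of the profile peak, assuming g = 9.81, a0 = 1.0, a1 = 0.7 as in the accuracy study:

from math import sqrt, exp

def sech2(x):
    a = 2.0 / (exp(x) + exp(-x))
    return a * a

def soliton(x, t, g, a0, a1):
    c = sqrt(g * (a0 + a1))
    k = sqrt(3.0 * a1) / (2.0 * a0 * sqrt(a0 + a1))
    return a0 + a1 * sech2(k * (x - c * t))

print(soliton(0.0, 0.0, 9.81, 1.0, 0.7))  # 1.7, i.e. a0 + a1 at the crest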
4211a9ccc3f5e8c8113247b6341eec5da41389f9
|
14ea45983fdcabe18f948ee45c388e10d825f53d
|
/SolarSystem.py
|
0237a9a3620a71e8edd2fee5698fa9789b946dd5
|
[] |
no_license
|
aodarc/Galileo
|
f4d6cbc3b8b3dbc9f0af70fe1874013d8d981b5d
|
f201a2ba3c560d893206ba29a1eefcf793025103
|
refs/heads/master
| 2020-12-28T20:30:31.899852 | 2016-01-14T10:29:27 | 2016-01-14T10:29:27 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,717 |
py
|
from tkinter import *
from time import sleep
from math import sqrt, pi, sin, cos
class Element(object):
_x = int()
_y = int()
_mass = float()
_is_movable = False
speed = 1.
img = 'atom50x50.png'
def __init__(self, root, mass=0.):
self._mass = mass
self._root = root
self.photo = PhotoImage(file='image/'+self.img)
self.body = Label(background='#0F2A33', image=self.photo)
        self.body.image = self.photo  # keep a reference so Tk does not garbage-collect the image
self.body.pack()
def move(self, x=0, y=0):
if not self._is_movable:
return # later add raise
self.body.place_configure(x=x % 600, y=y % 600)
self._root.update()
self._x = x
self._y = y
class Sun(Element):
def __init__(self, root, mass=0):
self.img = 'sun50x50.png'
Element.__init__(self, root)
self._x = self._root.winfo_width()/2-25
self._y = self._root.winfo_height()/2-25
self.body.place_configure(x=self._x, y=self._y)
def main():
root = Tk()
root.title(u'Сонячна система')
root.configure(background='#0F2A33')
root.geometry('600x600+600+200')
root.minsize(600, 600)
root.maxsize(1000, 1000)
root.protocol('WM_DELETE_WINDOW', lambda: root.quit())
    root.resizable(False, False)  # disable window resizing in width and height
root.update()
b = Sun(root)
b.speed = 10.
c = Element(root)
c._is_movable = True
c.speed = 2.
radius = 190
cs = [(275 + cos(pi*x/180) * radius, 275 + sin(pi*x/180) * radius) for x in range(360)]
for x, y in cs*4:
c.move(x, y)
sleep(0.001)
root.mainloop()
main()
|
[
"[email protected]"
] | |
c622faefc313d7d0db83653fc9d302fc1d646689
|
658ab464e9c796f819ad85f569ad06ab6e66992e
|
/src/oop/student.py
|
65c635d844e4dc672fcce29362f2c86fcd0be550
|
[] |
no_license
|
huowolf/python-demo
|
03e5731ba632caada819dd70d0f9dc07c98308a1
|
e3b80dcc0e0bc2437a0b2882e17563c8171460a2
|
refs/heads/master
| 2020-03-23T22:00:57.515258 | 2018-09-07T15:33:22 | 2018-09-07T15:33:22 | 142,147,622 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 736 |
py
|
class student(object):
def __init__(self,name,age):
        self.__name = name  # private attribute
        self.__age = age
def print_student(self):
print('%s : %s' % (self.__name,self.__age))
def get_name(self):
return self.__name
def get_age(self):
return self.__age
def set_name(self,name):
self.__name=name
zhangsan=student('zhangsan',25)
lisi=student('lisi',22)
zhangsan.print_student()
lisi.print_student()
# Illegal: private attributes cannot be accessed directly from outside the class
# print(lisi.__name)
# lisi.__name='wangwu'
# lisi.print_student()
print(zhangsan.get_name())
print(zhangsan.get_age())
zhangsan.set_name("zhangsan2")
print(zhangsan.get_name())
|
[
"[email protected]"
] | |
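The commented-out lines above fail because Python name-mangles double-underscore attributes rather than enforcing true privacy. An illustrative check (the mangled name here is _student__name):

wangwu = student('wangwu', 30)
# print(wangwu.__name)        # AttributeError: no attribute '__name'
print(wangwu._student__name)  # 'wangwu' -- reachable via the mangled name, but discouraged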
758711a827b03415e97b4a4791f38fa8ead78674
|
98c6ea9c884152e8340605a706efefbea6170be5
|
/examples/data/Assignment_4/lngchl001/ndom.py
|
7c44c51178f0b565fa9e5abcdb94faf7ca087691
|
[] |
no_license
|
MrHamdulay/csc3-capstone
|
479d659e1dcd28040e83ebd9e3374d0ccc0c6817
|
6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2
|
refs/heads/master
| 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 843 |
py
|
def ndom_to_decimal(a):
    # interpret the decimal digits of a as a base-6 ("ndom") numeral and return its decimal value
    b=0
c=1
firstmult=0
for i in range(len(str(a))):
first=eval(str(a)[b:c])
firstmult+=first
firstmult=firstmult*6
b+=1
c+=1
final=(firstmult//6)
return int(final)
def decimal_to_ndom(a):
    # convert a decimal integer to its base-6 ("ndom") digit representation
    dec1=''
while True:
dec=str(round(a%6,1))
dec1= dec+dec1
a=round(a//6)
#print(dec1,end="")
#ans=ans[::-1]
#print(ans)
if a==0:
break
return int(dec1)
def ndom_add(a,b):
no1=ndom_to_decimal(a)
no2=ndom_to_decimal(b)
added=no1+no2
no3=decimal_to_ndom(added)
return no3
def ndom_multiply(a,b):
no1=ndom_to_decimal(a)
no2=ndom_to_decimal(b)
mult=no1*no2
no3=decimal_to_ndom(mult)
return no3
|
[
"[email protected]"
] | |
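Round-trip checks for the converters above, reading "ndom" numerals as base-6 values written with decimal digits; the sample values are illustrative:

assert ndom_to_decimal(25) == 17       # 2*6 + 5
assert decimal_to_ndom(17) == 25
assert ndom_add(25, 25) == 54          # 17 + 17 = 34 = 5*6 + 4
assert ndom_multiply(10, 10) == 100    # 6 * 6 = 36 = 1*36 + 0*6 + 0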
768a967f1b7f95199fec72a2acc7d628d7aba565
|
e0b0abc1e8d442adb63cf0e5b8641f4185c5611c
|
/apps/myapp/migrations/0003_auto_20190416_1503.py
|
0afc907d80ed6c5f380233840e2ef2b88fb44147
|
[] |
no_license
|
nramiscal/petDemo
|
f7d0c149941b2efabdb74a4e10717f540b6d7eee
|
c4d341105c1b1f14e868f198199fe37cf3687c8f
|
refs/heads/master
| 2020-05-14T08:21:23.669095 | 2019-04-17T23:02:24 | 2019-04-17T23:02:24 | 181,721,049 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,697 |
py
|
# Generated by Django 2.1.4 on 2019-04-16 15:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('myapp', '0002_auto_20190416_1503'),
]
operations = [
migrations.CreateModel(
name='Pet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('breed', models.CharField(max_length=255)),
('species', models.CharField(max_length=255)),
('age', models.IntegerField()),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('fname', models.CharField(max_length=255)),
('lname', models.CharField(max_length=255)),
('email', models.CharField(max_length=255)),
('phone', models.CharField(max_length=255)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
],
),
migrations.AddField(
model_name='pet',
name='owner',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='pets', to='myapp.User'),
),
]
|
[
"[email protected]"
] | |
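A usage sketch for the schema this migration creates; it assumes the corresponding User and Pet models exist in myapp.models with these field names (hypothetical import path), and shows the reverse accessor provided by related_name='pets':

from myapp.models import Pet, User

owner = User.objects.create(fname="Ada", lname="Lovelace",
                            email="ada@example.com", phone="555-0100")
Pet.objects.create(name="Rex", breed="Beagle", species="dog", age=3, owner=owner)
print(owner.pets.count())  # 1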
fe0b74f15b5d93ae0b7558f4334f7d75135b18f6
|
6223dc2e5de7921696cb34fb62142fd4a4efe361
|
/.metadata/.plugins/org.eclipse.core.resources/.history/ec/e0e81f3a218f001414849ea2c9622fd7
|
62f7a07fa6b5d81c8ef0610651721bffd9527d76
|
[] |
no_license
|
Mushirahmed/python_workspace
|
5ef477b2688e8c25b1372f546752501ee53d93e5
|
46e2ed783b17450aba29e4e2df7b656522b2b03b
|
refs/heads/master
| 2021-03-12T19:24:50.598982 | 2015-05-25T10:23:54 | 2015-05-25T10:23:54 | 24,671,376 | 0 | 1 | null | 2015-02-06T09:27:40 | 2014-10-01T08:40:33 |
Python
|
UTF-8
|
Python
| false | false | 4,108 |
#!/usr/bin/env python
#
# Copyright 2014 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import gras
import numpy
import serial
import time
import sys
from sbhs import *
from scan_machines import *
import IN
class sbfan(gras.Block):
def __init__(self,window,fan_value,heat_value):
self.n = window
self.fan = fan_value
self.heat = heat_value
#self.set_fan_heat(self.fan,self.heat)
#self.fan1 = 0
#self.heat1 = 0
gras.Block.__init__(self,
name="sbfan",
in_sig=[numpy.float32,numpy.float32],
out_sig=[numpy.float32])
#from scan_machines import *
print "Scanning Machines"
scan_machines()
# SBHS init
self.new_device = Sbhs()
self.new_device.connect(1)
self.new_device.connect_device(0)
def set_fan_heat(self,fan_value,heat_value):
self.fan = fan_value
self.heat = heat_value
#self.new_device.setFan(self.fan)
#self.new_device.setHeat(self.heat)
return (self.fan,self.heat)
"""def set_parameters(self,window,fan_value,heat_value):
self.n = window
self.fan = fan_value
self.heat = heat_value"""
def isIntegralWin(self,input_item,window):
if(len(input_item) % window):
raise Exception(" Value of Window should be an integral value of length of input items")
def work(self, input_items, output_items):
#for heat_items in input_items[0]:
#print "Heat Written", heat_items
# Set heat as 0 for negative values of heat
#if heat_items < 0:
#self.new_device.setHeat(0)
#else:
# self.new_device.setHeat(heat_items)
#in0 = []
#in1 = []
out = output_items[0]
"""
in0 = input_items[0][0]
print "Input Zero : ",in0
in1 = input_items[1][0]
print "Input One : ",in1
self.new_device.setFan(in0)
self.new_device.setHeat(in1)
"""
print "INPUT_LENGTH",len(input_items)
        # port 0 drives the fan, port 1 drives the heater
        for i in range(0, len(input_items) - 1):
            print " I ", i
            in0 = input_items[i][0]
            print "IN0 :", in0
            self.new_device.setFan(in0)
        for i in range(1, len(input_items)):
            print " I ", i
            in1 = input_items[i][0]
            print "IN1 :", in1
            self.new_device.setHeat(in1)
#self.new_device.setHeat(in1)
#self.set_fan_heat(self.fan,self.heat)
#new_fan,new_heat = self.set_fan_heat(in0,in1)
#self.new_device.setFan(self.fan)
#self.new_device.setFan(new_fan)
#time.sleep(0.5)
#self.new_device.setHeat(self.heat)
#self.new_device.setHeat(new_heat)
#time.sleep(0.5)
#For zero Temperatures
if not self.new_device.getTemp():
raise Exception(" Check SBHS conection try relogging it and run scan_machines.py")
#get temperature
a = self.new_device.getTemp()
#out = output_items[:]
print "A : ",a
out[:] = a
#out[:1] = a
print "Temperature:" ,output_items
#print "temperature:" ,out
#self.consume(0,1) #consume from port 0
self.consume(0,1)
self.consume(1,1)
self.produce(0,1)
|
[
"[email protected]"
] | ||
32c0b16bc6274f687e88566938c5cf3eb5a7cbf1
|
94575dcfd3699f4654fa6102cc908e929306b964
|
/search/templatetags/search_tags.py
|
11993c2a81d9f2ff40b9781ef7fa1d3f3e9be82c
|
[] |
no_license
|
ViktorOgnev/tradenplay
|
7c0f0535edd25dee17aacbb0bda0f28987547003
|
b9d303ec4b1db3a97f04b8e3a17445a48ea8a9e9
|
refs/heads/master
| 2020-12-24T15:41:03.589087 | 2013-11-12T21:06:26 | 2013-11-12T21:06:26 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 773 |
py
|
import urllib
from django import template
from search.forms import SearchForm
register = template.Library()
@register.inclusion_tag("tags/search_box.html")
def search_box(request):
q = request.GET.get('q', '')
form = SearchForm({'q': q})
return {'form': form}
@register.inclusion_tag("tags/pagination_links.html")
def pagination_links(request, paginator):
raw_params = request.GET.copy()
page = raw_params.get('page', 1)
p = paginator.page(page)
try:
del raw_params['page']
except KeyError:
pass
params = urllib.urlencode(raw_params)
return {'request': request,
'paginator':paginator,
'p': p,
'params': params
}
|
[
"[email protected]"
] | |
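A minimal sketch exercising pagination_links outside a template, assuming a configured Django project; it shows that the page key is stripped before the remaining query string is re-encoded:

from django.core.paginator import Paginator
from django.test import RequestFactory

request = RequestFactory().get('/search/', {'q': 'toys', 'page': '2'})
paginator = Paginator(range(100), 10)
ctx = pagination_links(request, paginator)
print(ctx['params'])  # 'q=toys'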
9e859f7f3957aec59205b030144805f23ffc59c5
|
ee6acbd5fcd0fcd16230e96a4a539de41a02c97e
|
/operators/submariner/python/setup.py
|
c44ed5e96d91acc0c0fa554327e991420f75af4c
|
[
"Apache-2.0"
] |
permissive
|
isabella232/pulumi-kubernetes-crds
|
777e78137aaf6525a44b61a02dccf91bf0d87a14
|
372c4c0182f6b899af82d6edaad521aa14f22150
|
refs/heads/master
| 2023-03-15T04:29:16.039753 | 2020-12-30T19:35:54 | 2020-12-30T19:35:54 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,856 |
py
|
# coding=utf-8
# *** WARNING: this file was generated by crd2pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import errno
from setuptools import setup, find_packages
from setuptools.command.install import install
from subprocess import check_call
class InstallPluginCommand(install):
def run(self):
install.run(self)
try:
check_call(['pulumi', 'plugin', 'install', 'resource', 'pulumi_kubernetes_crds_operators_submariner', '${PLUGIN_VERSION}'])
except OSError as error:
if error.errno == errno.ENOENT:
print("""
There was an error installing the pulumi_kubernetes_crds_operators_submariner resource provider plugin.
It looks like `pulumi` is not installed on your system.
Please visit https://pulumi.com/ to install the Pulumi CLI.
You may try manually installing the plugin by running
`pulumi plugin install resource pulumi_kubernetes_crds_operators_submariner ${PLUGIN_VERSION}`
""")
else:
raise
def readme():
with open('README.md', encoding='utf-8') as f:
return f.read()
setup(name='pulumi_pulumi_kubernetes_crds_operators_submariner',
version='${VERSION}',
long_description=readme(),
long_description_content_type='text/markdown',
cmdclass={
'install': InstallPluginCommand,
},
packages=find_packages(),
package_data={
'pulumi_pulumi_kubernetes_crds_operators_submariner': [
'py.typed'
]
},
install_requires=[
'parver>=0.2.1',
'pulumi>=2.0.0,<3.0.0',
'pyyaml>=5.1,<5.2',
'requests>=2.21.0,<2.22.0',
'semver>=2.8.1'
],
zip_safe=False)
|
[
"[email protected]"
] | |
699b70eb414da2b57563595e6a27cd4605d03fff
|
fa44abffdfe00e5a44fffe6610dce25a325ee93e
|
/instagram_clone/users/migrations/0013_auto_20180815_1540.py
|
e22b26aaf1bcb563d48860f3fb74a24da6fe6537
|
[
"MIT"
] |
permissive
|
devjinius/IG_clone
|
e539e44318cdf9baf5137057a0c671e8748c36bf
|
6a525524ec357d5617b09e013b449df2ec9336ad
|
refs/heads/master
| 2020-03-21T08:55:23.803771 | 2018-12-07T13:55:25 | 2018-12-07T13:55:25 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 479 |
py
|
# Generated by Django 2.0.6 on 2018-08-15 06:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0012_auto_20180811_1614'),
]
operations = [
migrations.AlterField(
model_name='user',
name='gender',
field=models.CharField(choices=[('not-specified', 'Not Specified'), ('femail', 'Female'), ('male', 'Male')], max_length=80, null=True),
),
]
|
[
"[email protected]"
] | |
a1ce1f81eb074822e1b694d7c4fbd4c00ab933b6
|
a03c75b3a47c3d0f4662b5115162074802698bec
|
/w3/tests/tests.py
|
133f0638b446cf0250123b92da0338ccac9e1133
|
[] |
no_license
|
shants/fss18
|
0ea88a4a41684a8b47a8b4aadafe285d90d172ef
|
4e78ed3529934ca2f2d3f984ce4ba2df77b5b300
|
refs/heads/master
| 2020-03-26T22:37:40.406379 | 2018-11-01T20:24:03 | 2018-11-01T20:24:03 | 145,474,500 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,517 |
py
|
# ACK: referred to https://github.com/FahmidMorshed/fss18/tree/master/Week%203
# once or twice to clear up doubts; code was not copied.
import testingModule
import os,sys,inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
from Sample import Sample
from Sym import Sym
from Num import Num
from testingModule import O
import random
@O.k
def testing_Sample():
random.seed(1)
s = []
#-- create some samples holding 32,64,128... max items
for i in range(5,10):
o = Sample(2**i)
s.append(o)
#-- 10,000 store the same number in all samples
for i in range(1,10000):
y= random.random()
for t in s:
t.sampleInc(y)
    #-- check that each sample's median estimate is within +/- 0.33 of 0.5
    for t in s:
        o = t.nth(0.5)
        print(t.max, o)
        assert ((o >= 0.5 - .33) and (o <= 0.5 + .33))
#assert 1 == 1
@O.k
def testing_Sym():
s1 = Sym()
s1 = s1.syms([ 'y','y','y','y','y','y','y','y','y', 'n','n','n','n','n'])
print(s1.symEnt())
assert round(s1.symEnt(),4) == 0.9403
#assert 1 == 1
@O.k
def testing_Num():
n1 = Num()
n1 = n1.nums([ 4,10,15,38,54,57,62,83,100,100,174,190,215,225,
233,250,260,270,299,300,306,333,350,375,443,475,
525,583,780,1000])
print(n1.mu, n1.sd)
assert(n1.mu == 270.3 )
assert(round(n1.sd,3) == 231.946)
if __name__== "__main__":
O.report()
|
[
"[email protected]"
] | |
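The expected value 0.9403 in testing_Sym is the Shannon entropy of 9 "y" and 5 "n" labels; an independent check:

from math import log2

counts = {'y': 9, 'n': 5}
n = sum(counts.values())
entropy = -sum(c / n * log2(c / n) for c in counts.values())
print(round(entropy, 4))  # 0.9403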
a12920082831e56c19bf3a6ab7bca2d8d737aa11
|
53181572c4b22df4b569a9901bcd5347a3459499
|
/ceit_190910_zhuhaotian/quiz2_1025/q3_sort_words.py
|
2b92084de924b1aa44ec88047e5d2dd08b3b85e5
|
[] |
no_license
|
edu-athensoft/ceit4101python_student
|
80ef067b77421fce76d04f778d5c6de8b12f676c
|
33cfa438c062d45e8d246b853e93d3c14b92ff2d
|
refs/heads/master
| 2020-07-30T01:04:21.084384 | 2020-07-27T02:21:57 | 2020-07-27T02:21:57 | 210,027,310 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 285 |
py
|
# sorting words
# five sample words (hard-coded here instead of keyboard input)
word_list = []
word_list.append('aaa')
word_list.append('bab')
word_list.append('aba')
word_list.append('baa')
word_list.append('abb')
# word_list = ['aaa','bab','aba','baa','abb']
print(word_list)
word_list.sort()
print(word_list)
|
[
"[email protected]"
] | |
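For the hard-coded list, the two print calls produce the unsorted and sorted orders:

# ['aaa', 'bab', 'aba', 'baa', 'abb']
# ['aaa', 'aba', 'abb', 'baa', 'bab']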
bf0b4282f32489a6113d59a0f489ec203840d4e7
|
9f91fa2910d13273a50ae416c116e16385a4eb95
|
/natvenv/env/bin/futurize
|
f4d2e794f7d69d3d86a7f87393fb0f58566e13ad
|
[] |
no_license
|
natrayanp/mysb_v2
|
cac811e7f66670f8546cccdbca386ba6ff4f8cd6
|
24dea04e2a631ca6b465b3f62077a83a5dce9758
|
refs/heads/master
| 2022-11-20T16:49:30.341095 | 2018-07-31T17:18:04 | 2018-07-31T17:18:04 | 116,319,931 | 0 | 1 | null | 2022-11-14T21:09:17 | 2018-01-05T00:05:13 |
Python
|
UTF-8
|
Python
| false | false | 457 |
#!/home/natrayan/project/AwsProject/Python/Tradingapp/tradingapp5/natvenv/env/bin/python3.6
# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.16.0','console_scripts','futurize'
__requires__ = 'future==0.16.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('future==0.16.0', 'console_scripts', 'futurize')()
)
|
[
"[email protected]"
] | ||
c27e5d09683ac47879f811cb0cae11bcab8ea895
|
44064ed79f173ddca96174913910c1610992b7cb
|
/Second_Processing_app/temboo/Library/eBay/Trading/GetMemberMessages.py
|
d16e5be16b3903621ecda033d6f17f043b859ee1
|
[] |
no_license
|
dattasaurabh82/Final_thesis
|
440fb5e29ebc28dd64fe59ecd87f01494ed6d4e5
|
8edaea62f5987db026adfffb6b52b59b119f6375
|
refs/heads/master
| 2021-01-20T22:25:48.999100 | 2014-10-14T18:58:00 | 2014-10-14T18:58:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,062 |
py
|
# -*- coding: utf-8 -*-
###############################################################################
#
# GetMemberMessages
# Retrieves a list of the messages that buyers have posted about your active item listings.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetMemberMessages(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetMemberMessages Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
Choreography.__init__(self, temboo_session, '/Library/eBay/Trading/GetMemberMessages')
def new_input_set(self):
return GetMemberMessagesInputSet()
def _make_result_set(self, result, path):
return GetMemberMessagesResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetMemberMessagesChoreographyExecution(session, exec_id, path)
class GetMemberMessagesInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetMemberMessages
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_DisplayToPublic(self, value):
"""
Set the value of the DisplayToPublic input for this Choreo. ((optional, boolean) When set to true, only public messages (viewable in the Item listing) are returned.)
"""
InputSet._set_input(self, 'DisplayToPublic', value)
def set_EndCreationTime(self, value):
"""
Set the value of the EndCreationTime input for this Choreo. ((optional, date) Used to filter by date range (e.g., 2013-02-08T00:00:00.000Z).)
"""
InputSet._set_input(self, 'EndCreationTime', value)
def set_EntriesPerPage(self, value):
"""
Set the value of the EntriesPerPage input for this Choreo. ((optional, integer) The maximum number of records to return in the result.)
"""
InputSet._set_input(self, 'EntriesPerPage', value)
def set_ItemID(self, value):
"""
Set the value of the ItemID input for this Choreo. ((optional, string) The ID of the item the message is about.)
"""
InputSet._set_input(self, 'ItemID', value)
def set_MailMessageType(self, value):
"""
Set the value of the MailMessageType input for this Choreo. ((required, string) The type of message to retrieve. Valid values are: All and AskSellerQuestion. When set to AskSellerQuestion, ItemID or a date range filter must be specified.)
"""
InputSet._set_input(self, 'MailMessageType', value)
def set_MemberMessageID(self, value):
"""
Set the value of the MemberMessageID input for this Choreo. ((optional, string) An ID that uniquely identifies the message for a given user to be retrieved.)
"""
InputSet._set_input(self, 'MemberMessageID', value)
def set_MessageStatus(self, value):
"""
Set the value of the MessageStatus input for this Choreo. ((optional, string) The status of the message. Valid values are: Answered and Unanswered.)
"""
InputSet._set_input(self, 'MessageStatus', value)
def set_PageNumber(self, value):
"""
Set the value of the PageNumber input for this Choreo. ((optional, integer) Specifies the page number of the results to return.)
"""
InputSet._set_input(self, 'PageNumber', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: json (the default) and xml.)
"""
InputSet._set_input(self, 'ResponseFormat', value)
def set_SandboxMode(self, value):
"""
Set the value of the SandboxMode input for this Choreo. ((optional, boolean) Indicates that the request should be made to the sandbox endpoint instead of the production endpoint. Set to 1 to enable sandbox mode.)
"""
InputSet._set_input(self, 'SandboxMode', value)
def set_SenderID(self, value):
"""
Set the value of the SenderID input for this Choreo. ((optional, string) The seller's UserID.)
"""
InputSet._set_input(self, 'SenderID', value)
def set_SiteID(self, value):
"""
Set the value of the SiteID input for this Choreo. ((optional, string) The eBay site ID that you want to access. Defaults to 0 indicating the US site.)
"""
InputSet._set_input(self, 'SiteID', value)
def set_StartCreationTime(self, value):
"""
Set the value of the StartCreationTime input for this Choreo. ((optional, date) Used to filter by date range (e.g., 2013-02-08T00:00:00.000Z).)
"""
InputSet._set_input(self, 'StartCreationTime', value)
def set_UserToken(self, value):
"""
Set the value of the UserToken input for this Choreo. ((required, string) A valid eBay Auth Token.)
"""
InputSet._set_input(self, 'UserToken', value)
class GetMemberMessagesResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetMemberMessages Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from eBay.)
"""
return self._output.get('Response', None)
class GetMemberMessagesChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetMemberMessagesResultSet(response, path)
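if __name__ == '__main__':
    # Minimal usage sketch following the standard Temboo SDK calling pattern.
    # The session credentials and the eBay auth token below are placeholders,
    # not real values.
    from temboo.core.session import TembooSession
    session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
    choreo = GetMemberMessages(session)
    inputs = choreo.new_input_set()
    inputs.set_UserToken('YOUR_EBAY_AUTH_TOKEN')
    inputs.set_MailMessageType('All')
    results = choreo.execute_with_results(inputs)
    print(results.get_Response())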
|
[
"[email protected]"
] | |
08f80ff0d0bbbd7b910370538714a85bfaf01765
|
5e6d8b9989247801718dd1f10009f0f7f54c1eb4
|
/sdk/python/pulumi_azure_native/datashare/v20210801/blob_container_data_set.py
|
acb5a305365291b9c2b2853d7a6f8214d71848df
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
vivimouret29/pulumi-azure-native
|
d238a8f91688c9bf09d745a7280b9bf2dd6d44e0
|
1cbd988bcb2aa75a83e220cb5abeb805d6484fce
|
refs/heads/master
| 2023-08-26T05:50:40.560691 | 2021-10-21T09:25:07 | 2021-10-21T09:25:07 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 15,973 |
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = ['BlobContainerDataSetArgs', 'BlobContainerDataSet']
@pulumi.input_type
class BlobContainerDataSetArgs:
def __init__(__self__, *,
account_name: pulumi.Input[str],
container_name: pulumi.Input[str],
kind: pulumi.Input[str],
resource_group: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
share_name: pulumi.Input[str],
storage_account_name: pulumi.Input[str],
subscription_id: pulumi.Input[str],
data_set_name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a BlobContainerDataSet resource.
:param pulumi.Input[str] account_name: The name of the share account.
:param pulumi.Input[str] container_name: BLOB Container name.
:param pulumi.Input[str] kind: Kind of data set.
Expected value is 'Container'.
:param pulumi.Input[str] resource_group: Resource group of storage account
:param pulumi.Input[str] resource_group_name: The resource group name.
:param pulumi.Input[str] share_name: The name of the share to add the data set to.
:param pulumi.Input[str] storage_account_name: Storage account name of the source data set
:param pulumi.Input[str] subscription_id: Subscription id of storage account
:param pulumi.Input[str] data_set_name: The name of the dataSet.
"""
pulumi.set(__self__, "account_name", account_name)
pulumi.set(__self__, "container_name", container_name)
pulumi.set(__self__, "kind", 'Container')
pulumi.set(__self__, "resource_group", resource_group)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "share_name", share_name)
pulumi.set(__self__, "storage_account_name", storage_account_name)
pulumi.set(__self__, "subscription_id", subscription_id)
if data_set_name is not None:
pulumi.set(__self__, "data_set_name", data_set_name)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> pulumi.Input[str]:
"""
The name of the share account.
"""
return pulumi.get(self, "account_name")
@account_name.setter
def account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "account_name", value)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> pulumi.Input[str]:
"""
BLOB Container name.
"""
return pulumi.get(self, "container_name")
@container_name.setter
def container_name(self, value: pulumi.Input[str]):
pulumi.set(self, "container_name", value)
@property
@pulumi.getter
def kind(self) -> pulumi.Input[str]:
"""
Kind of data set.
Expected value is 'Container'.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: pulumi.Input[str]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter(name="resourceGroup")
def resource_group(self) -> pulumi.Input[str]:
"""
Resource group of storage account
"""
return pulumi.get(self, "resource_group")
@resource_group.setter
def resource_group(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The resource group name.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="shareName")
def share_name(self) -> pulumi.Input[str]:
"""
The name of the share to add the data set to.
"""
return pulumi.get(self, "share_name")
@share_name.setter
def share_name(self, value: pulumi.Input[str]):
pulumi.set(self, "share_name", value)
@property
@pulumi.getter(name="storageAccountName")
def storage_account_name(self) -> pulumi.Input[str]:
"""
Storage account name of the source data set
"""
return pulumi.get(self, "storage_account_name")
@storage_account_name.setter
def storage_account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "storage_account_name", value)
@property
@pulumi.getter(name="subscriptionId")
def subscription_id(self) -> pulumi.Input[str]:
"""
Subscription id of storage account
"""
return pulumi.get(self, "subscription_id")
@subscription_id.setter
def subscription_id(self, value: pulumi.Input[str]):
pulumi.set(self, "subscription_id", value)
@property
@pulumi.getter(name="dataSetName")
def data_set_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the dataSet.
"""
return pulumi.get(self, "data_set_name")
@data_set_name.setter
def data_set_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "data_set_name", value)
class BlobContainerDataSet(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
data_set_name: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
resource_group: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
share_name: Optional[pulumi.Input[str]] = None,
storage_account_name: Optional[pulumi.Input[str]] = None,
subscription_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
An Azure storage blob container data set.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_name: The name of the share account.
:param pulumi.Input[str] container_name: BLOB Container name.
:param pulumi.Input[str] data_set_name: The name of the dataSet.
:param pulumi.Input[str] kind: Kind of data set.
Expected value is 'Container'.
:param pulumi.Input[str] resource_group: Resource group of storage account
:param pulumi.Input[str] resource_group_name: The resource group name.
:param pulumi.Input[str] share_name: The name of the share to add the data set to.
:param pulumi.Input[str] storage_account_name: Storage account name of the source data set
:param pulumi.Input[str] subscription_id: Subscription id of storage account
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BlobContainerDataSetArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
An Azure storage blob container data set.
:param str resource_name: The name of the resource.
:param BlobContainerDataSetArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BlobContainerDataSetArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
data_set_name: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
resource_group: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
share_name: Optional[pulumi.Input[str]] = None,
storage_account_name: Optional[pulumi.Input[str]] = None,
subscription_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BlobContainerDataSetArgs.__new__(BlobContainerDataSetArgs)
if account_name is None and not opts.urn:
raise TypeError("Missing required property 'account_name'")
__props__.__dict__["account_name"] = account_name
if container_name is None and not opts.urn:
raise TypeError("Missing required property 'container_name'")
__props__.__dict__["container_name"] = container_name
__props__.__dict__["data_set_name"] = data_set_name
if kind is None and not opts.urn:
raise TypeError("Missing required property 'kind'")
__props__.__dict__["kind"] = 'Container'
if resource_group is None and not opts.urn:
raise TypeError("Missing required property 'resource_group'")
__props__.__dict__["resource_group"] = resource_group
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if share_name is None and not opts.urn:
raise TypeError("Missing required property 'share_name'")
__props__.__dict__["share_name"] = share_name
if storage_account_name is None and not opts.urn:
raise TypeError("Missing required property 'storage_account_name'")
__props__.__dict__["storage_account_name"] = storage_account_name
if subscription_id is None and not opts.urn:
raise TypeError("Missing required property 'subscription_id'")
__props__.__dict__["subscription_id"] = subscription_id
__props__.__dict__["data_set_id"] = None
__props__.__dict__["name"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:datashare/v20210801:BlobContainerDataSet"), pulumi.Alias(type_="azure-native:datashare:BlobContainerDataSet"), pulumi.Alias(type_="azure-nextgen:datashare:BlobContainerDataSet"), pulumi.Alias(type_="azure-native:datashare/v20181101preview:BlobContainerDataSet"), pulumi.Alias(type_="azure-nextgen:datashare/v20181101preview:BlobContainerDataSet"), pulumi.Alias(type_="azure-native:datashare/v20191101:BlobContainerDataSet"), pulumi.Alias(type_="azure-nextgen:datashare/v20191101:BlobContainerDataSet"), pulumi.Alias(type_="azure-native:datashare/v20200901:BlobContainerDataSet"), pulumi.Alias(type_="azure-nextgen:datashare/v20200901:BlobContainerDataSet"), pulumi.Alias(type_="azure-native:datashare/v20201001preview:BlobContainerDataSet"), pulumi.Alias(type_="azure-nextgen:datashare/v20201001preview:BlobContainerDataSet")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(BlobContainerDataSet, __self__).__init__(
'azure-native:datashare/v20210801:BlobContainerDataSet',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'BlobContainerDataSet':
"""
Get an existing BlobContainerDataSet resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = BlobContainerDataSetArgs.__new__(BlobContainerDataSetArgs)
__props__.__dict__["container_name"] = None
__props__.__dict__["data_set_id"] = None
__props__.__dict__["kind"] = None
__props__.__dict__["name"] = None
__props__.__dict__["resource_group"] = None
__props__.__dict__["storage_account_name"] = None
__props__.__dict__["subscription_id"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
return BlobContainerDataSet(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> pulumi.Output[str]:
"""
BLOB Container name.
"""
return pulumi.get(self, "container_name")
@property
@pulumi.getter(name="dataSetId")
def data_set_id(self) -> pulumi.Output[str]:
"""
Unique id for identifying a data set resource
"""
return pulumi.get(self, "data_set_id")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[str]:
"""
Kind of data set.
Expected value is 'Container'.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the azure resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceGroup")
def resource_group(self) -> pulumi.Output[str]:
"""
Resource group of storage account
"""
return pulumi.get(self, "resource_group")
@property
@pulumi.getter(name="storageAccountName")
def storage_account_name(self) -> pulumi.Output[str]:
"""
Storage account name of the source data set
"""
return pulumi.get(self, "storage_account_name")
@property
@pulumi.getter(name="subscriptionId")
def subscription_id(self) -> pulumi.Output[str]:
"""
Subscription id of storage account
"""
return pulumi.get(self, "subscription_id")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
System Data of the Azure resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Type of the azure resource
"""
return pulumi.get(self, "type")
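# A hypothetical usage sketch: declaring this resource from a Pulumi program.
# All resource names and IDs below are placeholders. (Kept as a comment so that
# importing this SDK module does not try to register a resource.)
#
#     import pulumi_azure_native as azure_native
#
#     data_set = azure_native.datashare.v20210801.BlobContainerDataSet(
#         "exampleDataSet",
#         account_name="example-share-account",
#         container_name="example-container",
#         kind="Container",
#         resource_group="example-storage-rg",
#         resource_group_name="example-rg",
#         share_name="example-share",
#         storage_account_name="examplestorage",
#         subscription_id="00000000-0000-0000-0000-000000000000")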
|
[
"[email protected]"
] | |
cbec3524bde8a5ee537bdc6d6c7858463522890b
|
199677a3a5b4d205f03f39ef517f17a0f1fd450d
|
/transformer/Optim.py
|
9f1a30ccc4f6ba35e173be280643bbdbcc4abc8f
|
[] |
no_license
|
RRisto/jyri_bot
|
b304b834db3cb059afd63246fecd80569fea8f01
|
0018235436331cf733051a0125069cf052c1d28a
|
refs/heads/master
| 2023-05-14T12:19:40.672041 | 2020-03-15T16:11:13 | 2020-03-15T16:11:13 | 243,827,027 | 0 | 0 | null | 2023-05-01T21:21:31 | 2020-02-28T18:18:04 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 2,329 |
py
|
import torch
import numpy as np
# code from AllenNLP
class CosineWithRestarts(torch.optim.lr_scheduler._LRScheduler):
"""
Cosine annealing with restarts.
Parameters
----------
optimizer : torch.optim.Optimizer
T_max : int
The maximum number of iterations within the first cycle.
eta_min : float, optional (default: 0)
The minimum learning rate.
    last_epoch : int, optional (default: -1)
        The index of the last epoch.
    factor : float, optional (default: 1)
        The factor by which the cycle length is multiplied after each restart.
    """
def __init__(self,
optimizer: torch.optim.Optimizer,
T_max: int,
eta_min: float = 0.,
last_epoch: int = -1,
factor: float = 1.) -> None:
# pylint: disable=invalid-name
self.T_max = T_max
self.eta_min = eta_min
self.factor = factor
self._last_restart: int = 0
self._cycle_counter: int = 0
self._cycle_factor: float = 1.
self._updated_cycle_len: int = T_max
self._initialized: bool = False
super(CosineWithRestarts, self).__init__(optimizer, last_epoch)
def get_lr(self):
"""Get updated learning rate."""
# HACK: We need to check if this is the first time get_lr() was called, since
# we want to start with step = 0, but _LRScheduler calls get_lr with
# last_epoch + 1 when initialized.
if not self._initialized:
self._initialized = True
return self.base_lrs
step = self.last_epoch + 1
self._cycle_counter = step - self._last_restart
lrs = [
(
self.eta_min + ((lr - self.eta_min) / 2) *
(
np.cos(
np.pi *
((self._cycle_counter) % self._updated_cycle_len) /
self._updated_cycle_len
) + 1
)
) for lr in self.base_lrs
]
if self._cycle_counter % self._updated_cycle_len == 0:
# Adjust the cycle length.
self._cycle_factor *= self.factor
self._cycle_counter = 0
self._updated_cycle_len = int(self._cycle_factor * self.T_max)
self._last_restart = step
return lrs
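if __name__ == "__main__":
    # Minimal sketch: watch the learning rate anneal and restart, with the cycle
    # length doubling after each restart (factor=2.0). The tiny linear model
    # exists only to give the optimizer some parameters.
    model = torch.nn.Linear(2, 1)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    scheduler = CosineWithRestarts(optimizer, T_max=5, factor=2.0)
    for step in range(15):
        optimizer.step()
        scheduler.step()
        print(step, optimizer.param_groups[0]["lr"])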
|
[
"[email protected]"
] | |
6315f499af64fbc9b68fcc99493d8765b4c830e6
|
db2a6433d6cbc0652308ad31f0ceab12a32cc37e
|
/data/base_data_manager.py
|
623a8cbaab49ffe755b3ce77b17aa97e46a13c5d
|
[] |
no_license
|
ablewald/RLDP
|
c42e6657024410753c770614feaad5ee505eb2a5
|
6d39348e94ec86f3f9f43fc0eaba112c5b2e13c7
|
refs/heads/master
| 2020-04-26T20:49:05.565537 | 2019-03-11T16:52:08 | 2019-03-11T16:52:08 | 173,822,474 | 1 | 0 | null | 2019-03-04T21:14:36 | 2019-03-04T21:14:36 | null |
UTF-8
|
Python
| false | false | 1,969 |
py
|
import logging
from typing import NamedTuple, Optional
import pandas as pd
from carball.generated.api.game_pb2 import Game
from data.utils.utils import normalise_df
logger = logging.getLogger(__name__)
class GameData(NamedTuple):
proto: Optional[Game]
df: Optional[pd.DataFrame]
class DataManager:
"""
Abstract class that implements get_data() and the need_proto, need_df, and normalise_df attributes.
Also implements the various methods required from subclasses.
"""
def __init__(self, need_proto: bool = False, need_df: bool = False, normalise_df: bool = True):
"""
:param need_proto: Whether to load the .proto attribute when get_data is called.
:param need_df: Whether to load the .df attribute when get_data is called.
:param normalise_df: Whether to normalise the df when get_data is called.
"""
self.need_proto = need_proto
self.need_df = need_df
self.normalise_df = normalise_df
def get_data(self, id_: str) -> GameData:
"""
Returns a GameData object which has a .proto and .df attribute.
Both default to None, unless self.need_proto or self.need_df are True respectively.
If self.normalise_df is True, the returned GameData.df would be normalised.
:param id_: Replay id
:return: GameData object which has a .proto and .df attribute.
"""
proto = self.get_proto(id_) if self.need_proto else None
df = self.get_df(id_) if self.need_df else None
if self.normalise_df:
df = normalise_df(df)
logger.info(f"Got data for replay: {id_}")
return GameData(proto, df)
def get_replay_list(self, num: int = 50):
raise NotImplementedError()
def get_df(self, id_: str) -> pd.DataFrame:
raise NotImplementedError()
def get_proto(self, id_: str) -> Game:
raise NotImplementedError()
class BrokenDataError(Exception):
pass
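if __name__ == "__main__":
    # Minimal sketch: a DataManager backed by an in-memory dict of DataFrames.
    # The sample frame is made up, and normalisation is disabled so the example
    # does not depend on real replay data.
    class InMemoryDataManager(DataManager):
        def __init__(self, dfs):
            super().__init__(need_df=True, normalise_df=False)
            self.dfs = dfs

        def get_df(self, id_: str) -> pd.DataFrame:
            if id_ not in self.dfs:
                raise BrokenDataError(f"No data for replay: {id_}")
            return self.dfs[id_]

    manager = InMemoryDataManager({"replay1": pd.DataFrame({"x": [1, 2, 3]})})
    print(manager.get_data("replay1").df)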
|
[
"[email protected]"
] | |
476f7760da9d24dd776d5c5707f96d54ad244c15
|
1d05ebcbfcf806f5459990a6a64e73b48ba9892d
|
/docs/conf.py
|
cd2d2b71b2982fd6fa2387e27e3f406013990318
|
[
"MIT"
] |
permissive
|
asmodehn/caerbannog
|
2da3cafb5c0323a4802467b1d42573ad450ed1e7
|
47bb1138190748041a4c0d02e522c0924a9af962
|
refs/heads/master
| 2023-07-09T15:34:04.100983 | 2019-04-03T09:15:19 | 2019-04-03T09:15:19 | 167,014,660 | 0 | 0 |
MIT
| 2019-03-10T11:12:01 | 2019-01-22T15:10:09 |
Python
|
UTF-8
|
Python
| false | false | 5,490 |
py
|
# -*- coding: utf-8 -*-
#
# Caerbannog documentation build configuration file, created by
# sphinx-quickstart on Sat Feb 9 19:32:44 2019.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(1, os.path.abspath('../01'))
sys.path.insert(1, os.path.abspath('../02'))
sys.path.insert(1, os.path.abspath('../03'))
sys.path.insert(1, os.path.abspath('../04'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.imgmath',
'sphinx.ext.viewcode',
'sphinx.ext.intersphinx',
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Caerbannog'
copyright = u'2019, AlexV'
author = u'AlexV'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Link to python documentation
intersphinx_mapping = {'python': ('https://docs.python.org/3', None)}
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Caerbannogdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Caerbannog.tex', u'Caerbannog Documentation',
u'AlexV', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'caerbannog', u'Caerbannog Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Caerbannog', u'Caerbannog Documentation',
author, 'Caerbannog', 'Follow the white rabbit.',
'Miscellaneous'),
]
|
[
"[email protected]"
] | |
d95b0548f2e6420f3ccee5b833c033df4a965499
|
6e12c2e6d453ea1caf64c0eafaf3410b30f434e0
|
/shop/migrations/0034_supplier_show_in_order.py
|
fe6c4a45d5be5bbddeb3d730c251d63778ff5f7d
|
[] |
no_license
|
andreynovikov/django-shop
|
43b66ec639037473cd72f7480f83811d911104fb
|
8f843b0b15354903a335c324daa65714bfb2f8cc
|
refs/heads/master
| 2023-04-28T01:26:16.938227 | 2023-04-22T15:33:29 | 2023-04-22T15:33:29 | 43,815,663 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 477 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('shop', '0033_auto_20161125_1809'),
]
operations = [
migrations.AddField(
model_name='supplier',
name='show_in_order',
field=models.BooleanField(db_index=True, verbose_name='показывать в заказе', default=False),
),
]
|
[
"[email protected]"
] | |
36db238a5b5abe3f0914813641510bb82143db4f
|
51888119e10cdff12dafb060a54824632edccf3f
|
/Folders/Python/expressionMy.py
|
14699ba1a009fe68ceae66e06157b87238811cde
|
[
"BSD-2-Clause"
] |
permissive
|
kuchinal/lamakaha
|
b64511ad8c6d2b36da5a84a266b9e7a69acd3106
|
24e3b2ff53bcac2ad1c0e5a3b9afd4593d85f22d
|
refs/heads/master
| 2023-09-01T17:55:56.551183 | 2023-07-31T19:32:04 | 2023-07-31T19:32:04 | 182,849,747 | 0 | 0 | null | 2021-09-10T06:34:22 | 2019-04-22T19:00:02 |
Python
|
UTF-8
|
Python
| false | false | 370 |
py
|
import nuke
def expressionMy():
try:
if 'Deep' in nuke.selectedNode().Class() and "DeepHoldout" not in nuke.selectedNode()['name'].value() and "DeepToImage" not in nuke.selectedNode()['name'].value():
nuke.createNode("DeepExpression")
else:
nuke.createNode("Expression")
    except:  # no node is selected (selectedNode() raises), so fall back to a 2D Expression

nuke.createNode("Expression")
|
[
"[email protected]"
] | |
4aaf88b9b7c2ae1f4dd6a2157c55eae876572c46
|
ad9bd58a3ec8fa08dfcc994d4101ee815a9f5bc0
|
/02_algorithm/programmers/Level3/정수 삼각형.py
|
799f7bd3aaebc37d94b122073b679fc586cec6c8
|
[] |
no_license
|
wally-wally/TIL
|
93fc1d0e3bc7d030341ed54155294c68c48b4c7d
|
936783bc86f563646c0398c24e2fcaa707f0ed23
|
refs/heads/master
| 2023-04-28T08:59:48.235747 | 2023-04-12T12:06:52 | 2023-04-12T12:06:52 | 195,918,111 | 40 | 7 | null | 2020-09-29T16:20:46 | 2019-07-09T02:31:02 |
Python
|
UTF-8
|
Python
| false | false | 625 |
py
|
def solution(data):
n = len(data)
arr = [[num for num in data[idx]] + [0] * (n - idx - 1) for idx in range(n)]
DP = [[0] * n for _ in range(n)]
DP[0][0] = arr[0][0]
if n == 1:
return DP[0][0]
for i in range(1, n):
for j in range(i + 1):
if j == 0:
DP[i][j] = DP[i - 1][j] + arr[i][j]
elif j == i:
DP[i][j] = DP[i - 1][j - 1] + arr[i][j]
else:
DP[i][j] = max(DP[i - 1][j - 1], DP[i - 1][j]) + arr[i][j]
return max(DP[n - 1])
print(solution([[7], [3, 8], [8, 1, 0], [2, 7, 4, 4], [4, 5, 2, 6, 5]]))
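# Note on the recurrence: DP[i][j] holds the best path sum ending at row i,
# index j, via DP[i][j] = max(DP[i - 1][j - 1], DP[i - 1][j]) + arr[i][j].
# For the sample triangle above the expected output is 30 (path 7 + 3 + 8 + 7 + 5).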
|
[
"[email protected]"
] | |
158323da0447340d0b42dcfb2cfbf3efdd3bed85
|
aec687a4b6f85190981fee4193a67ff2e1ee51a4
|
/semana5/MyHolidays/myholidays/holidays.py
|
9655fe98fff656337b8ff31b09d69ae286f94c4e
|
[] |
no_license
|
brunobuenonapp/NappAcademy-1
|
7766072a9d23e22b7415eae703998292ff1c8413
|
3eddbc90d220724e5e8bc95c2403a1be9ce8d733
|
refs/heads/master
| 2023-03-13T19:48:36.118382 | 2021-03-05T13:31:09 | 2021-03-05T13:31:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 100 |
py
|
from datetime import date
class MyCalendar:
def __init__(self, *args):
        # Keep any dates passed in; the list is empty when no arguments are given.
        self.datas = list(args)
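if __name__ == "__main__":
    # Usage sketch: the stored dates come straight from the constructor args.
    cal = MyCalendar(date(2021, 1, 1), date(2021, 12, 25))
    print(cal.datas)  # [datetime.date(2021, 1, 1), datetime.date(2021, 12, 25)]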
|
[
"[email protected]"
] | |
51f4c0948a8ff2aca4506ba0482a184398efee8a
|
c3d0a0b6336a3ff73724fe1615eb1809dbdaaed8
|
/Durga OOPs/Super()_Method_OOPs.py
|
298d926568c6b45e2b71f79a093affe5bdb7a139
|
[] |
no_license
|
Silentsoul04/FTSP_2020
|
db0dae6cd9c371f3daa9219f86520dfa66348236
|
7e603af918da2bcfe4949a4cf5a33107c837894f
|
refs/heads/master
| 2022-12-21T20:44:32.031640 | 2020-09-20T12:29:58 | 2020-09-20T12:29:58 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 15,784 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Feb 29 19:14:09 2020
@author: Rajesh
"""
Super() Method :- It is used to call parent class members from a child class; the parent class is also known as the super class, which is where the name comes from.
-------------
Question :- Why do we need the super() method ?
Answer :- It is very useful in the Python OOPs concept of Inheritance.
It is used in a child class to call parent class members.
It enables code re-usability.
class Person:
def __init__(self,name,age):
self.name = name
self.age = age
# 100 properties.
class Student(Person):
def __init__(self,name,age,rollno,marks):
self.name = name
self.age = age
# 100 properties.
self.rollno = rollno
self.marks = marks
class Teacher(Person):
def __init__(self,name,age,salary,subject):
self.name = name
self.age = age
# 100 properties.
self.salary = salary
self.subject = subject
s = Student('Rajesh',25,1001,75)
t = Teacher('Durga',45,10000,'Python')
************ Result ************
Here we will not be getting any error because the code is correct, but there is a lot of duplicated code.
So, we need to use the super() method.
------------------------------------------------------------------------------------------------------------------
class Person:
def __init__(self,name,age): # Parent Constructor
self.name = name
self.age = age
# 100 properties.
class Student(Person):
def __init__(self,name,age,rollno,marks): # child constructor
super().__init__(name,age)
self.rollno = rollno
self.marks = marks
def display(self):
print('Name :', self.name)
print('Age :', self.age)
print('Roll No.', self.rollno)
print('Marks :', self.marks)
class Teacher(Person):
def __init__(self,name,age,salary,subject): # child constructor
super().__init__(name,age)
self.salary = salary
self.subject = subject
def display(self):
print('Name :', self.name)
print('Age :', self.age)
print('Salary :', self.salary)
print('Subject :', self.subject)
s = Student('Rajesh',25,1001,75)
t = Teacher('Durga',45,10000,'Python')
s.display()
print('*'*20)
t.display()
NOTE :- The common properties are written only once, in the parent constructor, and we can call it
any time from the child constructors. Here we can see that code re-usability is there.
Code efficiency is increased automatically.
Question :- The above parent constructor will be executed for child purpose or not ?
Answer :- Yes, of course the parent constructor will be executed for child class objects like
Student & Teacher, even though I have not created any object for the parent class.
NOTE :- Here we are just creating objects for the child classes Student & Teacher, and the
parent constructor is executed automatically for both classes. But a parent class object is not
created automatically when the child class objects are created.
NOTE :- In the above scenario we can say that the super() method is used for code re-usability
and for calling the parent constructor via child class object creation.
NOTE :- Internally, the super() method works on the MRO Algorithm only.
------------------------------------------------------------------------------------------------------------------
NOTE :- We can see that in the above example Code Re-usability is there for name and age
but there is NO code Re-usability for display() method. We can do some modification in the above codes.
class Person:
def __init__(self,name,age): # Parent Constructor
self.name = name
self.age = age
# 100 properties.
def display(self):
print('Name :', self.name)
print('Age :', self.age)
class Student(Person):
def __init__(self,name,age,rollno,marks): # child constructor
super().__init__(name,age)
self.rollno = rollno
self.marks = marks
def display(self):
super().display()
print('Roll No.', self.rollno)
print('Marks :', self.marks)
class Teacher(Person):
def __init__(self,name,age,salary,subject): # child constructor
super().__init__(name,age)
self.salary = salary
self.subject = subject
def display(self):
super().display()
print('Salary :', self.salary)
print('Subject :', self.subject)
s = Student('Rajesh',25,1001,75)
t = Teacher('Durga',45,10000,'Python')
s.display()
print('*'*20)
t.display()
NOTE :- The biggest use of the super() method is to call the parent class constructor and the parent
class display() method from every child class. We just need to define the common methods and constructor
once in the parent class; they are then easy to access from every child class, which helps code re-usability.
----------------------------------------------------------------------------------------------------------------
Ex :-
A
|
B
|
C
|
D
|
E ---> super().m1()
NOTE :- If class E contains a super().m1() call, the lookup starts at E's immediate parent,
class D. If m1() is not found in class D, the search continues up the chain to class C,
and so on.
----------------------------------------------------------------------------------------------------------------
A --> m1()
|
B --> m1()
|
C --> m1()
|
D --> m1()
|
E ---> super().m1()
NOTE :- Even if every class contains the m1() method, the lookup always follows the MRO Algorithm.
NOTE :- If class E contains a super().m1() call, the lookup starts at E's immediate parent,
class D. If m1() is not found in class D, the search continues up the chain to class C,
and so on.
Ex :-
class A:
def m1(self):
print('A class m1() method')
class B(A):
def m1(self):
print('B class m1() method')
class C(B):
def m1(self):
print('C class m1() method')
class D(C):
def m1(self):
print('D class m1() method')
class E(D):
def m1(self):
print('E class m1() method')
e = E() # Object creation for E class.
e.m1() # calling the m1() of E class. # E class m1() method.
*********** Result **********
E class m1() method
---------------------------------------------------------------------------------------------------------
Ex :-
class A:
def m1(self):
print('A class m1() method')
class B(A):
def m1(self):
print('B class m1() method')
class C(B):
def m1(self):
print('C class m1() method')
class D(C):
def m1(self):
print('D class m1() method')
class E(D):
def m1(self):
super().m1()
e = E() # Object creation for E class.
e.m1() # D class m1() method
*********** Result **********
D class m1() method
-----------------------------------------------------------------------------------------------
Ex :-
class A:
def m1(self):
print('A class m1() method')
class B(A):
def m1(self):
print('B class m1() method')
class C(B):
def m1(self):
print('C class m1() method')
class D(C):pass
class E(D):
def m1(self):
super().m1()
e = E() # Object creation for E class.
e.m1() # C class m1() method
*********** Result **********
C class m1() method
-----------------------------------------------------------------------------------------------------
How to call a particular parent class method by using super() method :-
--------------------------------------------------------------------
Ex :- Here, from class E, we want to call the m1() method of class B.
NOTE :- There are 2 ways to call a particular parent class method (like B's m1()) from a child class.
1) parentclassname.methodname(self)
ex :- B.m1(self)
For Constructor also it will work
Ex :- parentclassname.Constructor(self,parameters names)
B.__init__(self,name,age)
2) super(D,self).m1()
class A:
def m1(self):
print('A class m1() method')
class B(A):
def m1(self):
print('B class m1() method')
class C(B):
def m1(self):
print('C class m1() method')
class D(C):
def m1(self):
print('D class m1() method')
class E(D):
def m1(self):
B.m1(self)
A.m1(self)
e = E() # Object creation for E class.
e.m1()
*********** Result **********
B class m1() method
A class m1() method
-------------------------------------------------------------------------------------------
2) super(D,self).m1()
class A:
def m1(self):
print('A class m1() method')
class B(A):
def m1(self):
print('B class m1() method')
class C(B):
def m1(self):
print('C class m1() method')
class D(C):
def m1(self):
print('D class m1() method')
class E(D):
def m1(self):
        super(D,self).m1() # the super of class D is class C, so this calls the m1() method of class C.
# super(C,self).m1()
# super(A,self).m1() # AttributeError: 'super' object has no attribute 'm1'
e = E() # Object creation for E class.
e.m1() # C class m1() method
***************** Result ***********
C class m1() method
NOTE :- super(D,self).m1() is where most people get confused and give the wrong answer that the
D class m1() method will be executed. In fact, it is the m1() method of the super class of D that
is executed; the super class of D is class C, so finally the C class m1() method is executed.
NOTE :- If we call super(C,self).m1(), the super class of C is class B, so the
B class m1() method will be executed successfully.
NOTE :- If we call super(A,self).m1() then we will get an error, because the super class of A
(which is object) has no m1(). It throws an error like " AttributeError: 'super' object has no attribute 'm1' ".
---------------------------------------------------------------------------------------------------------------------------------
class P:
a = 10 # static variable
def __init__(self):
self.b = 20 # Instance variable
class C(P):
def m1(self):
print(super().a) # It will be working
        # print(super().b) # error, because we cannot use super() in the child class to call parent class instance variables.
c = C()
c.m1() # 10
NOTE :- print(super().b) # error, because we cannot use the super() method in the child class to call parent class instance variables.
NOTE :- We can not use the super() method in the child class C to call the instance variables of the parent class.
--------------------------------------------------------------------------------------------------------------
Important Points are :-
--------------------
1) From a child class, by using the super() method we can not call parent class instance variables.
We should use the self keyword for those instead.
2) From a child class, by using the super() method we can call parent class static variables.
class P:
a = 10 # static variable
def __init__(self):
self.b = 20 # Instance variable
class C(P):
def m1(self):
print(super().a) # It will be working
print(self.b) # It will be working also
c = C()
c.m1() # 10
**************** Result ****************
10
20
---------------------------------------------------------------------------------------------------------------
class P:
def __init__(self):
print('Parent class Constructor')
def m1(self):
print('Parent class Instance m1() method')
@classmethod
def m2(cls):
print('Parent class class m2() method')
@staticmethod
def m3():
print('Parent class static m3() method')
class C(P):
def __init__(self): # child class Constructor
print('Child class Constructor')
super().__init__()
super().m1()
super().m2()
super().m3()
c = C() # Object creation for child class C.
************* Result *************
Child class Constructor
Parent class Constructor
Parent class Instance m1() method
Parent class class m2() method
Parent class static m3() method
NOTE :- Here, when we create the object for child class C, the child constructor runs automatically,
and its corresponding super() calls execute the parent constructor and parent methods.
-----------------------------------------------------------------------------------------------------------
class P:
def __init__(self):
print('Parent class Constructor')
def m1(self):
print('Parent class Instance m1() method')
@classmethod
def m2(cls):
print('Parent class class m2() method')
@staticmethod
def m3():
print('Parent class static m3() method')
class C(P):
def method1(self): # child class Instance method.
super().__init__()
super().m1()
super().m2()
super().m3()
c = C() # Object creation for child class C.
c.method1()
-------------------------------------------------------------------------------------------------
class P:
def __init__(self):
print('Parent class Constructor')
def m1(self):
print('Parent class Instance m1() method')
@classmethod
def m2(cls):
print('Parent class class m2() method')
@staticmethod
def m3():
print('Parent class static m3() method')
class C(P):
@classmethod
    def method1(cls): # child class classmethod.
super().__init__()
super().m1()
super().m2()
super().m3()
c = C() # Object creation for child class C.
c.method1()
-------------------------------------------------------------------------------------------------------
class P:
def __init__(self):
print('Parent class Constructor')
def m1(self):
print('Parent class Instance m1() method')
@classmethod
def m2(cls):
print('Parent class class m2() method')
@staticmethod
def m3():
print('Parent class static m3() method')
class C(P):
@staticmethod
    def method1(): # child class static method.
super().__init__()
super().m1()
super().m2()
super().m3()
#c = C() # Object creation for child class C.
#c.method1()
C.method1()
------------------------------------------------------------------------------------------------------------
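A small extra sketch :- super() resolves along the MRO (Method Resolution Order),
which we can inspect directly. The class names follow the A-E chain used above;
the printed list is what CPython reports.

class A: pass
class B(A): pass
class C(B): pass
class D(C): pass
class E(D): pass

print([cls.__name__ for cls in E.__mro__])
# ['E', 'D', 'C', 'B', 'A', 'object']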
|
[
"[email protected]"
] | |
bcbf988f50e81324829f054a398cd2482060cfc2
|
640d26baa9322b92ea5d247280668b4ad7475f8d
|
/robot_assignment_ws/build/turtlebot/turtlebot_see/catkin_generated/pkg.installspace.context.pc.py
|
87d357364c348496a3a8053c19362e393f8311c9
|
[] |
no_license
|
JulianVJacobs/Robotics-Project-2021
|
6baa5a6423a28cc278b84d831f2d8c9f5239da90
|
18a58cee8e2793bd05e5e158c0c998099fc62d5c
|
refs/heads/main
| 2023-06-03T02:47:15.579120 | 2021-06-25T19:56:32 | 2021-06-25T19:56:32 | 374,733,611 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 387 |
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "turtlebot_see"
PROJECT_SPACE_DIR = "/home/julian/robot_assignment_ws/install"
PROJECT_VERSION = "0.0.0"
|
[
"[email protected]"
] | |
6ebe306e963ff722a39f95e97c0e7e0176f232db
|
61fc42a1411551a023426705825692b570209f24
|
/seleniumbase/translate/japanese.py
|
54afb345ddc23d76b3f31a9e51052c5a96d40412
|
[
"MIT"
] |
permissive
|
dE1l/SeleniumBase
|
9c33be707cd9773f9c4a53439eed74583ef14f43
|
6020f9fb3dd25700636b3837d5be192387d23acd
|
refs/heads/master
| 2022-04-23T23:22:07.196648 | 2020-04-22T07:18:31 | 2020-04-22T07:18:31 | 257,905,752 | 0 | 0 | null | 2020-04-22T13:13:03 | 2020-04-22T13:13:02 | null |
UTF-8
|
Python
| false | false | 8,569 |
py
|
# Japanese / 日本語 - Translations - Python 3 Only!
from seleniumbase import BaseCase
class セレニウムテストケース(BaseCase): # noqa
def URLを開く(self, *args, **kwargs):
# open(url)
return self.open(*args, **kwargs)
def クリックして(self, *args, **kwargs):
# click(selector)
return self.click(*args, **kwargs)
def ダブルクリックして(self, *args, **kwargs):
# double_click(selector)
return self.double_click(*args, **kwargs)
def ゆっくりクリックして(self, *args, **kwargs):
# slow_click(selector)
return self.slow_click(*args, **kwargs)
def リンクテキストをクリックします(self, *args, **kwargs):
# click_link_text(link_text)
return self.click_link_text(*args, **kwargs)
def テキストを更新(self, *args, **kwargs):
# update_text(selector, new_value)
return self.update_text(*args, **kwargs)
def テキストを追加(self, *args, **kwargs):
# add_text(selector, new_value)
return self.add_text(*args, **kwargs)
def テキストを取得(self, *args, **kwargs):
# get_text(selector, new_value)
return self.get_text(*args, **kwargs)
def テキストを確認する(self, *args, **kwargs):
# assert_text(text, selector)
return self.assert_text(*args, **kwargs)
def 正確なテキストを確認する(self, *args, **kwargs):
# assert_exact_text(text, selector)
return self.assert_exact_text(*args, **kwargs)
def 要素を確認する(self, *args, **kwargs):
# assert_element(selector)
return self.assert_element(*args, **kwargs)
def タイトルを確認(self, *args, **kwargs):
# assert_title(title)
return self.assert_title(*args, **kwargs)
def 検証が正しい(self, *args, **kwargs):
# assert_true(expr)
return self.assert_true(*args, **kwargs)
def 検証は偽です(self, *args, **kwargs):
# assert_false(expr)
return self.assert_false(*args, **kwargs)
def 検証が等しい(self, *args, **kwargs):
# assert_equal(first, second)
return self.assert_equal(*args, **kwargs)
def 検証が等しくない(self, *args, **kwargs):
# assert_not_equal(first, second)
return self.assert_not_equal(*args, **kwargs)
def ページを更新する(self, *args, **kwargs):
# refresh_page()
return self.refresh_page(*args, **kwargs)
def 現在のURLを取得(self, *args, **kwargs):
# get_current_url()
return self.get_current_url(*args, **kwargs)
def ページのソースコードを取得する(self, *args, **kwargs):
# get_page_source()
return self.get_page_source(*args, **kwargs)
def 戻る(self, *args, **kwargs):
# go_back()
return self.go_back(*args, **kwargs)
def 進む(self, *args, **kwargs):
# go_forward()
return self.go_forward(*args, **kwargs)
def テキストが表示されています(self, *args, **kwargs):
# is_text_visible(text, selector="html")
return self.is_text_visible(*args, **kwargs)
def 要素は表示されますか(self, *args, **kwargs):
# is_element_visible(selector)
return self.is_element_visible(*args, **kwargs)
def 要素が存在するかどうか(self, *args, **kwargs):
# is_element_present(selector)
return self.is_element_present(*args, **kwargs)
def テキストを待つ(self, *args, **kwargs):
# wait_for_text(text, selector)
return self.wait_for_text(*args, **kwargs)
def 要素を待つ(self, *args, **kwargs):
# wait_for_element(selector)
return self.wait_for_element(*args, **kwargs)
def 眠る(self, *args, **kwargs):
# sleep(seconds)
return self.sleep(*args, **kwargs)
def を提出す(self, *args, **kwargs):
# submit(selector)
return self.submit(*args, **kwargs)
def JSクリックして(self, *args, **kwargs):
# js_click(selector)
return self.js_click(*args, **kwargs)
def HTMLをチェック(self, *args, **kwargs):
# inspect_html()
return self.inspect_html(*args, **kwargs)
def スクリーンショットを保存(self, *args, **kwargs):
# save_screenshot(name)
return self.save_screenshot(*args, **kwargs)
def ファイルを選択(self, *args, **kwargs):
# choose_file(selector, file_path)
return self.choose_file(*args, **kwargs)
def スクリプトを実行する(self, *args, **kwargs):
# execute_script(script)
return self.execute_script(*args, **kwargs)
def ブロック広告(self, *args, **kwargs):
# ad_block()
return self.ad_block(*args, **kwargs)
def スキップ(self, *args, **kwargs):
# skip(reason="")
return self.skip(*args, **kwargs)
def リンク切れを確認する(self, *args, **kwargs):
# assert_no_404_errors()
return self.assert_no_404_errors(*args, **kwargs)
def JSエラーを確認する(self, *args, **kwargs):
# assert_no_js_errors()
return self.assert_no_js_errors(*args, **kwargs)
def フレームに切り替え(self, *args, **kwargs):
# switch_to_frame(frame)
return self.switch_to_frame(*args, **kwargs)
def デフォルトのコンテンツに切り替える(self, *args, **kwargs):
# switch_to_default_content()
return self.switch_to_default_content(*args, **kwargs)
def 新しいウィンドウを開く(self, *args, **kwargs):
# open_new_window()
return self.open_new_window(*args, **kwargs)
def ウィンドウに切り替え(self, *args, **kwargs):
# switch_to_window(window)
return self.switch_to_window(*args, **kwargs)
def デフォルトのウィンドウに切り替える(self, *args, **kwargs):
# switch_to_default_window()
return self.switch_to_default_window(*args, **kwargs)
def ハイライト(self, *args, **kwargs):
# highlight(selector)
return self.highlight(*args, **kwargs)
def ハイライトしてクリックして(self, *args, **kwargs):
# highlight_click(selector)
return self.highlight_click(*args, **kwargs)
def スクロールして(self, *args, **kwargs):
# scroll_to(selector)
return self.scroll_to(*args, **kwargs)
def 一番上までスクロール(self, *args, **kwargs):
# scroll_to_top()
return self.scroll_to_top(*args, **kwargs)
def 一番下までスクロール(self, *args, **kwargs):
# scroll_to_bottom()
return self.scroll_to_bottom(*args, **kwargs)
def 上にマウスを移動しクリック(self, *args, **kwargs):
# hover_and_click(hover_selector, click_selector)
return self.hover_and_click(*args, **kwargs)
def 選択されていることを(self, *args, **kwargs):
# is_selected(selector)
return self.is_selected(*args, **kwargs)
def 上矢印を押します(self, *args, **kwargs):
# press_up_arrow(selector="html", times=1)
return self.press_up_arrow(*args, **kwargs)
def 下矢印を押します(self, *args, **kwargs):
# press_down_arrow(selector="html", times=1)
return self.press_down_arrow(*args, **kwargs)
def 左矢印を押します(self, *args, **kwargs):
# press_left_arrow(selector="html", times=1)
return self.press_left_arrow(*args, **kwargs)
def 右矢印を押します(self, *args, **kwargs):
# press_right_arrow(selector="html", times=1)
return self.press_right_arrow(*args, **kwargs)
def 表示要素をクリックします(self, *args, **kwargs):
# click_visible_elements(selector)
return self.click_visible_elements(*args, **kwargs)
def テキストでオプションを選択(self, *args, **kwargs):
# select_option_by_text(dropdown_selector, option)
return self.select_option_by_text(*args, **kwargs)
def インデックスでオプションを選択(self, *args, **kwargs):
# select_option_by_index(dropdown_selector, option)
return self.select_option_by_index(*args, **kwargs)
def 値でオプションを選択(self, *args, **kwargs):
# select_option_by_value(dropdown_selector, option)
return self.select_option_by_value(*args, **kwargs)
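# A hypothetical usage sketch: a test case written with the translated method
# names above. The URL and selector are placeholders; tests run with pytest as
# usual for SeleniumBase. (Kept as a comment so importing this module does not
# register a test.)
#
# class 私のテストクラス(セレニウムテストケース):
#     def test_サンプル(self):
#         self.URLを開く("https://example.com")
#         self.テキストを確認する("Example Domain", "h1")
#         self.スクリーンショットを保存("example")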
|
[
"[email protected]"
] | |
4f82d7c8c1484dc4f051d1bbc2e00c5af99f5175
|
7b750c5c9df2fb05e92b16a43767c444404de7ae
|
/src/leetcode/python3/leetcode831.py
|
b5df08e76e5758d1e9c9d370b21a13f7000a71bc
|
[] |
no_license
|
renaissance-codes/leetcode
|
a68c0203fe4f006fa250122614079adfe6582d78
|
de6db120a1e709809d26e3e317c66612e681fb70
|
refs/heads/master
| 2022-08-18T15:05:19.622014 | 2022-08-05T03:34:01 | 2022-08-05T03:34:01 | 200,180,049 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 665 |
py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Mask personal information (LeetCode 831).
"""
class Solution:
def maskPII(self, S: str) -> str:
if "@" in S:
sarray = S.split("@")
firstname = sarray[0].lower()
lastname = sarray[1].lower()
return firstname[0] + "*****" + firstname[-1] + "@" + lastname
else:
nums = []
for s in S:
if s.isdigit():
nums.append(s)
if len(nums) == 10:
return "***-***-" + "".join(nums[-4:])
else:
return "+" + "*" * (len(nums) - 10) + "-***-***-" + "".join(nums[-4:])
|
[
"[email protected]"
] | |
a28b5082bb8fc7c6b6d6ed95d0d894e1c957d6dd
|
7f9dfa2cccf77764940ffcbbf92939e37c138c43
|
/crawl_file/file_path/pylab_examples/colours.py
|
e405f6214b0577b7746527d95670fbfdc7c89f73
|
[] |
no_license
|
zhangmman/scrapy_spider
|
f80bd8d213edde0dea083babe610ca7b1bc449a3
|
2bda4aa29f2550c649c939045ce4fcdea2736187
|
refs/heads/master
| 2020-09-11T13:58:49.930929 | 2019-12-21T08:40:56 | 2019-12-21T08:43:43 | 222,080,585 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,356 |
py
|
# -*- noplot -*-
"""
Some simple functions to generate colours.
"""
import numpy as np
from matplotlib import colors as mcolors
def pastel(colour, weight=2.4):
""" Convert colour into a nice pastel shade"""
rgb = np.asarray(mcolors.to_rgba(colour)[:3])
    # find the largest colour channel
    maxc = max(rgb)
if maxc < 1.0 and maxc > 0:
# scale colour
scale = 1.0 / maxc
rgb = rgb * scale
# now decrease saturation
total = rgb.sum()
slack = 0
for x in rgb:
slack += 1.0 - x
# want to increase weight from total to weight
# pick x s.t. slack * x == weight - total
# x = (weight - total) / slack
x = (weight - total) / slack
rgb = [c + (x * (1.0 - c)) for c in rgb]
return rgb
def get_colours(n):
""" Return n pastel colours. """
base = np.asarray([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
if n <= 3:
return base[0:n]
# how many new colours to we need to insert between
# red and green and between green and blue?
    needed = (((n - 3) + 1) // 2, (n - 3) // 2)  # integer division: np.linspace needs an int count
colours = []
for start in (0, 1):
for x in np.linspace(0, 1, needed[start] + 2):
colours.append((base[start] * (1.0 - x)) +
(base[start + 1] * x))
return [pastel(c) for c in colours[0:n]]
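if __name__ == "__main__":
    # Quick sketch: print five pastel colours as RGB triples in [0, 1].
    for colour in get_colours(5):
        print(colour)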
|
[
"[email protected]"
] | |
f3b32c83e9bff4d7005d532dc83f6857c74e5f3d
|
da90c94fff1959bd2641aaf4d4fd35db75cab802
|
/positive.py
|
503d450d6de21d62e327639572c32c438f64fcab
|
[] |
no_license
|
Python-lab-cycle/Shehara-Ashraf
|
73c9bfc8e03f23be7b381ff09f65cdeae3f3a97e
|
76fb4181a8528b920bc42510cb8c00f5e9777636
|
refs/heads/main
| 2023-06-05T12:34:23.470845 | 2021-07-02T10:27:06 | 2021-07-02T10:27:06 | 326,703,259 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 89 |
py
|
list1=[11,-21,0,45,66,-93]
for num in list1:
if num>=0:
print(num,end=",")
|
[
"[email protected]"
] | |
431403eaa0ceb56b26d30a9e7c72a61329582028
|
8ce0fd5e5c5b858fa24e388f2114885160421c03
|
/python/socket/socket_communication_server.py
|
0a170df96928af5a20e86ab71ac48d87ae3483c5
|
[] |
no_license
|
kong-ling/scripts
|
266e9975ae0156d6fdddf43b8f1d7ee20469b388
|
3c41c49646358d46871c8fd8ebe1ba52bdea046c
|
refs/heads/master
| 2021-01-10T08:29:34.772634 | 2020-01-03T09:04:57 | 2020-01-03T09:04:57 | 43,275,002 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,580 |
py
|
# this is the program for socket communication
# it can be used both for server and client
# python socket_communication.py -server for server
# python socket_communication.py -client for client
# sys.argv[0] : script name
# sys.argv[1] : -server or -client
# sys.argv[2] : string to send to server
import socket
import time
import sys
import os
import datetime
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = socket.gethostname()
#ec_sise = os.environ['EC_SITE']
ec_sise = ''
#host = host + '.' + ec_sise + '.intel.com'
port = 1234
print('host name is: %s' % host)
# for server point
if '-server' in sys.argv: #server
print('server program running')
sock.bind((host, port))
sock.listen(10)
#connection.settimeout(5)
while True:
try:
print('\n\n')
print(datetime.datetime.now())
print('Wait for new request ...')
print('\n\n')
connection, address = sock.accept()
received = connection.recv(1024)
#send back to counerpart
print('Got connection from %s: %s' % (address, received))
connection.send(received)
#convert the command to sting for host to run
cmd_seq = received.split(' ')
cmd = ''
for seq in cmd_seq:
if '@' not in seq and 'from' not in seq:
cmd = cmd + ' ' + seq
print('received=[%s]' % received)
cmd_strip = cmd.strip()
            print('cmd=[%s]' % cmd_strip)
os.system(cmd_strip)
except socket.timeout:
print('time out')
# for client point
if '-client' in sys.argv: #client
#print 'client program running'
sock.connect((host, port))
loop = 1
#send sys.argv[1], and wait for response from server
#if the recevied from the server is the same as sys.argv[1], exit the program
#otherwise, retransmit the string
while True:
#print 'Send \'%s\'' % (sys.argv[2])
#print 'User Name: %s' % os.environ.get('USER')
#request_to_send = sys.argv[2] + ' from ' + os.environ.get('USER') + '@' + host
request_to_send = sys.argv[2]
        sock.send(request_to_send.encode())  # send to server (sockets take bytes in Python 3)
        response_received = sock.recv(1024).decode()  # receive the response from the server
if (response_received == request_to_send):
print('Your request is: [%s]' % response_received)
#time.sleep(1);
break
loop += 1
#both server point and client point need this operation
sock.close()
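# Example session (both sides resolve the same local hostname via gethostname(),
# so run the two commands on one machine in separate terminals; "ls -l" is an
# arbitrary placeholder command):
#   terminal 1: python socket_communication_server.py -server
#   terminal 2: python socket_communication_server.py -client "ls -l"
# The client sends "ls -l"; the server echoes it back and runs it with os.system.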
|
[
"[email protected]"
] | |
5a99e8654f5c77396ef78c380d534862a0d12d05
|
c5e6f7c89a5cf17944d6d3d6a31d1fc43928b954
|
/django_statsd/__init__.py
|
5ee47936f607f0c853b8a0aef1acc1bc99442bdd
|
[
"BSD-3-Clause"
] |
permissive
|
klaviyo/django-statsd
|
f9d681207e96e2d2fcf6b45f07287f522942b35c
|
42a00a168c5fb8818d7da26f460a533bae5ae7a7
|
refs/heads/master
| 2023-07-23T14:54:42.720518 | 2023-07-07T18:42:29 | 2023-07-07T18:42:29 | 107,584,346 | 0 | 1 |
NOASSERTION
| 2020-12-10T15:46:11 | 2017-10-19T18:37:06 |
Python
|
UTF-8
|
Python
| false | false | 114 |
py
|
from django_statsd import patches
from django_statsd import clients
from django_statsd.plugins import NoseStatsd
|
[
"[email protected]"
] | |
037569816e8001c619300cef12e5522b95fc42a8
|
c16ea32a4cddb6b63ad3bacce3c6db0259d2bacd
|
/google/analytics/admin/v1alpha/google-analytics-admin-v1alpha-py/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py
|
d547448cd7270c8e63923dd04d0b6edfdda031a5
|
[
"Apache-2.0"
] |
permissive
|
dizcology/googleapis-gen
|
74a72b655fba2565233e5a289cfaea6dc7b91e1a
|
478f36572d7bcf1dc66038d0e76b9b3fa2abae63
|
refs/heads/master
| 2023-06-04T15:51:18.380826 | 2021-06-16T20:42:38 | 2021-06-16T20:42:38 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 96,532 |
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.analytics.admin_v1alpha.types import analytics_admin
from google.analytics.admin_v1alpha.types import resources
from google.protobuf import empty_pb2 # type: ignore
from .base import AnalyticsAdminServiceTransport, DEFAULT_CLIENT_INFO
class AnalyticsAdminServiceGrpcTransport(AnalyticsAdminServiceTransport):
"""gRPC backend transport for AnalyticsAdminService.
Service Interface for the Analytics Admin API (GA4).
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(self, *,
host: str = 'analyticsadmin.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(cls,
host: str = 'analyticsadmin.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
**self_signed_jwt_kwargs,
**kwargs
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def get_account(self) -> Callable[
[analytics_admin.GetAccountRequest],
resources.Account]:
r"""Return a callable for the get account method over gRPC.
Lookup for a single Account.
Returns:
Callable[[~.GetAccountRequest],
~.Account]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_account' not in self._stubs:
self._stubs['get_account'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetAccount',
request_serializer=analytics_admin.GetAccountRequest.serialize,
response_deserializer=resources.Account.deserialize,
)
return self._stubs['get_account']
@property
def list_accounts(self) -> Callable[
[analytics_admin.ListAccountsRequest],
analytics_admin.ListAccountsResponse]:
r"""Return a callable for the list accounts method over gRPC.
Returns all accounts accessible by the caller.
Note that these accounts might not currently have GA4
        properties. Soft-deleted (i.e. "trashed") accounts are
excluded by default. Returns an empty list if no
relevant accounts are found.
Returns:
Callable[[~.ListAccountsRequest],
~.ListAccountsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_accounts' not in self._stubs:
self._stubs['list_accounts'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListAccounts',
request_serializer=analytics_admin.ListAccountsRequest.serialize,
response_deserializer=analytics_admin.ListAccountsResponse.deserialize,
)
return self._stubs['list_accounts']
@property
def delete_account(self) -> Callable[
[analytics_admin.DeleteAccountRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete account method over gRPC.
        Marks target Account as soft-deleted (i.e. "trashed")
and returns it.
This API does not have a method to restore soft-deleted
accounts. However, they can be restored using the Trash
Can UI.
If the accounts are not restored before the expiration
        time, the account and all child resources (e.g.
Properties, GoogleAdsLinks, Streams, UserLinks) will be
permanently purged.
https://support.google.com/analytics/answer/6154772
Returns an error if the target is not found.
Returns:
Callable[[~.DeleteAccountRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_account' not in self._stubs:
self._stubs['delete_account'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteAccount',
request_serializer=analytics_admin.DeleteAccountRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_account']
@property
def update_account(self) -> Callable[
[analytics_admin.UpdateAccountRequest],
resources.Account]:
r"""Return a callable for the update account method over gRPC.
Updates an account.
Returns:
Callable[[~.UpdateAccountRequest],
~.Account]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_account' not in self._stubs:
self._stubs['update_account'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateAccount',
request_serializer=analytics_admin.UpdateAccountRequest.serialize,
response_deserializer=resources.Account.deserialize,
)
return self._stubs['update_account']
@property
def provision_account_ticket(self) -> Callable[
[analytics_admin.ProvisionAccountTicketRequest],
analytics_admin.ProvisionAccountTicketResponse]:
r"""Return a callable for the provision account ticket method over gRPC.
Requests a ticket for creating an account.
Returns:
Callable[[~.ProvisionAccountTicketRequest],
~.ProvisionAccountTicketResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'provision_account_ticket' not in self._stubs:
self._stubs['provision_account_ticket'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ProvisionAccountTicket',
request_serializer=analytics_admin.ProvisionAccountTicketRequest.serialize,
response_deserializer=analytics_admin.ProvisionAccountTicketResponse.deserialize,
)
return self._stubs['provision_account_ticket']
@property
def list_account_summaries(self) -> Callable[
[analytics_admin.ListAccountSummariesRequest],
analytics_admin.ListAccountSummariesResponse]:
r"""Return a callable for the list account summaries method over gRPC.
Returns summaries of all accounts accessible by the
caller.
Returns:
Callable[[~.ListAccountSummariesRequest],
~.ListAccountSummariesResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_account_summaries' not in self._stubs:
self._stubs['list_account_summaries'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListAccountSummaries',
request_serializer=analytics_admin.ListAccountSummariesRequest.serialize,
response_deserializer=analytics_admin.ListAccountSummariesResponse.deserialize,
)
return self._stubs['list_account_summaries']
@property
def get_property(self) -> Callable[
[analytics_admin.GetPropertyRequest],
resources.Property]:
r"""Return a callable for the get property method over gRPC.
Lookup for a single "GA4" Property.
Returns:
Callable[[~.GetPropertyRequest],
~.Property]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_property' not in self._stubs:
self._stubs['get_property'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetProperty',
request_serializer=analytics_admin.GetPropertyRequest.serialize,
response_deserializer=resources.Property.deserialize,
)
return self._stubs['get_property']
@property
def list_properties(self) -> Callable[
[analytics_admin.ListPropertiesRequest],
analytics_admin.ListPropertiesResponse]:
r"""Return a callable for the list properties method over gRPC.
Returns child Properties under the specified parent
Account.
Only "GA4" properties will be returned.
Properties will be excluded if the caller does not have
        access. Soft-deleted (i.e. "trashed") properties are
excluded by default. Returns an empty list if no
relevant properties are found.
Returns:
Callable[[~.ListPropertiesRequest],
~.ListPropertiesResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_properties' not in self._stubs:
self._stubs['list_properties'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListProperties',
request_serializer=analytics_admin.ListPropertiesRequest.serialize,
response_deserializer=analytics_admin.ListPropertiesResponse.deserialize,
)
return self._stubs['list_properties']
@property
def create_property(self) -> Callable[
[analytics_admin.CreatePropertyRequest],
resources.Property]:
r"""Return a callable for the create property method over gRPC.
Creates an "GA4" property with the specified location
and attributes.
Returns:
Callable[[~.CreatePropertyRequest],
~.Property]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_property' not in self._stubs:
self._stubs['create_property'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateProperty',
request_serializer=analytics_admin.CreatePropertyRequest.serialize,
response_deserializer=resources.Property.deserialize,
)
return self._stubs['create_property']
@property
def delete_property(self) -> Callable[
[analytics_admin.DeletePropertyRequest],
resources.Property]:
r"""Return a callable for the delete property method over gRPC.
        Marks target Property as soft-deleted (i.e. "trashed")
and returns it.
This API does not have a method to restore soft-deleted
properties. However, they can be restored using the
Trash Can UI.
If the properties are not restored before the expiration
        time, the Property and all child resources (e.g.
GoogleAdsLinks, Streams, UserLinks) will be permanently
purged.
https://support.google.com/analytics/answer/6154772
Returns an error if the target is not found, or is not
        a GA4 Property.
Returns:
Callable[[~.DeletePropertyRequest],
~.Property]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_property' not in self._stubs:
self._stubs['delete_property'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteProperty',
request_serializer=analytics_admin.DeletePropertyRequest.serialize,
response_deserializer=resources.Property.deserialize,
)
return self._stubs['delete_property']
@property
def update_property(self) -> Callable[
[analytics_admin.UpdatePropertyRequest],
resources.Property]:
r"""Return a callable for the update property method over gRPC.
Updates a property.
Returns:
Callable[[~.UpdatePropertyRequest],
~.Property]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_property' not in self._stubs:
self._stubs['update_property'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateProperty',
request_serializer=analytics_admin.UpdatePropertyRequest.serialize,
response_deserializer=resources.Property.deserialize,
)
return self._stubs['update_property']
@property
def get_user_link(self) -> Callable[
[analytics_admin.GetUserLinkRequest],
resources.UserLink]:
r"""Return a callable for the get user link method over gRPC.
Gets information about a user's link to an account or
property.
Returns:
Callable[[~.GetUserLinkRequest],
~.UserLink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_user_link' not in self._stubs:
self._stubs['get_user_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetUserLink',
request_serializer=analytics_admin.GetUserLinkRequest.serialize,
response_deserializer=resources.UserLink.deserialize,
)
return self._stubs['get_user_link']
@property
def batch_get_user_links(self) -> Callable[
[analytics_admin.BatchGetUserLinksRequest],
analytics_admin.BatchGetUserLinksResponse]:
r"""Return a callable for the batch get user links method over gRPC.
Gets information about multiple users' links to an
account or property.
Returns:
Callable[[~.BatchGetUserLinksRequest],
~.BatchGetUserLinksResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'batch_get_user_links' not in self._stubs:
self._stubs['batch_get_user_links'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/BatchGetUserLinks',
request_serializer=analytics_admin.BatchGetUserLinksRequest.serialize,
response_deserializer=analytics_admin.BatchGetUserLinksResponse.deserialize,
)
return self._stubs['batch_get_user_links']
@property
def list_user_links(self) -> Callable[
[analytics_admin.ListUserLinksRequest],
analytics_admin.ListUserLinksResponse]:
r"""Return a callable for the list user links method over gRPC.
Lists all user links on an account or property.
Returns:
Callable[[~.ListUserLinksRequest],
~.ListUserLinksResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_user_links' not in self._stubs:
self._stubs['list_user_links'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListUserLinks',
request_serializer=analytics_admin.ListUserLinksRequest.serialize,
response_deserializer=analytics_admin.ListUserLinksResponse.deserialize,
)
return self._stubs['list_user_links']
@property
def audit_user_links(self) -> Callable[
[analytics_admin.AuditUserLinksRequest],
analytics_admin.AuditUserLinksResponse]:
r"""Return a callable for the audit user links method over gRPC.
Lists all user links on an account or property,
including implicit ones that come from effective
permissions granted by groups or organization admin
roles.
If a returned user link does not have direct
permissions, they cannot be removed from the account or
property directly with the DeleteUserLink command. They
have to be removed from the group/etc that gives them
permissions, which is currently only usable/discoverable
in the GA or GMP UIs.
Returns:
Callable[[~.AuditUserLinksRequest],
~.AuditUserLinksResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'audit_user_links' not in self._stubs:
self._stubs['audit_user_links'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/AuditUserLinks',
request_serializer=analytics_admin.AuditUserLinksRequest.serialize,
response_deserializer=analytics_admin.AuditUserLinksResponse.deserialize,
)
return self._stubs['audit_user_links']
@property
def create_user_link(self) -> Callable[
[analytics_admin.CreateUserLinkRequest],
resources.UserLink]:
r"""Return a callable for the create user link method over gRPC.
Creates a user link on an account or property.
If the user with the specified email already has
permissions on the account or property, then the user's
existing permissions will be unioned with the
permissions specified in the new UserLink.
Returns:
Callable[[~.CreateUserLinkRequest],
~.UserLink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_user_link' not in self._stubs:
self._stubs['create_user_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateUserLink',
request_serializer=analytics_admin.CreateUserLinkRequest.serialize,
response_deserializer=resources.UserLink.deserialize,
)
return self._stubs['create_user_link']
@property
def batch_create_user_links(self) -> Callable[
[analytics_admin.BatchCreateUserLinksRequest],
analytics_admin.BatchCreateUserLinksResponse]:
r"""Return a callable for the batch create user links method over gRPC.
Creates information about multiple users' links to an
account or property.
This method is transactional. If any UserLink cannot be
created, none of the UserLinks will be created.
Returns:
Callable[[~.BatchCreateUserLinksRequest],
~.BatchCreateUserLinksResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'batch_create_user_links' not in self._stubs:
self._stubs['batch_create_user_links'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/BatchCreateUserLinks',
request_serializer=analytics_admin.BatchCreateUserLinksRequest.serialize,
response_deserializer=analytics_admin.BatchCreateUserLinksResponse.deserialize,
)
return self._stubs['batch_create_user_links']
@property
def update_user_link(self) -> Callable[
[analytics_admin.UpdateUserLinkRequest],
resources.UserLink]:
r"""Return a callable for the update user link method over gRPC.
Updates a user link on an account or property.
Returns:
Callable[[~.UpdateUserLinkRequest],
~.UserLink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_user_link' not in self._stubs:
self._stubs['update_user_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateUserLink',
request_serializer=analytics_admin.UpdateUserLinkRequest.serialize,
response_deserializer=resources.UserLink.deserialize,
)
return self._stubs['update_user_link']
@property
def batch_update_user_links(self) -> Callable[
[analytics_admin.BatchUpdateUserLinksRequest],
analytics_admin.BatchUpdateUserLinksResponse]:
r"""Return a callable for the batch update user links method over gRPC.
Updates information about multiple users' links to an
account or property.
Returns:
Callable[[~.BatchUpdateUserLinksRequest],
~.BatchUpdateUserLinksResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'batch_update_user_links' not in self._stubs:
self._stubs['batch_update_user_links'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/BatchUpdateUserLinks',
request_serializer=analytics_admin.BatchUpdateUserLinksRequest.serialize,
response_deserializer=analytics_admin.BatchUpdateUserLinksResponse.deserialize,
)
return self._stubs['batch_update_user_links']
@property
def delete_user_link(self) -> Callable[
[analytics_admin.DeleteUserLinkRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete user link method over gRPC.
Deletes a user link on an account or property.
Returns:
Callable[[~.DeleteUserLinkRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_user_link' not in self._stubs:
self._stubs['delete_user_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteUserLink',
request_serializer=analytics_admin.DeleteUserLinkRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_user_link']
@property
def batch_delete_user_links(self) -> Callable[
[analytics_admin.BatchDeleteUserLinksRequest],
empty_pb2.Empty]:
r"""Return a callable for the batch delete user links method over gRPC.
Deletes information about multiple users' links to an
account or property.
Returns:
Callable[[~.BatchDeleteUserLinksRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'batch_delete_user_links' not in self._stubs:
self._stubs['batch_delete_user_links'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/BatchDeleteUserLinks',
request_serializer=analytics_admin.BatchDeleteUserLinksRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['batch_delete_user_links']
@property
def get_web_data_stream(self) -> Callable[
[analytics_admin.GetWebDataStreamRequest],
resources.WebDataStream]:
r"""Return a callable for the get web data stream method over gRPC.
        Lookup for a single WebDataStream.
Returns:
Callable[[~.GetWebDataStreamRequest],
~.WebDataStream]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_web_data_stream' not in self._stubs:
self._stubs['get_web_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetWebDataStream',
request_serializer=analytics_admin.GetWebDataStreamRequest.serialize,
response_deserializer=resources.WebDataStream.deserialize,
)
return self._stubs['get_web_data_stream']
@property
def delete_web_data_stream(self) -> Callable[
[analytics_admin.DeleteWebDataStreamRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete web data stream method over gRPC.
Deletes a web stream on a property.
Returns:
Callable[[~.DeleteWebDataStreamRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_web_data_stream' not in self._stubs:
self._stubs['delete_web_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteWebDataStream',
request_serializer=analytics_admin.DeleteWebDataStreamRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_web_data_stream']
@property
def update_web_data_stream(self) -> Callable[
[analytics_admin.UpdateWebDataStreamRequest],
resources.WebDataStream]:
r"""Return a callable for the update web data stream method over gRPC.
Updates a web stream on a property.
Returns:
Callable[[~.UpdateWebDataStreamRequest],
~.WebDataStream]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_web_data_stream' not in self._stubs:
self._stubs['update_web_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateWebDataStream',
request_serializer=analytics_admin.UpdateWebDataStreamRequest.serialize,
response_deserializer=resources.WebDataStream.deserialize,
)
return self._stubs['update_web_data_stream']
@property
def create_web_data_stream(self) -> Callable[
[analytics_admin.CreateWebDataStreamRequest],
resources.WebDataStream]:
r"""Return a callable for the create web data stream method over gRPC.
Creates a web stream with the specified location and
attributes.
Returns:
Callable[[~.CreateWebDataStreamRequest],
~.WebDataStream]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_web_data_stream' not in self._stubs:
self._stubs['create_web_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateWebDataStream',
request_serializer=analytics_admin.CreateWebDataStreamRequest.serialize,
response_deserializer=resources.WebDataStream.deserialize,
)
return self._stubs['create_web_data_stream']
@property
def list_web_data_streams(self) -> Callable[
[analytics_admin.ListWebDataStreamsRequest],
analytics_admin.ListWebDataStreamsResponse]:
r"""Return a callable for the list web data streams method over gRPC.
Returns child web data streams under the specified
parent property.
Web data streams will be excluded if the caller does not
have access. Returns an empty list if no relevant web
data streams are found.
Returns:
Callable[[~.ListWebDataStreamsRequest],
~.ListWebDataStreamsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_web_data_streams' not in self._stubs:
self._stubs['list_web_data_streams'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListWebDataStreams',
request_serializer=analytics_admin.ListWebDataStreamsRequest.serialize,
response_deserializer=analytics_admin.ListWebDataStreamsResponse.deserialize,
)
return self._stubs['list_web_data_streams']
@property
def get_ios_app_data_stream(self) -> Callable[
[analytics_admin.GetIosAppDataStreamRequest],
resources.IosAppDataStream]:
r"""Return a callable for the get ios app data stream method over gRPC.
        Lookup for a single IosAppDataStream.
Returns:
Callable[[~.GetIosAppDataStreamRequest],
~.IosAppDataStream]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_ios_app_data_stream' not in self._stubs:
self._stubs['get_ios_app_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetIosAppDataStream',
request_serializer=analytics_admin.GetIosAppDataStreamRequest.serialize,
response_deserializer=resources.IosAppDataStream.deserialize,
)
return self._stubs['get_ios_app_data_stream']
@property
def delete_ios_app_data_stream(self) -> Callable[
[analytics_admin.DeleteIosAppDataStreamRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete ios app data stream method over gRPC.
Deletes an iOS app stream on a property.
Returns:
Callable[[~.DeleteIosAppDataStreamRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_ios_app_data_stream' not in self._stubs:
self._stubs['delete_ios_app_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteIosAppDataStream',
request_serializer=analytics_admin.DeleteIosAppDataStreamRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_ios_app_data_stream']
@property
def update_ios_app_data_stream(self) -> Callable[
[analytics_admin.UpdateIosAppDataStreamRequest],
resources.IosAppDataStream]:
r"""Return a callable for the update ios app data stream method over gRPC.
Updates an iOS app stream on a property.
Returns:
Callable[[~.UpdateIosAppDataStreamRequest],
~.IosAppDataStream]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_ios_app_data_stream' not in self._stubs:
self._stubs['update_ios_app_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateIosAppDataStream',
request_serializer=analytics_admin.UpdateIosAppDataStreamRequest.serialize,
response_deserializer=resources.IosAppDataStream.deserialize,
)
return self._stubs['update_ios_app_data_stream']
@property
def list_ios_app_data_streams(self) -> Callable[
[analytics_admin.ListIosAppDataStreamsRequest],
analytics_admin.ListIosAppDataStreamsResponse]:
r"""Return a callable for the list ios app data streams method over gRPC.
Returns child iOS app data streams under the
specified parent property.
iOS app data streams will be excluded if the caller does
not have access. Returns an empty list if no relevant
iOS app data streams are found.
Returns:
Callable[[~.ListIosAppDataStreamsRequest],
~.ListIosAppDataStreamsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_ios_app_data_streams' not in self._stubs:
self._stubs['list_ios_app_data_streams'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListIosAppDataStreams',
request_serializer=analytics_admin.ListIosAppDataStreamsRequest.serialize,
response_deserializer=analytics_admin.ListIosAppDataStreamsResponse.deserialize,
)
return self._stubs['list_ios_app_data_streams']
@property
def get_android_app_data_stream(self) -> Callable[
[analytics_admin.GetAndroidAppDataStreamRequest],
resources.AndroidAppDataStream]:
r"""Return a callable for the get android app data stream method over gRPC.
        Lookup for a single AndroidAppDataStream.
Returns:
Callable[[~.GetAndroidAppDataStreamRequest],
~.AndroidAppDataStream]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_android_app_data_stream' not in self._stubs:
self._stubs['get_android_app_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetAndroidAppDataStream',
request_serializer=analytics_admin.GetAndroidAppDataStreamRequest.serialize,
response_deserializer=resources.AndroidAppDataStream.deserialize,
)
return self._stubs['get_android_app_data_stream']
@property
def delete_android_app_data_stream(self) -> Callable[
[analytics_admin.DeleteAndroidAppDataStreamRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete android app data stream method over gRPC.
        Deletes an Android app stream on a property.
Returns:
Callable[[~.DeleteAndroidAppDataStreamRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_android_app_data_stream' not in self._stubs:
self._stubs['delete_android_app_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteAndroidAppDataStream',
request_serializer=analytics_admin.DeleteAndroidAppDataStreamRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_android_app_data_stream']
@property
def update_android_app_data_stream(self) -> Callable[
[analytics_admin.UpdateAndroidAppDataStreamRequest],
resources.AndroidAppDataStream]:
r"""Return a callable for the update android app data stream method over gRPC.
        Updates an Android app stream on a property.
Returns:
Callable[[~.UpdateAndroidAppDataStreamRequest],
~.AndroidAppDataStream]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_android_app_data_stream' not in self._stubs:
self._stubs['update_android_app_data_stream'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateAndroidAppDataStream',
request_serializer=analytics_admin.UpdateAndroidAppDataStreamRequest.serialize,
response_deserializer=resources.AndroidAppDataStream.deserialize,
)
return self._stubs['update_android_app_data_stream']
@property
def list_android_app_data_streams(self) -> Callable[
[analytics_admin.ListAndroidAppDataStreamsRequest],
analytics_admin.ListAndroidAppDataStreamsResponse]:
r"""Return a callable for the list android app data streams method over gRPC.
        Returns child Android app streams under the specified
parent property.
Android app streams will be excluded if the caller does
not have access. Returns an empty list if no relevant
        Android app streams are found.
Returns:
Callable[[~.ListAndroidAppDataStreamsRequest],
~.ListAndroidAppDataStreamsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_android_app_data_streams' not in self._stubs:
self._stubs['list_android_app_data_streams'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListAndroidAppDataStreams',
request_serializer=analytics_admin.ListAndroidAppDataStreamsRequest.serialize,
response_deserializer=analytics_admin.ListAndroidAppDataStreamsResponse.deserialize,
)
return self._stubs['list_android_app_data_streams']
@property
def get_enhanced_measurement_settings(self) -> Callable[
[analytics_admin.GetEnhancedMeasurementSettingsRequest],
resources.EnhancedMeasurementSettings]:
r"""Return a callable for the get enhanced measurement
settings method over gRPC.
Returns the singleton enhanced measurement settings
for this web stream. Note that the stream must enable
enhanced measurement for these settings to take effect.
Returns:
Callable[[~.GetEnhancedMeasurementSettingsRequest],
~.EnhancedMeasurementSettings]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_enhanced_measurement_settings' not in self._stubs:
self._stubs['get_enhanced_measurement_settings'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetEnhancedMeasurementSettings',
request_serializer=analytics_admin.GetEnhancedMeasurementSettingsRequest.serialize,
response_deserializer=resources.EnhancedMeasurementSettings.deserialize,
)
return self._stubs['get_enhanced_measurement_settings']
@property
def update_enhanced_measurement_settings(self) -> Callable[
[analytics_admin.UpdateEnhancedMeasurementSettingsRequest],
resources.EnhancedMeasurementSettings]:
r"""Return a callable for the update enhanced measurement
settings method over gRPC.
Updates the singleton enhanced measurement settings
for this web stream. Note that the stream must enable
enhanced measurement for these settings to take effect.
Returns:
Callable[[~.UpdateEnhancedMeasurementSettingsRequest],
~.EnhancedMeasurementSettings]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_enhanced_measurement_settings' not in self._stubs:
self._stubs['update_enhanced_measurement_settings'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateEnhancedMeasurementSettings',
request_serializer=analytics_admin.UpdateEnhancedMeasurementSettingsRequest.serialize,
response_deserializer=resources.EnhancedMeasurementSettings.deserialize,
)
return self._stubs['update_enhanced_measurement_settings']
@property
def create_firebase_link(self) -> Callable[
[analytics_admin.CreateFirebaseLinkRequest],
resources.FirebaseLink]:
r"""Return a callable for the create firebase link method over gRPC.
Creates a FirebaseLink.
Properties can have at most one FirebaseLink.
Returns:
Callable[[~.CreateFirebaseLinkRequest],
~.FirebaseLink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_firebase_link' not in self._stubs:
self._stubs['create_firebase_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateFirebaseLink',
request_serializer=analytics_admin.CreateFirebaseLinkRequest.serialize,
response_deserializer=resources.FirebaseLink.deserialize,
)
return self._stubs['create_firebase_link']
@property
def update_firebase_link(self) -> Callable[
[analytics_admin.UpdateFirebaseLinkRequest],
resources.FirebaseLink]:
r"""Return a callable for the update firebase link method over gRPC.
        Updates a FirebaseLink on a property.
Returns:
Callable[[~.UpdateFirebaseLinkRequest],
~.FirebaseLink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_firebase_link' not in self._stubs:
self._stubs['update_firebase_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateFirebaseLink',
request_serializer=analytics_admin.UpdateFirebaseLinkRequest.serialize,
response_deserializer=resources.FirebaseLink.deserialize,
)
return self._stubs['update_firebase_link']
@property
def delete_firebase_link(self) -> Callable[
[analytics_admin.DeleteFirebaseLinkRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete firebase link method over gRPC.
        Deletes a FirebaseLink on a property.
Returns:
Callable[[~.DeleteFirebaseLinkRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_firebase_link' not in self._stubs:
self._stubs['delete_firebase_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteFirebaseLink',
request_serializer=analytics_admin.DeleteFirebaseLinkRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_firebase_link']
@property
def list_firebase_links(self) -> Callable[
[analytics_admin.ListFirebaseLinksRequest],
analytics_admin.ListFirebaseLinksResponse]:
r"""Return a callable for the list firebase links method over gRPC.
Lists FirebaseLinks on a property.
Properties can have at most one FirebaseLink.
Returns:
Callable[[~.ListFirebaseLinksRequest],
~.ListFirebaseLinksResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_firebase_links' not in self._stubs:
self._stubs['list_firebase_links'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListFirebaseLinks',
request_serializer=analytics_admin.ListFirebaseLinksRequest.serialize,
response_deserializer=analytics_admin.ListFirebaseLinksResponse.deserialize,
)
return self._stubs['list_firebase_links']
@property
def get_global_site_tag(self) -> Callable[
[analytics_admin.GetGlobalSiteTagRequest],
resources.GlobalSiteTag]:
r"""Return a callable for the get global site tag method over gRPC.
Returns the Site Tag for the specified web stream.
Site Tags are immutable singletons.
Returns:
Callable[[~.GetGlobalSiteTagRequest],
~.GlobalSiteTag]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_global_site_tag' not in self._stubs:
self._stubs['get_global_site_tag'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetGlobalSiteTag',
request_serializer=analytics_admin.GetGlobalSiteTagRequest.serialize,
response_deserializer=resources.GlobalSiteTag.deserialize,
)
return self._stubs['get_global_site_tag']
@property
def create_google_ads_link(self) -> Callable[
[analytics_admin.CreateGoogleAdsLinkRequest],
resources.GoogleAdsLink]:
r"""Return a callable for the create google ads link method over gRPC.
Creates a GoogleAdsLink.
Returns:
Callable[[~.CreateGoogleAdsLinkRequest],
~.GoogleAdsLink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_google_ads_link' not in self._stubs:
self._stubs['create_google_ads_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateGoogleAdsLink',
request_serializer=analytics_admin.CreateGoogleAdsLinkRequest.serialize,
response_deserializer=resources.GoogleAdsLink.deserialize,
)
return self._stubs['create_google_ads_link']
@property
def update_google_ads_link(self) -> Callable[
[analytics_admin.UpdateGoogleAdsLinkRequest],
resources.GoogleAdsLink]:
r"""Return a callable for the update google ads link method over gRPC.
        Updates a GoogleAdsLink on a property.
Returns:
Callable[[~.UpdateGoogleAdsLinkRequest],
~.GoogleAdsLink]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_google_ads_link' not in self._stubs:
self._stubs['update_google_ads_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateGoogleAdsLink',
request_serializer=analytics_admin.UpdateGoogleAdsLinkRequest.serialize,
response_deserializer=resources.GoogleAdsLink.deserialize,
)
return self._stubs['update_google_ads_link']
@property
def delete_google_ads_link(self) -> Callable[
[analytics_admin.DeleteGoogleAdsLinkRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete google ads link method over gRPC.
        Deletes a GoogleAdsLink on a property.
Returns:
Callable[[~.DeleteGoogleAdsLinkRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_google_ads_link' not in self._stubs:
self._stubs['delete_google_ads_link'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteGoogleAdsLink',
request_serializer=analytics_admin.DeleteGoogleAdsLinkRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_google_ads_link']
@property
def list_google_ads_links(self) -> Callable[
[analytics_admin.ListGoogleAdsLinksRequest],
analytics_admin.ListGoogleAdsLinksResponse]:
r"""Return a callable for the list google ads links method over gRPC.
Lists GoogleAdsLinks on a property.
Returns:
Callable[[~.ListGoogleAdsLinksRequest],
~.ListGoogleAdsLinksResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_google_ads_links' not in self._stubs:
self._stubs['list_google_ads_links'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListGoogleAdsLinks',
request_serializer=analytics_admin.ListGoogleAdsLinksRequest.serialize,
response_deserializer=analytics_admin.ListGoogleAdsLinksResponse.deserialize,
)
return self._stubs['list_google_ads_links']
@property
def get_data_sharing_settings(self) -> Callable[
[analytics_admin.GetDataSharingSettingsRequest],
resources.DataSharingSettings]:
r"""Return a callable for the get data sharing settings method over gRPC.
Get data sharing settings on an account.
Data sharing settings are singletons.
Returns:
Callable[[~.GetDataSharingSettingsRequest],
~.DataSharingSettings]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_data_sharing_settings' not in self._stubs:
self._stubs['get_data_sharing_settings'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetDataSharingSettings',
request_serializer=analytics_admin.GetDataSharingSettingsRequest.serialize,
response_deserializer=resources.DataSharingSettings.deserialize,
)
return self._stubs['get_data_sharing_settings']
@property
def get_measurement_protocol_secret(self) -> Callable[
[analytics_admin.GetMeasurementProtocolSecretRequest],
resources.MeasurementProtocolSecret]:
r"""Return a callable for the get measurement protocol
secret method over gRPC.
Lookup for a single "GA4" MeasurementProtocolSecret.
Returns:
Callable[[~.GetMeasurementProtocolSecretRequest],
~.MeasurementProtocolSecret]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_measurement_protocol_secret' not in self._stubs:
self._stubs['get_measurement_protocol_secret'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetMeasurementProtocolSecret',
request_serializer=analytics_admin.GetMeasurementProtocolSecretRequest.serialize,
response_deserializer=resources.MeasurementProtocolSecret.deserialize,
)
return self._stubs['get_measurement_protocol_secret']
@property
def list_measurement_protocol_secrets(self) -> Callable[
[analytics_admin.ListMeasurementProtocolSecretsRequest],
analytics_admin.ListMeasurementProtocolSecretsResponse]:
r"""Return a callable for the list measurement protocol
secrets method over gRPC.
Returns child MeasurementProtocolSecrets under the
specified parent Property.
Returns:
Callable[[~.ListMeasurementProtocolSecretsRequest],
~.ListMeasurementProtocolSecretsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_measurement_protocol_secrets' not in self._stubs:
self._stubs['list_measurement_protocol_secrets'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListMeasurementProtocolSecrets',
request_serializer=analytics_admin.ListMeasurementProtocolSecretsRequest.serialize,
response_deserializer=analytics_admin.ListMeasurementProtocolSecretsResponse.deserialize,
)
return self._stubs['list_measurement_protocol_secrets']
@property
def create_measurement_protocol_secret(self) -> Callable[
[analytics_admin.CreateMeasurementProtocolSecretRequest],
resources.MeasurementProtocolSecret]:
r"""Return a callable for the create measurement protocol
secret method over gRPC.
Creates a measurement protocol secret.
Returns:
Callable[[~.CreateMeasurementProtocolSecretRequest],
~.MeasurementProtocolSecret]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_measurement_protocol_secret' not in self._stubs:
self._stubs['create_measurement_protocol_secret'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateMeasurementProtocolSecret',
request_serializer=analytics_admin.CreateMeasurementProtocolSecretRequest.serialize,
response_deserializer=resources.MeasurementProtocolSecret.deserialize,
)
return self._stubs['create_measurement_protocol_secret']
@property
def delete_measurement_protocol_secret(self) -> Callable[
[analytics_admin.DeleteMeasurementProtocolSecretRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete measurement protocol
secret method over gRPC.
        Deletes the target MeasurementProtocolSecret.
Returns:
Callable[[~.DeleteMeasurementProtocolSecretRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_measurement_protocol_secret' not in self._stubs:
self._stubs['delete_measurement_protocol_secret'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteMeasurementProtocolSecret',
request_serializer=analytics_admin.DeleteMeasurementProtocolSecretRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_measurement_protocol_secret']
@property
def update_measurement_protocol_secret(self) -> Callable[
[analytics_admin.UpdateMeasurementProtocolSecretRequest],
resources.MeasurementProtocolSecret]:
r"""Return a callable for the update measurement protocol
secret method over gRPC.
Updates a measurement protocol secret.
Returns:
Callable[[~.UpdateMeasurementProtocolSecretRequest],
~.MeasurementProtocolSecret]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_measurement_protocol_secret' not in self._stubs:
self._stubs['update_measurement_protocol_secret'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateMeasurementProtocolSecret',
request_serializer=analytics_admin.UpdateMeasurementProtocolSecretRequest.serialize,
response_deserializer=resources.MeasurementProtocolSecret.deserialize,
)
return self._stubs['update_measurement_protocol_secret']
@property
def search_change_history_events(self) -> Callable[
[analytics_admin.SearchChangeHistoryEventsRequest],
analytics_admin.SearchChangeHistoryEventsResponse]:
r"""Return a callable for the search change history events method over gRPC.
Searches through all changes to an account or its
children given the specified set of filters.
Returns:
Callable[[~.SearchChangeHistoryEventsRequest],
~.SearchChangeHistoryEventsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'search_change_history_events' not in self._stubs:
self._stubs['search_change_history_events'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/SearchChangeHistoryEvents',
request_serializer=analytics_admin.SearchChangeHistoryEventsRequest.serialize,
response_deserializer=analytics_admin.SearchChangeHistoryEventsResponse.deserialize,
)
return self._stubs['search_change_history_events']
@property
def get_google_signals_settings(self) -> Callable[
[analytics_admin.GetGoogleSignalsSettingsRequest],
resources.GoogleSignalsSettings]:
r"""Return a callable for the get google signals settings method over gRPC.
        Looks up the Google Signals settings for a property.
Returns:
Callable[[~.GetGoogleSignalsSettingsRequest],
~.GoogleSignalsSettings]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_google_signals_settings' not in self._stubs:
self._stubs['get_google_signals_settings'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetGoogleSignalsSettings',
request_serializer=analytics_admin.GetGoogleSignalsSettingsRequest.serialize,
response_deserializer=resources.GoogleSignalsSettings.deserialize,
)
return self._stubs['get_google_signals_settings']
@property
def update_google_signals_settings(self) -> Callable[
[analytics_admin.UpdateGoogleSignalsSettingsRequest],
resources.GoogleSignalsSettings]:
r"""Return a callable for the update google signals settings method over gRPC.
Updates Google Signals settings for a property.
Returns:
Callable[[~.UpdateGoogleSignalsSettingsRequest],
~.GoogleSignalsSettings]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_google_signals_settings' not in self._stubs:
self._stubs['update_google_signals_settings'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateGoogleSignalsSettings',
request_serializer=analytics_admin.UpdateGoogleSignalsSettingsRequest.serialize,
response_deserializer=resources.GoogleSignalsSettings.deserialize,
)
return self._stubs['update_google_signals_settings']
@property
def create_conversion_event(self) -> Callable[
[analytics_admin.CreateConversionEventRequest],
resources.ConversionEvent]:
r"""Return a callable for the create conversion event method over gRPC.
Creates a conversion event with the specified
attributes.
Returns:
Callable[[~.CreateConversionEventRequest],
~.ConversionEvent]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_conversion_event' not in self._stubs:
self._stubs['create_conversion_event'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateConversionEvent',
request_serializer=analytics_admin.CreateConversionEventRequest.serialize,
response_deserializer=resources.ConversionEvent.deserialize,
)
return self._stubs['create_conversion_event']
@property
def get_conversion_event(self) -> Callable[
[analytics_admin.GetConversionEventRequest],
resources.ConversionEvent]:
r"""Return a callable for the get conversion event method over gRPC.
        Retrieves a single conversion event.
Returns:
Callable[[~.GetConversionEventRequest],
~.ConversionEvent]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_conversion_event' not in self._stubs:
self._stubs['get_conversion_event'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetConversionEvent',
request_serializer=analytics_admin.GetConversionEventRequest.serialize,
response_deserializer=resources.ConversionEvent.deserialize,
)
return self._stubs['get_conversion_event']
@property
def delete_conversion_event(self) -> Callable[
[analytics_admin.DeleteConversionEventRequest],
empty_pb2.Empty]:
r"""Return a callable for the delete conversion event method over gRPC.
Deletes a conversion event in a property.
Returns:
Callable[[~.DeleteConversionEventRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'delete_conversion_event' not in self._stubs:
self._stubs['delete_conversion_event'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/DeleteConversionEvent',
request_serializer=analytics_admin.DeleteConversionEventRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['delete_conversion_event']
@property
def list_conversion_events(self) -> Callable[
[analytics_admin.ListConversionEventsRequest],
analytics_admin.ListConversionEventsResponse]:
r"""Return a callable for the list conversion events method over gRPC.
Returns a list of conversion events in the specified
parent property.
Returns an empty list if no conversion events are found.
Returns:
Callable[[~.ListConversionEventsRequest],
~.ListConversionEventsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_conversion_events' not in self._stubs:
self._stubs['list_conversion_events'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListConversionEvents',
request_serializer=analytics_admin.ListConversionEventsRequest.serialize,
response_deserializer=analytics_admin.ListConversionEventsResponse.deserialize,
)
return self._stubs['list_conversion_events']
@property
def create_custom_dimension(self) -> Callable[
[analytics_admin.CreateCustomDimensionRequest],
resources.CustomDimension]:
r"""Return a callable for the create custom dimension method over gRPC.
Creates a CustomDimension.
Returns:
Callable[[~.CreateCustomDimensionRequest],
~.CustomDimension]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_custom_dimension' not in self._stubs:
self._stubs['create_custom_dimension'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateCustomDimension',
request_serializer=analytics_admin.CreateCustomDimensionRequest.serialize,
response_deserializer=resources.CustomDimension.deserialize,
)
return self._stubs['create_custom_dimension']
@property
def update_custom_dimension(self) -> Callable[
[analytics_admin.UpdateCustomDimensionRequest],
resources.CustomDimension]:
r"""Return a callable for the update custom dimension method over gRPC.
Updates a CustomDimension on a property.
Returns:
Callable[[~.UpdateCustomDimensionRequest],
~.CustomDimension]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_custom_dimension' not in self._stubs:
self._stubs['update_custom_dimension'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateCustomDimension',
request_serializer=analytics_admin.UpdateCustomDimensionRequest.serialize,
response_deserializer=resources.CustomDimension.deserialize,
)
return self._stubs['update_custom_dimension']
@property
def list_custom_dimensions(self) -> Callable[
[analytics_admin.ListCustomDimensionsRequest],
analytics_admin.ListCustomDimensionsResponse]:
r"""Return a callable for the list custom dimensions method over gRPC.
Lists CustomDimensions on a property.
Returns:
Callable[[~.ListCustomDimensionsRequest],
~.ListCustomDimensionsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_custom_dimensions' not in self._stubs:
self._stubs['list_custom_dimensions'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListCustomDimensions',
request_serializer=analytics_admin.ListCustomDimensionsRequest.serialize,
response_deserializer=analytics_admin.ListCustomDimensionsResponse.deserialize,
)
return self._stubs['list_custom_dimensions']
@property
def archive_custom_dimension(self) -> Callable[
[analytics_admin.ArchiveCustomDimensionRequest],
empty_pb2.Empty]:
r"""Return a callable for the archive custom dimension method over gRPC.
Archives a CustomDimension on a property.
Returns:
Callable[[~.ArchiveCustomDimensionRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'archive_custom_dimension' not in self._stubs:
self._stubs['archive_custom_dimension'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ArchiveCustomDimension',
request_serializer=analytics_admin.ArchiveCustomDimensionRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['archive_custom_dimension']
@property
def get_custom_dimension(self) -> Callable[
[analytics_admin.GetCustomDimensionRequest],
resources.CustomDimension]:
r"""Return a callable for the get custom dimension method over gRPC.
        Looks up a single CustomDimension.
Returns:
Callable[[~.GetCustomDimensionRequest],
~.CustomDimension]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_custom_dimension' not in self._stubs:
self._stubs['get_custom_dimension'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetCustomDimension',
request_serializer=analytics_admin.GetCustomDimensionRequest.serialize,
response_deserializer=resources.CustomDimension.deserialize,
)
return self._stubs['get_custom_dimension']
@property
def create_custom_metric(self) -> Callable[
[analytics_admin.CreateCustomMetricRequest],
resources.CustomMetric]:
r"""Return a callable for the create custom metric method over gRPC.
Creates a CustomMetric.
Returns:
Callable[[~.CreateCustomMetricRequest],
~.CustomMetric]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'create_custom_metric' not in self._stubs:
self._stubs['create_custom_metric'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/CreateCustomMetric',
request_serializer=analytics_admin.CreateCustomMetricRequest.serialize,
response_deserializer=resources.CustomMetric.deserialize,
)
return self._stubs['create_custom_metric']
@property
def update_custom_metric(self) -> Callable[
[analytics_admin.UpdateCustomMetricRequest],
resources.CustomMetric]:
r"""Return a callable for the update custom metric method over gRPC.
Updates a CustomMetric on a property.
Returns:
Callable[[~.UpdateCustomMetricRequest],
~.CustomMetric]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'update_custom_metric' not in self._stubs:
self._stubs['update_custom_metric'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/UpdateCustomMetric',
request_serializer=analytics_admin.UpdateCustomMetricRequest.serialize,
response_deserializer=resources.CustomMetric.deserialize,
)
return self._stubs['update_custom_metric']
@property
def list_custom_metrics(self) -> Callable[
[analytics_admin.ListCustomMetricsRequest],
analytics_admin.ListCustomMetricsResponse]:
r"""Return a callable for the list custom metrics method over gRPC.
Lists CustomMetrics on a property.
Returns:
Callable[[~.ListCustomMetricsRequest],
~.ListCustomMetricsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list_custom_metrics' not in self._stubs:
self._stubs['list_custom_metrics'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ListCustomMetrics',
request_serializer=analytics_admin.ListCustomMetricsRequest.serialize,
response_deserializer=analytics_admin.ListCustomMetricsResponse.deserialize,
)
return self._stubs['list_custom_metrics']
@property
def archive_custom_metric(self) -> Callable[
[analytics_admin.ArchiveCustomMetricRequest],
empty_pb2.Empty]:
r"""Return a callable for the archive custom metric method over gRPC.
Archives a CustomMetric on a property.
Returns:
Callable[[~.ArchiveCustomMetricRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'archive_custom_metric' not in self._stubs:
self._stubs['archive_custom_metric'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/ArchiveCustomMetric',
request_serializer=analytics_admin.ArchiveCustomMetricRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs['archive_custom_metric']
@property
def get_custom_metric(self) -> Callable[
[analytics_admin.GetCustomMetricRequest],
resources.CustomMetric]:
r"""Return a callable for the get custom metric method over gRPC.
        Looks up a single CustomMetric.
Returns:
Callable[[~.GetCustomMetricRequest],
~.CustomMetric]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'get_custom_metric' not in self._stubs:
self._stubs['get_custom_metric'] = self.grpc_channel.unary_unary(
'/google.analytics.admin.v1alpha.AnalyticsAdminService/GetCustomMetric',
request_serializer=analytics_admin.GetCustomMetricRequest.serialize,
response_deserializer=resources.CustomMetric.deserialize,
)
return self._stubs['get_custom_metric']
__all__ = (
'AnalyticsAdminServiceGrpcTransport',
)
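# --- Usage sketch (illustrative only). Each property above lazily builds one
# unary-unary callable per RPC and caches it in self._stubs, so repeated
# attribute access reuses the same stub per channel. A minimal sketch of
# driving one stub directly, assuming application default credentials and the
# constructor signature shared by other google-api-core gRPC transports; the
# resource name below is hypothetical:
#
#     transport = AnalyticsAdminServiceGrpcTransport()
#     request = analytics_admin.GetCustomMetricRequest(
#         name='properties/123/customMetrics/456')
#     response = transport.get_custom_metric(request)
#
# In practice these transports are wrapped by the generated client class
# rather than called directly.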
|
[
"bazel-bot-development[bot]@users.noreply.github.com"
] |
bazel-bot-development[bot]@users.noreply.github.com
|
1cfc9599d03cc840fc64b3918207676e2e667af4
|
7b63b5b068ee4cdf876a4cbbe16b8a8a7eb21b42
|
/src/lib/models/networks/mobilenetv3_centernet.py
|
d9056a1ba010d8f560d23a75fe3dc6571c80f048
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
akirasosa/CenterNet
|
913a2dd9dbff46063a5f257da06256fea054bfe8
|
5f865dda53b1cd6001dad3b7d13a74a7dc4b7ee9
|
refs/heads/master
| 2020-08-07T03:32:55.214345 | 2019-10-23T09:54:54 | 2019-10-23T09:54:54 | 213,280,991 | 0 | 0 |
MIT
| 2019-10-07T02:38:34 | 2019-10-07T02:38:34 | null |
UTF-8
|
Python
| false | false | 11,201 |
py
|
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao ([email protected])
# Modified by Dequan Wang and Xingyi Zhou
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import math
import torch
import torch.nn as nn
# from .DCNv2.dcn_v2 import DCN
# from modules import DeformConv
# from modules import DeformConvPack
from torch.utils import model_zoo
BN_MOMENTUM = 0.1
logger = logging.getLogger(__name__)
def fill_up_weights(up):
w = up.weight.data
f = math.ceil(w.size(2) / 2)
c = (2 * f - 1 - f % 2) / (2. * f)
for i in range(w.size(2)):
for j in range(w.size(3)):
w[0, 0, i, j] = \
(1 - math.fabs(i / f - c)) * (1 - math.fabs(j / f - c))
for c in range(1, w.size(0)):
w[c, 0, :, :] = w[0, 0, :, :]
def fill_fc_weights(layers):
for m in layers.modules():
if isinstance(m, nn.Conv2d):
nn.init.normal_(m.weight, std=0.001)
# torch.nn.init.kaiming_normal_(m.weight.data, nonlinearity='relu')
# torch.nn.init.xavier_normal_(m.weight.data)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
def _make_divisible(v, divisor, min_value=None):
"""
This function is taken from the original tf repo.
It ensures that all layers have a channel number that is divisible by 8
It can be seen here:
https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py
:param v:
:param divisor:
:param min_value:
:return:
"""
if min_value is None:
min_value = divisor
new_v = max(min_value, int(v + divisor / 2) // divisor * divisor)
# Make sure that round down does not go down by more than 10%.
if new_v < 0.9 * v:
new_v += divisor
return new_v
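# Worked examples of the rounding rule above:
#   _make_divisible(37, 8) -> max(8, (37 + 4) // 8 * 8) = 40; 40 >= 0.9 * 37,
#   so 40 is returned.
#   _make_divisible(10, 8) -> max(8, (10 + 4) // 8 * 8) = 8; 8 < 0.9 * 10, so
#   the divisor is added back and 16 is returned.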
class h_sigmoid(nn.Module):
def __init__(self, inplace=True):
super(h_sigmoid, self).__init__()
self.relu = nn.ReLU6(inplace=inplace)
def forward(self, x):
return self.relu(x + 3) / 6
class h_swish(nn.Module):
def __init__(self, inplace=True):
super(h_swish, self).__init__()
self.sigmoid = h_sigmoid(inplace=inplace)
def forward(self, x):
return x * self.sigmoid(x)
class SELayer(nn.Module):
def __init__(self, channel, reduction=4):
super(SELayer, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc = nn.Sequential(
nn.Linear(channel, channel // reduction),
nn.ReLU(inplace=True),
nn.Linear(channel // reduction, channel),
h_sigmoid()
)
def forward(self, x):
b, c, _, _ = x.size()
y = self.avg_pool(x).view(b, c)
y = self.fc(y).view(b, c, 1, 1)
return x * y
def conv_3x3_bn(inp, oup, stride):
return nn.Sequential(
nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
nn.BatchNorm2d(oup),
h_swish()
)
def conv_1x1_bn(inp, oup):
return nn.Sequential(
nn.Conv2d(inp, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
h_swish()
)
class InvertedResidual(nn.Module):
def __init__(self, inp, hidden_dim, oup, kernel_size, stride, use_se, use_hs):
super(InvertedResidual, self).__init__()
assert stride in [1, 2]
self.identity = stride == 1 and inp == oup
if inp == hidden_dim:
self.conv = nn.Sequential(
# dw
nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride, (kernel_size - 1) // 2, groups=hidden_dim,
bias=False),
nn.BatchNorm2d(hidden_dim),
h_swish() if use_hs else nn.ReLU(inplace=True),
# Squeeze-and-Excite
SELayer(hidden_dim) if use_se else nn.Sequential(),
# pw-linear
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
else:
self.conv = nn.Sequential(
# pw
nn.Conv2d(inp, hidden_dim, 1, 1, 0, bias=False),
nn.BatchNorm2d(hidden_dim),
h_swish() if use_hs else nn.ReLU(inplace=True),
# dw
nn.Conv2d(hidden_dim, hidden_dim, kernel_size, stride, (kernel_size - 1) // 2, groups=hidden_dim,
bias=False),
nn.BatchNorm2d(hidden_dim),
# Squeeze-and-Excite
SELayer(hidden_dim) if use_se else nn.Sequential(),
h_swish() if use_hs else nn.ReLU(inplace=True),
# pw-linear
nn.Conv2d(hidden_dim, oup, 1, 1, 0, bias=False),
nn.BatchNorm2d(oup),
)
def forward(self, x):
if self.identity:
return x + self.conv(x)
else:
return self.conv(x)
class PoseMobileNet(nn.Module):
def __init__(self, cfgs, mode, heads, head_conv, width_mult=1.):
self.inplanes = 64
self.heads = heads
self.deconv_with_bias = False
super(PoseMobileNet, self).__init__()
self.cfgs = cfgs
assert mode in ['large', 'small']
# building first layer
input_channel = _make_divisible(16 * width_mult, 8)
layers = [conv_3x3_bn(3, input_channel, 2)]
# building inverted residual blocks
block = InvertedResidual
for k, exp_size, c, use_se, use_hs, s in self.cfgs:
output_channel = _make_divisible(c * width_mult, 8)
layers.append(block(input_channel, exp_size, output_channel, k, s, use_se, use_hs))
input_channel = output_channel
self.features = nn.Sequential(*layers)
# building last several layers
self.conv = nn.Sequential(
conv_1x1_bn(input_channel, _make_divisible(exp_size * width_mult, 8)),
SELayer(_make_divisible(exp_size * width_mult, 8)) if mode == 'small' else nn.Sequential()
)
self.inplanes = _make_divisible(exp_size * width_mult, 8)
self.deconv_layers = self._make_deconv_layer(
3,
[256, 128, 64],
[4, 4, 4],
)
for head in self.heads:
classes = self.heads[head]
if head_conv > 0:
fc = nn.Sequential(
nn.Conv2d(64, head_conv,
kernel_size=3, padding=1, bias=True),
nn.ReLU(inplace=True),
nn.Conv2d(head_conv, classes,
kernel_size=1, stride=1,
padding=0, bias=True))
if 'hm' in head:
fc[-1].bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
else:
fc = nn.Conv2d(64, classes,
kernel_size=1, stride=1,
padding=0, bias=True)
if 'hm' in head:
fc.bias.data.fill_(-2.19)
else:
fill_fc_weights(fc)
self.__setattr__(head, fc)
    def _get_deconv_cfg(self, deconv_kernel, index):
        if deconv_kernel == 4:
            padding = 1
            output_padding = 0
        elif deconv_kernel == 3:
            padding = 1
            output_padding = 1
        elif deconv_kernel == 2:
            padding = 0
            output_padding = 0
        else:
            raise ValueError(
                'unsupported deconv kernel size: {}'.format(deconv_kernel))
        return deconv_kernel, padding, output_padding
def _make_deconv_layer(self, num_layers, num_filters, num_kernels):
        assert num_layers == len(num_filters), \
            'ERROR: num_deconv_layers differs from len(num_deconv_filters)'
        assert num_layers == len(num_kernels), \
            'ERROR: num_deconv_layers differs from len(num_deconv_kernels)'
layers = []
for i in range(num_layers):
kernel, padding, output_padding = \
self._get_deconv_cfg(num_kernels[i], i)
planes = num_filters[i]
# fc = DeformConvPack(self.inplanes, planes,
# kernel_size=(3, 3), stride=1,
# padding=1, dilation=1, deformable_groups=1)
fc = nn.Conv2d(self.inplanes, planes,
kernel_size=3, stride=1,
padding=1, dilation=1, bias=False)
# fill_fc_weights(fc)
up = nn.ConvTranspose2d(
in_channels=planes,
out_channels=planes,
kernel_size=kernel,
stride=2,
padding=padding,
output_padding=output_padding,
bias=self.deconv_with_bias)
fill_up_weights(up)
layers.append(fc)
layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
layers.append(nn.ReLU(inplace=True))
layers.append(up)
layers.append(nn.BatchNorm2d(planes, momentum=BN_MOMENTUM))
layers.append(nn.ReLU(inplace=True))
self.inplanes = planes
return nn.Sequential(*layers)
def forward(self, x):
x = self.features(x)
x = self.conv(x)
        # print(x.shape)  # debug
x = self.deconv_layers(x)
return [
self.__getattr__(head)(x)
for head in self.heads
]
# ret = {}
# for head in self.heads:
# ret[head] = self.__getattr__(head)(x)
# return [ret]
def init_weights(self):
if 1:
# pretrained_state_dict = torch.load('/home/akirasosa/data/pretrained/mobilenetv3-small-c7eb32fe.pth')
pretrained_state_dict = model_zoo.load_url(
'https://storage.googleapis.com/sosa-pub/mobilenetv3-small-c7eb32fe.pth')
self.load_state_dict(pretrained_state_dict, strict=False)
print('=> init deconv weights from normal distribution')
for name, m in self.deconv_layers.named_modules():
if isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
def get_pose_net(num_layers, heads, head_conv=64):
cfgs = [
# k, t, c, SE, NL, s
[3, 16, 16, 1, 0, 2],
[3, 72, 24, 0, 0, 2],
[3, 88, 24, 0, 0, 1],
[5, 96, 40, 1, 1, 2],
[5, 240, 40, 1, 1, 1],
[5, 240, 40, 1, 1, 1],
[5, 120, 48, 1, 1, 1],
[5, 144, 48, 1, 1, 1],
[5, 288, 96, 1, 1, 2],
[5, 576, 96, 1, 1, 1],
[5, 576, 96, 1, 1, 1],
]
model = PoseMobileNet(cfgs, mode='small', heads=heads, head_conv=head_conv)
model.init_weights()
return model
if __name__ == '__main__':
heads = {'hm': 1, 'wh': 2, 'hps': 34, 'reg': 2, 'hm_hp': 17, 'hp_offset': 2}
net = get_pose_net(0, heads).to('cuda')
out = net(torch.randn((2, 3, 512, 512)).to('cuda'))
# for o in out:
# print(o.shape)
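# Shape sketch for the smoke test above (derived from cfgs, not measured): a
# 512x512 input passes the stride-2 stem and four stride-2 inverted-residual
# stages (total stride 32) down to a 16x16 feature map, and the three stride-2
# deconv layers upsample it back to 128x128 (stride 4). Each head therefore
# returns a tensor of shape (2, num_outputs, 128, 128), e.g. (2, 1, 128, 128)
# for 'hm' and (2, 34, 128, 128) for 'hps'.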
|
[
"[email protected]"
] | |
a8c5736efe67d43d9082343c8675d35c51e02d45
|
eaf4027bfff9c063af70ac8393ccf2a960ea4485
|
/app/views.py
|
31b8262e10f072baefaac074d14e8c10df814e1a
|
[] |
no_license
|
williamjohngardner/movie_api
|
de062040fef7cdf8796040aa992600f18a88d0cd
|
1b452b3da9cb19f97864f34f5c590f6f11798192
|
refs/heads/master
| 2020-12-19T04:42:10.354866 | 2016-06-28T19:25:55 | 2016-06-28T19:25:55 | 62,158,662 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,046 |
py
|
from django.shortcuts import render
from app.models import Movie, Rater, Rating
from django.views.generic import View
from rest_framework import generics
from app.serializers import MovieSerializer, RaterSerializer, RatingSerializer
class MovieListAPIView(generics.ListCreateAPIView):
queryset = Movie.objects.all()
serializer_class = MovieSerializer
class MovieDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Movie.objects.all()
serializer_class = MovieSerializer
class RaterListAPIView(generics.ListCreateAPIView):
queryset = Rater.objects.all()
serializer_class = RaterSerializer
class RaterDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Rater.objects.all()
serializer_class = RaterSerializer
class RatingListAPIView(generics.ListCreateAPIView):
queryset = Rating.objects.all()
serializer_class = RatingSerializer
class RatingDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Rating.objects.all()
serializer_class = RatingSerializer
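# --- Illustrative wiring (not part of this module): a minimal urls.py that
# exposes the six views above. The URL patterns are assumptions, not taken
# from the project.
#
# from django.conf.urls import url
# from app import views
#
# urlpatterns = [
#     url(r'^movies/$', views.MovieListAPIView.as_view()),
#     url(r'^movies/(?P<pk>\d+)/$', views.MovieDetailAPIView.as_view()),
#     url(r'^raters/$', views.RaterListAPIView.as_view()),
#     url(r'^raters/(?P<pk>\d+)/$', views.RaterDetailAPIView.as_view()),
#     url(r'^ratings/$', views.RatingListAPIView.as_view()),
#     url(r'^ratings/(?P<pk>\d+)/$', views.RatingDetailAPIView.as_view()),
# ]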
|
[
"[email protected]"
] | |
c4a68abfea2dc3f7dff0da1407aeeddae4027912
|
f83f5cef508f0356e33e9cca83dce176249090e2
|
/apps/calculate_percentage.py
|
5437777e461de4e23769c716d917dbf5f6280bfa
|
[] |
no_license
|
AnupJoseph/topic_centrality
|
1bcb75b4eaee4536c28dc0fec9927421412f6463
|
21811e8a500f74b4032b0fea5d664320b6f335e8
|
refs/heads/master
| 2023-01-19T11:26:59.218020 | 2020-11-28T09:13:50 | 2020-11-28T09:13:50 | 280,427,036 | 5 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,023 |
py
|
import pandas as pd
from collections import Counter
import os.path
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
def calculate_percentage():
politicians = ['SenSanders', 'realDonaldTrump', 'JoeBiden', 'andrewcuomo', 'TeamPelosi',
'NikkiHaley', 'MittRomney', 'Mike_Pence', 'SenatorCollins', 'PeteButtigieg']
COLS = ['id', 'created_at', 'original_text', 'clean_text',
'retweet_count', 'hashtags', 'mentions', 'original_author']
data = pd.DataFrame(columns=COLS)
for politician in politicians:
df = pd.read_csv(f"data/{politician}/{politician}_data_temp.csv")
# df.drop(labels=['Unnamed: 0','Unnamed: 0.1'],inplace=True)
df.drop('Unnamed: 0',inplace=True,axis=1)
df.drop('Unnamed: 0.1',inplace=True,axis=1)
data = pd.concat([data,df])
percentage_data = Counter(data['lda_cluster'])
total = sum(percentage_data.values())
    return [(item / total) * 100 for item in percentage_data.values()]
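# --- Usage sketch (assumes the script is run from the repo root so the
# relative "data/<politician>/..." paths resolve, and that each CSV carries
# the 'lda_cluster' column read above):
#
# if __name__ == '__main__':
#     print(calculate_percentage())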
|
[
"[email protected]"
] | |
98bbe7cbbd152d0babe84eee89c53b25ed74ea83
|
78f50f3777963500aa78e0d98314a54c46ceed07
|
/cupy/random/__init__.py
|
08fbaac10a4b123afc442d5a291a276548fb9b48
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
KillEdision/chainer
|
78d360f713936de9ed9200b0bc08dc27435f8c27
|
3f70edd67db4d9b687bd4b5f9cc21e426ad58beb
|
refs/heads/master
| 2020-12-24T18:22:51.794784 | 2015-09-12T12:35:05 | 2015-09-12T12:35:05 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 580 |
py
|
import numpy
from cupy.random import distributions
from cupy.random import generator
from cupy.random import sample
rand = sample.rand
randn = sample.randn
random_sample = sample.random_sample
random = random_sample
ranf = random_sample
sample = random_sample
bytes = numpy.random.bytes
lognormal = distributions.lognormal
normal = distributions.normal
standard_normal = distributions.standard_normal
uniform = distributions.uniform
RandomState = generator.RandomState
get_random_state = generator.get_random_state
seed = generator.seed
reset_states = generator.reset_states
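# --- Usage sketch: the aliases above mirror numpy.random's module-level API
# on the GPU, e.g. (assuming a CUDA device is available):
#
#     import cupy
#     cupy.random.seed(0)
#     x = cupy.random.rand(3, 4)         # uniform [0, 1) samples on the GPU
#     y = cupy.random.normal(size=(3,))  # gaussian samples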
|
[
"[email protected]"
] | |
64d00c72230a28b3a56922bc825e9b10434e3e27
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_155/3071.py
|
f0ee9ea5da1cad7eb9ae4cc51b642c17264b5911
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 426 |
py
|
file=open("E:/q1.in")
ansfile=open("E:/ans01.txt","w")
lines = file.readlines()
noitems=int(lines[0])
l=[]
c=0
ans=0
for i in range (1,noitems+1):
c=0
ans=0
l=list(lines[i].strip("\n").split(" ")[1])
for j in range(0,len(l)):
if c<j:
ans+=j-c
c=j
c+=int(l[j])
    ansfile.write("case #" + str(i) + ": " + str(ans) + "\n")
file.close()
ansfile.close()
|
[
"[email protected]"
] | |
5d9b4ae82cb669a73d1b4b521ff3accb5d759d9e
|
5b9a7423f4f52b905be652cb8bbd10072bf9ffcd
|
/brute_http_form.py
|
cdb6b030d7886562eb7bd8b7e5c4af268ea95e3f
|
[] |
no_license
|
obnosis/scripts
|
f411722253215c73bfc467dfa107912980d52e45
|
df9db475ab5920823e4a11faf4a880de7e633cc6
|
refs/heads/master
| 2021-01-16T18:14:29.109986 | 2013-04-05T15:55:32 | 2013-04-05T15:55:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 337 |
py
|
import requests
URL = ''
USER_VAR = 'user_id'
PASS_VAR = 'user_pw'
def get_users():
return ['admin']
def get_pwds():
return ['Test1', 'Test2', 'Camera1']
for user in get_users():
for pwd in get_pwds():
auth = {USER_VAR: user, PASS_VAR: pwd}
resp = requests.post(URL, data=auth)
        print(resp.text)
|
[
"[email protected]"
] | |
0bf420d601adfcaefef9e783bb2ceb944757decc
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02785/s693405136.py
|
c468c690c6909138d5659733d995c58b43534902
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 80 |
py
|
N,K,*H = map(int, open(0).read().split())
H.sort(reverse=True)
print(sum(H[K:]))
|
[
"[email protected]"
] | |
f0ca5cacf63bb531c1973663118d4212e89662e0
|
21b0b4c27193898207751c91b8b2ed168a1b1638
|
/py/py_0418_factorisation_triples.py
|
bf71bc166aae2762f714f628cc860fd4ee8c29c9
|
[
"MIT"
] |
permissive
|
lcsm29/project-euler
|
67560a4e66968f1671a3d7ecf2dda6c956893dca
|
fab794ece5aa7a11fc7c2177f26250f40a5b1447
|
refs/heads/main
| 2023-07-04T11:45:24.374841 | 2021-08-07T08:20:41 | 2021-08-07T08:20:41 | 371,808,781 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 672 |
py
|
# Solution of;
# Project Euler Problem 418: Factorisation triples
# https://projecteuler.net/problem=418
#
# Let n be a positive integer. An integer triple (a, b, c) is called a
# factorisation triple of n if: 1 ≤ a ≤ b ≤ c and a·b·c = n. Define f(n) to be a +
# b + c for the factorisation triple (a, b, c) of n which minimises c / a. One
# can show that this triple is unique. For example, f(165) = 19, f(100100) =
# 142 and f(20!) = 4034872. Find f(43!).
#
# by lcsm29 http://github.com/lcsm29/project-euler
import timed
def dummy(n):
pass
if __name__ == '__main__':
n = 1000
i = 10000
prob_id = 418
timed.caller(dummy, n, i, prob_id)
|
[
"[email protected]"
] | |
d8a5ceb674eade3bb97a5b46c8118190fc310a72
|
ce74ed4ad6834168b81d6ec5e53c80935f247fe1
|
/scripts/evaluate.simple.py
|
131baf5404548192c5e940fb3814493809dd7554
|
[] |
no_license
|
chenghuige/melt
|
6b6984243c71a85ec343cfaa67a66e3d1b48c180
|
d2646ffe84eabab464b4bef6b31d218abdbf6ce5
|
refs/heads/master
| 2021-01-25T16:46:57.567890 | 2017-08-26T04:30:13 | 2017-08-26T04:30:13 | 101,304,210 | 6 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,796 |
py
|
#!/usr/bin/env python
#coding=gbk
# ==============================================================================
# \file evaluate.py
# \author chenghuige
# \date 2014-01-04 08:58:40.965360
# \Description similar to TLC: show confusion matrix and a pic of the AUC
# input is one file : instance,true,probability,assigned,..
# for libsvm test, need two files as input: feature(.libsvm) and result(.predict) -> run svm-evaluate.py or svm-gen-evaluate.py first
# for tlc the header format is: instance,true, assigned,output, probability
# TODO understand other output of tlc and add more
# ==============================================================================
import sys,os,glob
import gflags
from gflags import *
#hack for some machine sklearn/externals/joblib/parallel.py:41: UserWarning: This platform lacks a functioning sem_open implementation, therefore, the required synchronization primitives needed will not function, see issue 3770.. joblib will operate in serial mode
import warnings
warnings.filterwarnings("ignore")
#hack for cxfreeze
import sklearn.utils.sparsetools._graph_validation
from scipy.sparse.csgraph import _validation
from sklearn.utils import lgamma
#import pylab as pl
#import matplotlib.pyplot as pl
from sklearn.metrics import roc_curve, auc
from sklearn.metrics import precision_recall_curve
DEFINE_boolean('show', False, 'whether to show the roc pic')
DEFINE_float('thre', 0.5, 'threshold for deciding the predicted label')
DEFINE_string('image', 'temp.roc.pr.png', 'output image')
DEFINE_integer('max_num', 20, 'most to deal')
DEFINE_string('regex', '', 'use regex to find files to deal')
DEFINE_string('column', 'probability', 'score index name')
#confusion matrix, auc, roc curve
def evaluate(label_list, predicts, predict_list, file_name):
#---------------------------------confusion table
tp = 0
fp = 0
tn = 0
fn = 0
for i in range(len(label_list)):
if (predict_list[i] == 1):
if (label_list[i] == 1):
tp += 1
else:
fp += 1
else:
if (label_list[i] == 1):
fn += 1
else:
tn += 1
num_pos = tp + fn
num_neg = fp + tn
total_instance = num_pos + num_neg
pratio = num_pos * 1.0 / total_instance
#true positive rate
tpr = tp * 1.0 / num_pos
tnr = tn * 1.0 / num_neg
#num of predicted positive
num_pp = tp + fp
num_pn = fn + tn
    #true positive accuracy (precision)
tpa = 1
tna = 1
if num_pp != 0:
tpa = tp * 1.0 / num_pp
if num_pn != 0:
tna = tn * 1.0 / num_pn
ok_num = tp + tn
accuracy = ok_num * 1.0 / total_instance
print """
TEST POSITIVE RATIO: %.4f (%d/(%d+%d))
Confusion table:
||===============================|
|| PREDICTED |
TRUTH || positive | negative | RECALL
||===============================|
positive|| %-5d | %-5d | [%.4f] (%d / %d)
negative|| %-5d | %-5d | %.4f (%d / %d) wushang:[%.4f]
||===============================|
PRECISION [%.4f] (%d/%d) %.4f(%d/%d)
OVERALL 0/1 ACCURACY: %.4f (%d/%d)
"""%(pratio, num_pos, num_pos, num_neg, tp, fn, tpr, tp, num_pos, fp, tn, tnr, tn, num_neg, 1 - tnr, tpa, tp, num_pp, tna, tn, num_pn, accuracy, ok_num, total_instance)
#----------------------------------------------------- auc area
#from sklearn.metrics import roc_auc_score
#auc = roc_auc_score(label_list, predicts)
fpr_, tpr_, thresholds = roc_curve(label_list, predicts)
roc_auc = auc(fpr_, tpr_)
print """
ACCURACY: %.4f
POS. PRECISION: %.4f
POS. RECALL: %.4f
NEG. PRECISION: %.4f
NEG. RECALL: %.4f
AUC: [%.4f]
"""%(accuracy, tpa, tpr, tna, tnr, roc_auc)
#------------------------------------------------------roc curve
#pl.clf()
#pl.plot(fpr_, tpr_, label='%s: (area = %0.4f)' % (file_name, roc_auc))
#pl.plot([0, 1], [0, 1], 'k--')
#pl.xlim([0.0, 1.0])
#pl.ylim([0.0, 1.0])
#pl.xlabel('False Positive Rate')
#pl.ylabel('True Positive Rate')
#pl.title('Roc Curve:')
#pl.legend(loc="upper right")
def parse_input(input):
lines = open(input).readlines()
header = lines[0]
lines = lines[1:]
label_idx = 1
output_idx = 3
probability_idx = 4
names = header.split()
for i in range(len(names)):
if (names[i].lower() == 'label' or names[i].lower() == 'true'):
label_idx = i
if (names[i].lower() == 'output'):
output_idx = i
if (names[i].lower() == FLAGS.column.lower()):
probability_idx = i
try:
line_list = [line.strip().split() for line in lines]
label_list = [int(float((l[label_idx]))) for l in line_list]
predicts = [float(l[probability_idx]) for l in line_list]
#predicts = [float(l[output_idx]) for l in line_list]
predict_list = [int(item >= FLAGS.thre) for item in predicts]
return label_list, predicts, predict_list
except Exception:
print "label_idx: " + str(label_idx) + " prob_idx: " + str(probability_idx)
exit(1)
def precision_recall(label_list, predicts, file_name):
# Compute Precision-Recall and plot curve
precision, recall, thresholds = precision_recall_curve(label_list, predicts)
area = auc(recall, precision)
#print("Area Under Curve: %0.2f" % area)
#pl.clf()
#pl.plot(recall, precision, label='%s (area = %0.4f)'%(file_name, area))
#pl.xlabel('Recall')
#pl.ylabel('Precision')
#pl.ylim([0.0, 1.05])
#pl.xlim([0.0, 1.0])
#pl.title('Precision-Recall curve')
#pl.legend(loc="lower left")
def main(argv):
try:
argv = FLAGS(argv) # parse flags
except gflags.FlagsError, e:
print '%s\nUsage: %s ARGS\n%s' % (e, sys.argv[0], FLAGS)
sys.exit(1)
pos = len(argv) - 1
try:
FLAGS.thre = float(argv[-1])
pos -= 1
except Exception:
pass
#---------------------------------thre
print "Thre: %.4f"%FLAGS.thre
#---------------------------------deal input
l = []
if (FLAGS.regex != ""):
print "regex: " + FLAGS.regex
l = glob.glob(FLAGS.regex)
print l
else:
input = argv[1]
l = input.split()
if (len(l) > 1):
FLAGS.show = True
if (len(l) > FLAGS.max_num):
l = l[:FLAGS.max_num]
#deal with more than 1 input
#f = pl.figure("Model Evaluation",figsize=(32,12), dpi = 100)
#f.add_subplot(1, 2, 1)
for input in l:
print "--------------- " + input
label_list, predicts, predict_list = parse_input(input)
evaluate(label_list, predicts, predict_list, input)
#f.add_subplot(1, 2, 0)
for input in l:
label_list, predicts, predict_list = parse_input(input)
precision_recall(label_list, predicts, input)
else:
input2 = ""
if (pos > 1):
input2 = argv[2]
#FLAGS.show = True
print "--------------- " + input
label_list, predicts, predict_list = parse_input(input)
#f = pl.figure(figsize=(32,12))
#f.add_subplot(1, 2, 1)
evaluate(label_list, predicts, predict_list, input)
print "--------------- " + input2
label_list2 = []
predicts2 = []
predict_list2 = []
if (input2 != ""):
label_list2, predicts2, predict_list2 = parse_input(input2)
evaluate(label_list2, predicts2, predict_list2, input2)
#f.add_subplot(1, 2, 0)
precision_recall(label_list, predicts, input)
if (input2 != ""):
precision_recall(label_list2, predicts2, input2)
#pl.savefig(FLAGS.image)
#if (FLAGS.show):
# pl.show()
if __name__ == "__main__":
main(sys.argv)
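# --- Example input (inferred from the header parsing above, so treat the
# column layout as an assumption): a whitespace-separated file whose header
# names a 'true'/'label' column and a 'probability' column, e.g.
#
#   instance true assigned output probability
#   0        1    1        1.73   0.85
#   1        0    0        -0.42  0.31
#
# Invoked as:  python evaluate.simple.py predictions.txt 0.5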
|
[
"chenghuige@fa64baa9-71d1-4fed-97ae-c15534abce97"
] |
chenghuige@fa64baa9-71d1-4fed-97ae-c15534abce97
|
b1f21c6a67c048f668ee6bd826fcfd903544ad41
|
04b1803adb6653ecb7cb827c4f4aa616afacf629
|
/third_party/blink/web_tests/external/wpt/mathml/tools/xHeight.py
|
724352bf91679ea9a7e4d862ad43b5707f9cfb7b
|
[
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft"
] |
permissive
|
Samsung/Castanets
|
240d9338e097b75b3f669604315b06f7cf129d64
|
4896f732fc747dfdcfcbac3d442f2d2d42df264a
|
refs/heads/castanets_76_dev
| 2023-08-31T09:01:04.744346 | 2021-07-30T04:56:25 | 2021-08-11T05:45:21 | 125,484,161 | 58 | 49 |
BSD-3-Clause
| 2022-10-16T19:31:26 | 2018-03-16T08:07:37 | null |
UTF-8
|
Python
| false | false | 260 |
py
|
#!/usr/bin/python
from utils import mathfont
import fontforge
v = mathfont.em / 2
f = mathfont.create("xheight%d" % v)
g = f.createChar(ord('x'))
mathfont.drawRectangleGlyph(g, mathfont.em, v, 0)
assert f.xHeight == v, "Bad x-height value!"
mathfont.save(f)
|
[
"[email protected]"
] | |
76e8517821c4f615d7904b26dc91fa843601111f
|
e16fbfdf1e3074d59b70902073c3024bafa77235
|
/spider/dongguan/dongguan/pipelines.py
|
28c993d4e8d12f03a0724243a46208c56c7fa9da
|
[] |
no_license
|
haha479/Scrapy
|
aa52a0999ef3b2c8570696a85cc6dfd95ebd1d03
|
d6c2c12b94e2ecf4846d20bfe8349e3bd09a3beb
|
refs/heads/master
| 2021-09-04T14:29:38.555015 | 2018-01-19T14:42:07 | 2018-01-19T14:42:07 | 117,560,606 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 579 |
py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import codecs
import json
class DongguanPipeline(object):
def __init__(self):
        self.filename = codecs.open("dongguan.json", "w", encoding="utf-8")
def process_item(self, item, spider):
text = json.dumps(dict(item),ensure_ascii=False) + "\n"
self.filename.write(text)
return item
def close_spider(self,spider):
self.filename.close()
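# --- As the note at the top says, the pipeline must be enabled in the
# project's settings.py; a minimal sketch (the priority 300 is an arbitrary,
# conventional choice):
#
# ITEM_PIPELINES = {
#     'dongguan.pipelines.DongguanPipeline': 300,
# }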
|
[
"[email protected]"
] | |
177cdee2c48eaed1e374cd0c9a356b1efbab84af
|
05dc9e0e2d14246c3b920abedfef5526eee66f1d
|
/env/bin/python-config
|
4260ff927740213c2939ecf3598615b0a09f0195
|
[] |
no_license
|
msrshahrukh100/temporary
|
e1994dce7894718a0146b60b0408d58508424a58
|
7e2092b90a903f3b985fa8194061ab94a0808454
|
refs/heads/master
| 2020-05-23T08:15:30.217505 | 2016-10-07T18:55:34 | 2016-10-07T18:55:34 | 70,273,763 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,357 |
#!/home/mohammad/JamiaMemoriesWebsite/env/bin/python
import sys
import getopt
import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
'ldflags', 'help']
if sys.version_info >= (3, 2):
valid_opts.insert(-1, 'extension-suffix')
valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
valid_opts.append('configdir')
def exit_with_usage(code=1):
sys.stderr.write("Usage: {0} [{1}]\n".format(
sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
sys.exit(code)
try:
opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
exit_with_usage()
if not opts:
exit_with_usage()
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
exit_with_usage(code=0)
for opt in opt_flags:
if opt == '--prefix':
print(sysconfig.get_config_var('prefix'))
elif opt == '--exec-prefix':
print(sysconfig.get_config_var('exec_prefix'))
elif opt in ('--includes', '--cflags'):
flags = ['-I' + sysconfig.get_path('include'),
'-I' + sysconfig.get_path('platinclude')]
if opt == '--cflags':
flags.extend(getvar('CFLAGS').split())
print(' '.join(flags))
elif opt in ('--libs', '--ldflags'):
abiflags = getattr(sys, 'abiflags', '')
libs = ['-lpython' + pyver + abiflags]
libs += getvar('LIBS').split()
libs += getvar('SYSLIBS').split()
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
# shared library in prefix/lib/.
if opt == '--ldflags':
if not getvar('Py_ENABLE_SHARED'):
libs.insert(0, '-L' + getvar('LIBPL'))
if not getvar('PYTHONFRAMEWORK'):
libs.extend(getvar('LINKFORSHARED').split())
print(' '.join(libs))
elif opt == '--extension-suffix':
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
if ext_suffix is None:
ext_suffix = sysconfig.get_config_var('SO')
print(ext_suffix)
elif opt == '--abiflags':
if not getattr(sys, 'abiflags', None):
exit_with_usage()
print(sys.abiflags)
elif opt == '--configdir':
print(sysconfig.get_config_var('LIBPL'))
|
[
"[email protected]"
] | ||
4a5793eee465438873f58bc2bc7c5684c2671b42
|
b6febded37da6c15b13c44a6d041d83306699abd
|
/netneurotools/freesurfer.py
|
5941f380c456e7efe02977caf0e1fdbc3563c705
|
[
"BSD-3-Clause"
] |
permissive
|
allizwell2018/netneurotools
|
43c7a98e72ab00d2414981022744da51961b78c7
|
dfe3f309c621b44d7f075805c8872764ef3e1139
|
refs/heads/master
| 2020-09-13T19:14:32.156414 | 2019-11-04T16:32:09 | 2019-11-04T16:32:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 20,004 |
py
|
# -*- coding: utf-8 -*-
"""
Functions for working with FreeSurfer data and parcellations
"""
import os
import os.path as op
from nibabel.freesurfer import read_annot, read_geometry
import numpy as np
from scipy.ndimage.measurements import _stats, labeled_comprehension
from scipy.spatial.distance import cdist
from .datasets import fetch_fsaverage
from .stats import gen_spinsamples
from .utils import check_fs_subjid, run
def apply_prob_atlas(subject_id, gcs, hemi, *, orig='white', annot=None,
ctab=None, subjects_dir=None, use_cache=True,
quiet=False):
"""
Creates an annotation file for `subject_id` by applying atlas in `gcs`
    Runs a subprocess calling FreeSurfer's "mris_ca_label" function; as such,
    FreeSurfer must be installed and accessible on the local system path.
Parameters
----------
subject_id : str
FreeSurfer subject ID
gcs : str
Filepath to .gcs file containing classifier array
hemi : {'lh', 'rh'}
Hemisphere corresponding to `gcs` file
orig : str, optional
        Original surface to which to apply the classifier. Default: 'white'
annot : str, optional
Path to output annotation file to generate. If set to None, the name is
created from the provided `hemi` and `gcs`. If provided as a
relative path, it is assumed to stem from `subjects_dir`/`subject_id`.
Default: None
ctab : str, optional
Path to colortable corresponding to `gcs`. Default: None
subjects_dir : str, optional
Path to FreeSurfer subject directory. If not set, will inherit from
the environmental variable $SUBJECTS_DIR. Default: None
use_cache : bool, optional
Whether to check for existence of `annot` in directory specified by
`{subjects_dir}/{subject_id}/label' and use that, if it exists. If
False, will create a new annot file. Default: True
quiet : bool, optional
Whether to restrict status messages. Default: False
Returns
-------
annot : str
Path to generated annotation file
"""
cmd = 'mris_ca_label {opts}{subject_id} {hemi} {hemi}.sphere.reg ' \
'{gcs} {annot}'
if hemi not in ['rh', 'lh']:
raise ValueError('Provided hemisphere designation `hemi` must be one '
'of \'rh\' or \'lh\'. Provided: {}'.format(hemi))
if not op.isfile(gcs):
raise ValueError('Cannot find specified `gcs` file {}.'.format(gcs))
subject_id, subjects_dir = check_fs_subjid(subject_id, subjects_dir)
# add all the options together, as specified
opts = ''
if ctab is not None and op.isfile(ctab):
opts += '-t {} '.format(ctab)
if orig is not None:
opts += '-orig {} '.format(orig)
if subjects_dir is not None:
opts += '-sdir {} '.format(subjects_dir)
else:
subjects_dir = os.environ['SUBJECTS_DIR']
# generate output filename
if annot is None:
base = '{}.{}.annot'.format(hemi, gcs[:-4])
annot = op.join(subjects_dir, subject_id, 'label', base)
else:
# if not a full path, assume relative from subjects_dir/subject_id
if not annot.startswith(op.abspath(os.sep)):
annot = op.join(subjects_dir, subject_id, annot)
# if annotation file doesn't exist or we explicitly want to make a new one
if not op.isfile(annot) or not use_cache:
run(cmd.format(opts=opts, subject_id=subject_id, hemi=hemi,
gcs=gcs, annot=annot),
quiet=quiet)
return annot
def _decode_list(vals):
""" List decoder
"""
return [l.decode() if hasattr(l, 'decode') else l for l in vals]
def find_parcel_centroids(*, lhannot, rhannot, version='fsaverage',
surf='sphere', drop=None):
"""
Returns vertex coords corresponding to centroids of parcels in annotations
Note that using any other `surf` besides the default of 'sphere' may result
in centroids that are not directly within the parcels themselves due to
sulcal folding patterns.
Parameters
----------
{lh,rh}annot : str
Path to .annot file containing labels of parcels on the {left,right}
hemisphere. These must be specified as keyword arguments to avoid
accidental order switching.
version : str, optional
Specifies which version of `fsaverage` provided annotation files
correspond to. Must be one of {'fsaverage', 'fsaverage3', 'fsaverage4',
'fsaverage5', 'fsaverage6'}. Default: 'fsaverage'
surf : str, optional
Specifies which surface projection of fsaverage to use for finding
parcel centroids. Default: 'sphere'
drop : list, optional
Specifies regions in {lh,rh}annot for which the parcel centroid should
not be calculated. If not specified, centroids for 'unknown' and
'corpuscallosum' are not calculated. Default: None
Returns
-------
centroids : (N, 3) numpy.ndarray
xyz coordinates of vertices closest to the centroid of each parcel
defined in `lhannot` and `rhannot`
hemiid : (N,) numpy.ndarray
Array denoting hemisphere designation of coordinates in `centroids`,
        where `hemiid=0` denotes the left and `hemiid=1` the right hemisphere
"""
if drop is None:
drop = ['unknown', 'corpuscallosum']
drop = _decode_list(drop)
surfaces = fetch_fsaverage(version)[surf]
centroids, hemiid = [], []
for n, (annot, surf) in enumerate(zip([lhannot, rhannot], surfaces)):
vertices, faces = read_geometry(surf)
labels, ctab, names = read_annot(annot)
names = _decode_list(names)
for lab in np.unique(labels):
if names[lab] in drop:
continue
coords = np.atleast_2d(vertices[labels == lab].mean(axis=0))
roi = vertices[np.argmin(cdist(vertices, coords), axis=0)[0]]
centroids.append(roi)
hemiid.append(n)
return np.row_stack(centroids), np.asarray(hemiid)
def parcels_to_vertices(data, *, lhannot, rhannot, drop=None):
"""
Projects parcellated `data` to vertices defined in annotation files
Assigns np.nan to 'unknown' and 'corpuscallosum' vertices in annotation
files.
Parameters
----------
data : (N,) numpy.ndarray
Parcellated data to be projected to vertices. Parcels should be ordered
by [left, right] hemisphere; ordering within hemisphere should
correspond to the provided annotation files.
{lh,rh}annot : str
Path to .annot file containing labels of parcels on the {left,right}
hemisphere. These must be specified as keyword arguments to avoid
accidental order switching.
drop : list, optional
Specifies regions in {lh,rh}annot that are not present in `data`. NaNs
will be inserted in place of the these regions in the returned data. If
not specified, 'unknown' and 'corpuscallosum' are assumed to not be
present. Default: None
    Returns
    -------
projected : numpy.ndarray
Vertex-level data
"""
if drop is None:
drop = ['unknown', 'corpuscallosum']
drop = _decode_list(drop)
start = end = 0
projected = []
# check this so we're not unduly surprised by anything...
expected = sum([len(read_annot(a)[-1]) - 2 for a in [lhannot, rhannot]])
if expected != len(data):
raise ValueError('Number of parcels in provided annotation files '
'differs from size of parcellated data array.\n'
' EXPECTED: {} parcels\n'
' RECEIVED: {} parcels'
.format(expected, len(data)))
for annot in [lhannot, rhannot]:
# read files and update end index for `data`
labels, ctab, names = read_annot(annot)
names = _decode_list(names)
end += len(names) - 2 # unknown and corpuscallosum
# get indices of unknown and corpuscallosum and insert NaN values
inds = [names.index(f) - n for n, f in enumerate(drop)]
currdata = np.insert(data[start:end], inds, np.nan)
# project to vertices and store
projected.append(currdata[labels])
start = end
return np.hstack(projected)
def vertices_to_parcels(data, *, lhannot, rhannot, drop=None):
"""
Reduces vertex-level `data` to parcels defined in annotation files
Takes average of vertices within each parcel, excluding np.nan values
(i.e., np.nanmean). Assigns np.nan to parcels for which all vertices are
np.nan.
Parameters
----------
data : (N,) numpy.ndarray
Vertex-level data to be reduced to parcels
{lh,rh}annot : str
Path to .annot file containing labels to parcels on the {left,right}
hemisphere
drop : list, optional
Specifies regions in {lh,rh}annot that should be removed from the
parcellated version of `data`. If not specified, 'unknown' and
'corpuscallosum' will be removed. Default: None
    Returns
    -------
reduced : numpy.ndarray
Parcellated `data`, without regions specified in `drop`
"""
if drop is None:
drop = ['unknown', 'corpuscallosum']
drop = _decode_list(drop)
start = end = 0
reduced = []
# check this so we're not unduly surprised by anything...
expected = sum([len(read_annot(a)[0]) for a in [lhannot, rhannot]])
if expected != len(data):
raise ValueError('Number of vertices in provided annotation files '
'differs from size of vertex-level data array.\n'
' EXPECTED: {} vertices\n'
' RECEIVED: {} vertices'
.format(expected, len(data)))
for annot in [lhannot, rhannot]:
# read files and update end index for `data`
labels, ctab, names = read_annot(annot)
names = _decode_list(names)
indices = np.unique(labels)
end += len(labels)
# get average of vertex-level data within parcels
# set all NaN values to 0 before calling `_stats` because we are
# returning sums, so the 0 values won't impact the sums (if we left
# the NaNs then all parcels with even one NaN entry would be NaN)
currdata = np.squeeze(data[start:end])
isna = np.isnan(currdata)
counts, sums = _stats(np.nan_to_num(currdata), labels, indices)
        # however, we do need to subtract the NaN values from the counts so
        # that our means match what we'd get from e.g., np.nanmean; here, the
        # "sums" computed from the boolean mask are the counts of NaN values
        # in each parcel
_, nacounts = _stats(isna, labels, indices)
counts = (np.asanyarray(counts, dtype=float)
- np.asanyarray(nacounts, dtype=float))
with np.errstate(divide='ignore', invalid='ignore'):
currdata = sums / counts
        # get indices of unknown and corpuscallosum and delete from parcels
inds = [names.index(f) for f in drop]
currdata = np.delete(currdata, inds)
# store parcellated data
reduced.append(currdata)
start = end
return np.hstack(reduced)
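# Round-trip sketch (continuing the hypothetical example above): every vertex
# in a parcel carries the parcel's value after projection, so averaging back
# recovers the original data up to floating-point error and NaN handling.
#
# >>> back = vertices_to_parcels(vert, lhannot='lh.aparc.annot',
# ...                            rhannot='rh.aparc.annot')
# >>> np.allclose(parc, back)
# True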
def _get_fsaverage_coords(version='fsaverage', surface='sphere'):
"""
Gets vertex coordinates for specified `surface` of fsaverage `version`
Parameters
----------
version : str, optional
One of {'fsaverage', 'fsaverage3', 'fsaverage4', 'fsaverage5',
'fsaverage6'}. Default: 'fsaverage'
surface : str, optional
Surface for which to return vertex coordinates. Default: 'sphere'
Returns
-------
coords : (N, 3) numpy.ndarray
xyz coordinates of vertices for {left,right} hemisphere
hemiid : (N,) numpy.ndarray
Array denoting hemisphere designation of entries in `coords`, where
`hemiid=0` denotes the left and `hemiid=1` the right hemisphere
"""
# get coordinates and hemisphere designation for spin generation
lhsphere, rhsphere = fetch_fsaverage(version)[surface]
coords, hemi = [], []
for n, sphere in enumerate([lhsphere, rhsphere]):
coords.append(read_geometry(sphere)[0])
hemi.append(np.ones(len(coords[-1])) * n)
return np.row_stack(coords), np.hstack(hemi)
def spin_data(data, *, lhannot, rhannot, version='fsaverage', n_rotate=1000,
drop=None, seed=None, verbose=False, return_cost=False):
"""
Projects parcellated `data` to surface, rotates, and re-parcellates
Projection to the surface uses `{lh,rh}annot` files. Rotation uses vertex
coordinates from the specified fsaverage `version` and relies on
:func:`netneurotools.stats.gen_spinsamples`. Re-parcellated data will not
    be exactly identical to the original values due to the re-averaging process.
Parcels subsumed by regions in `drop` will be listed as NaN.
Parameters
----------
data : (N,) numpy.ndarray
Parcellated data to be rotated. Parcels should be ordered by [left,
right] hemisphere; ordering within hemisphere should correspond to the
provided `{lh,rh}annot` annotation files.
{lh,rh}annot : str
Path to .annot file containing labels to parcels on the {left,right}
hemisphere
version : str, optional
Specifies which version of `fsaverage` provided annotation files
correspond to. Must be one of {'fsaverage', 'fsaverage3', 'fsaverage4',
'fsaverage5', 'fsaverage6'}. Default: 'fsaverage'
n_rotate : int, optional
Number of rotations to generate. Default: 1000
drop : list, optional
Specifies regions in {lh,rh}annot that are not present in `data`. NaNs
        will be inserted in place of these regions in the returned data. If
not specified, 'unknown' and 'corpuscallosum' are assumed to not be
present. Default: None
seed : {int, np.random.RandomState instance, None}, optional
Seed for random number generation. Default: None
verbose : bool, optional
Whether to print occasional status messages. Default: False
return_cost : bool, optional
        Whether to return cost array (specified as Euclidean distance) for
        each coordinate for each rotation. Default: False
Returns
-------
rotated : (N, `n_rotate`) numpy.ndarray
        Rotated `data`
cost : (N, `n_rotate`,) numpy.ndarray
Cost (specified as Euclidean distance) of re-assigning each coordinate
for every rotation in `spinsamples`. Only provided if `return_cost` is
True.
"""
if drop is None:
drop = ['unknown', 'corpuscallosum']
# get coordinates and hemisphere designation for spin generation
coords, hemiid = _get_fsaverage_coords(version, 'sphere')
vertices = parcels_to_vertices(data, lhannot=lhannot, rhannot=rhannot,
drop=drop)
if len(vertices) != len(coords):
raise ValueError('Provided annotation files have a different number '
'of vertices than the specified fsaverage surface.\n'
' ANNOTATION: {} vertices\n'
' FSAVERAGE: {} vertices'
.format(len(vertices), len(coords)))
spins, cost = gen_spinsamples(coords, hemiid, n_rotate=n_rotate,
seed=seed, verbose=verbose)
spun = np.zeros((len(data), n_rotate))
for n in range(n_rotate):
spun[:, n] = vertices_to_parcels(vertices[spins[:, n]],
lhannot=lhannot, rhannot=rhannot,
drop=drop)
if return_cost:
return spun, cost
return spun
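# Sketch of a spin-test null (added; names and paths are illustrative): rotate
# a parcellated map, then compare an empirical statistic against the same
# statistic computed on each rotation. `other_map` is a hypothetical second
# brain map of matching size.
#
# >>> rotated = spin_data(parc, lhannot='lh.aparc.annot',
# ...                     rhannot='rh.aparc.annot', n_rotate=100, seed=1234)
# >>> rotated.shape  # (len(parc), 100)
# >>> nulls = [np.corrcoef(other_map, rotated[:, i])[0, 1] for i in range(100)]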
def spin_parcels(*, lhannot, rhannot, version='fsaverage', n_rotate=1000,
drop=None, seed=None, verbose=False, return_cost=False):
"""
Rotates parcels in `{lh,rh}annot` and re-assigns based on maximum overlap
Vertex labels are rotated with :func:`netneurotools.stats.gen_spinsamples`
and a new label is assigned to each *parcel* based on the region maximally
overlapping with its boundaries.
Parameters
----------
{lh,rh}annot : str
Path to .annot file containing labels to parcels on the {left,right}
hemisphere
version : str, optional
Specifies which version of `fsaverage` provided annotation files
correspond to. Must be one of {'fsaverage', 'fsaverage3', 'fsaverage4',
'fsaverage5', 'fsaverage6'}. Default: 'fsaverage'
n_rotate : int, optional
Number of rotations to generate. Default: 1000
drop : list, optional
        Specifies regions in {lh,rh}annot that should be ignored when
        re-assigning parcel labels; parcels subsumed by these regions are
        marked with -1 in the returned matrix. If not specified, 'unknown'
        and 'corpuscallosum' are assumed to not be present. Default: None
seed : {int, np.random.RandomState instance, None}, optional
Seed for random number generation. Default: None
verbose : bool, optional
Whether to print occasional status messages. Default: False
return_cost : bool, optional
        Whether to return cost array (specified as Euclidean distance) for
        each coordinate for each rotation. Default: False
Returns
-------
spinsamples : (N, `n_rotate`) numpy.ndarray
Resampling matrix to use in permuting data parcellated with labels from
{lh,rh}annot, where `N` is the number of parcels. Indices of -1
indicate that the parcel was completely encompassed by regions in
`drop` and should be ignored.
cost : (N, `n_rotate`,) numpy.ndarray
Cost (specified as Euclidean distance) of re-assigning each coordinate
for every rotation in `spinsamples`. Only provided if `return_cost` is
True.
"""
def overlap(vals):
""" Returns most common non-negative value in `vals`; -1 if all neg
"""
vals = np.asarray(vals)
vals, counts = np.unique(vals[vals > 0], return_counts=True)
try:
return vals[counts.argmax()]
except ValueError:
return -1
if drop is None:
drop = ['unknown', 'corpuscallosum']
drop = _decode_list(drop)
# get vertex-level labels (set drop labels to - values)
vertices, end = [], 0
for n, annot in enumerate([lhannot, rhannot]):
labels, ctab, names = read_annot(annot)
names = _decode_list(names)
inds = [names.index(f) - n for n, f in enumerate(drop)]
labs = np.arange(len(names) - len(inds)) + (end - (len(inds) * n))
insert = np.arange(-1, -(len(inds) + 1), -1)
vertices.append(np.insert(labs, inds, insert)[labels])
end += len(names)
vertices = np.hstack(vertices)
labels = np.unique(vertices)
mask = labels > -1
# get coordinates and hemisphere designation for spin generation
coords, hemiid = _get_fsaverage_coords(version, 'sphere')
if len(vertices) != len(coords):
raise ValueError('Provided annotation files have a different number '
'of vertices than the specified fsaverage surface.\n'
' ANNOTATION: {} vertices\n'
' FSAVERAGE: {} vertices'
.format(len(vertices), len(coords)))
# spin and assign regions based on max overlap
spins, cost = gen_spinsamples(coords, hemiid, n_rotate=n_rotate,
seed=seed, verbose=verbose)
regions = np.zeros((len(labels[mask]), n_rotate), dtype='int32')
for n in range(n_rotate):
regions[:, n] = labeled_comprehension(vertices[spins[:, n]], vertices,
labels, overlap, int, -1)[mask]
if return_cost:
return regions, cost
return regions
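# Sketch (added; names illustrative): unlike `spin_data`, `spin_parcels`
# returns a resampling matrix, so one permuted map is built by indexing, with
# -1 entries (parcels subsumed entirely by dropped regions) masked as NaN.
#
# >>> spins = spin_parcels(lhannot='lh.aparc.annot', rhannot='rh.aparc.annot',
# ...                      n_rotate=100, seed=1234)
# >>> null0 = np.where(spins[:, 0] == -1, np.nan, parc[spins[:, 0]])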
|
[
"[email protected]"
] | |
424f0c0c5072d11b2dae9aff8f3df1b4504f7a0b
|
6163c502cba634a922c448219afb6021e88f2747
|
/Data Visualization/Seaborn/faceting with Seaborn.py
|
d1d558c9b2b05a87ac10d7ae795d3d571f1d9c16
|
[] |
no_license
|
Okroshiashvili/Data-Science-Lab
|
9cddf8ff7dae41dabe321fa8804a9b041c24596b
|
796d4f5c0ec8c90d373bde2bfbc0cf244f62e69b
|
refs/heads/master
| 2023-04-05T05:36:48.088628 | 2023-03-22T18:52:04 | 2023-03-22T18:52:04 | 130,345,437 | 12 | 2 | null | 2022-11-21T23:41:31 | 2018-04-20T10:08:23 |
Python
|
UTF-8
|
Python
| false | false | 2,018 |
py
|
"""
Data Link:
https://www.kaggle.com/thec03u5/fifa-18-demo-player-dataset
Faceting is the act of breaking data variables up across multiple subplots,
and combining those subplots into a single figure.
"""
import pandas as pd
import numpy as np
import re
import seaborn as sns
pd.set_option('display.max_columns', None)
# Read data
footballers = pd.read_csv('data/CompleteDataset.csv', index_col=0)
### Some data pre-processing steps.
# Make a copy
df = footballers.copy(deep=True)
df['Unit'] = footballers['Value'].str[-1]
df['Value (M)'] = np.where(df['Unit'] == '0', 0,
                           df['Value'].str[1:-1].str.replace(r'[a-zA-Z]', '', regex=True))
df['Value (M)'] = df['Value (M)'].astype(float)
df['Value (M)'] = np.where(df['Unit'] == 'M',
df['Value (M)'],
df['Value (M)']/1000)
df = df.assign(Value=df['Value (M)'],
position=df['Preferred Positions'].str.split().str[0])
### The FacetGrid ###
# We're interested in comparing strikers with goalkeepers in some way.
data = df[df['position'].isin(['ST','GK'])]
g = sns.FacetGrid(data, col='position')
# We can use the map method to plot the data onto the laid-out grid
g.map(sns.kdeplot, "Overall")
# FacetGrid for all positions
g = sns.FacetGrid(df, col='position', col_wrap=6)
g.map(sns.kdeplot, 'Overall')
# Suppose we're interested in comparing the talent distribution across rival clubs
data = df[df['position'].isin(['ST', 'GK'])]
data = data[data['Club'].isin(['Real Madrid CF', 'FC Barcelona','Atlético Madrid'])]
g = sns.FacetGrid(data, row='position', col='Club')
g.map(sns.violinplot, 'Overall')
# We can order subplots
g = sns.FacetGrid(df, row='position', col='Club',
row_order=['GK', 'ST'],
col_order=['Atlético Madrid', 'FC Barcelona', 'Real Madrid CF'])
g.map(sns.violinplot, 'Overall')
### Pairplot ###
sns.pairplot(df[['Overall','Potential','Value']])
|
[
"[email protected]"
] | |
dd074b8b470a687ed2c08f6fea742702f346f792
|
b610b21ad9645bf099ad9ab0c024ccd212c36b53
|
/AdvancedBlogAPI/Todo/models.py
|
640f45a4439954103cdd19680e2772ee35ff14dd
|
[] |
no_license
|
co-codin/AdvancedBlogAPI
|
7c98aa36a2c9da3df6b23a2fbf6803d5f4ca3f43
|
f478eb272ed067f175dbcfc20efdc10601d5c8bc
|
refs/heads/master
| 2022-02-03T04:35:48.212984 | 2018-08-09T14:23:30 | 2018-08-09T14:23:30 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 226 |
py
|
from django.db import models
# Create your models here.
class Todo(models.Model):
text = models.CharField(max_length=120)
complete = models.BooleanField(default=False)
def __str__(self):
return self.text
|
[
"[email protected]"
] | |
80e47ba9061751983d8ba2a0898defab0b7c9e12
|
94c8dd4126da6e9fe9acb2d1769e1c24abe195d3
|
/test/python/visualization/timeline/test_events.py
|
b799fc7a36f5508ab72eec1b8df9e9c969e5d531
|
[
"Apache-2.0"
] |
permissive
|
levbishop/qiskit-terra
|
a75c2f96586768c12b51a117f9ccb7398b52843d
|
98130dd6158d1f1474e44dd5aeacbc619174ad63
|
refs/heads/master
| 2023-07-19T19:00:53.483204 | 2021-04-20T16:30:16 | 2021-04-20T16:30:16 | 181,052,828 | 1 | 0 |
Apache-2.0
| 2019-06-05T15:32:13 | 2019-04-12T17:20:54 |
Python
|
UTF-8
|
Python
| false | false | 6,989 |
py
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Tests for event of timeline drawer."""
import qiskit
from qiskit import QuantumCircuit, transpile
from qiskit.circuit import library
from qiskit.test import QiskitTestCase
from qiskit.visualization.timeline import events, types
class TestLoadScheduledCircuit(QiskitTestCase):
"""Test for loading program."""
def setUp(self) -> None:
"""Setup."""
super().setUp()
circ = QuantumCircuit(3)
circ.delay(100, 2)
circ.barrier(0, 1, 2)
circ.h(0)
circ.cx(0, 1)
self.circ = transpile(circ,
scheduling_method='alap',
basis_gates=['h', 'cx'],
instruction_durations=[('h', 0, 200), ('cx', [0, 1], 1000)],
optimization_level=0)
def test_create_from_program(self):
"""Test factory method."""
bit_event_q0 = events.BitEvents.load_program(self.circ, self.circ.qregs[0][0])
bit_event_q1 = events.BitEvents.load_program(self.circ, self.circ.qregs[0][1])
bit_event_q2 = events.BitEvents.load_program(self.circ, self.circ.qregs[0][2])
gates_q0 = list(bit_event_q0.get_gates())
links_q0 = list(bit_event_q0.get_gate_links())
barriers_q0 = list(bit_event_q0.get_barriers())
self.assertEqual(len(gates_q0), 3)
self.assertEqual(len(links_q0), 1)
self.assertEqual(len(barriers_q0), 1)
# h gate
self.assertEqual(gates_q0[1].t0, 100)
# cx gate
self.assertEqual(gates_q0[2].t0, 300)
# link
self.assertEqual(links_q0[0].t0, 800)
# barrier
self.assertEqual(barriers_q0[0].t0, 100)
gates_q1 = list(bit_event_q1.get_gates())
links_q1 = list(bit_event_q1.get_gate_links())
barriers_q1 = list(bit_event_q1.get_barriers())
self.assertEqual(len(gates_q1), 3)
self.assertEqual(len(links_q1), 0)
self.assertEqual(len(barriers_q1), 1)
# cx gate
        self.assertEqual(gates_q1[2].t0, 300)
# barrier
self.assertEqual(barriers_q1[0].t0, 100)
gates_q2 = list(bit_event_q2.get_gates())
links_q2 = list(bit_event_q2.get_gate_links())
barriers_q2 = list(bit_event_q2.get_barriers())
self.assertEqual(len(gates_q2), 2)
self.assertEqual(len(links_q2), 0)
self.assertEqual(len(barriers_q2), 1)
# barrier
self.assertEqual(barriers_q2[0].t0, 100)
class TestBitEvents(QiskitTestCase):
"""Tests for bit events."""
def setUp(self) -> None:
"""Setup."""
super().setUp()
self.qubits = list(qiskit.QuantumRegister(2))
self.clbits = list(qiskit.ClassicalRegister(2))
self.instructions = [
types.ScheduledGate(t0=0, operand=library.U1Gate(0),
duration=0, bits=[self.qubits[0]], bit_position=0),
types.ScheduledGate(t0=0, operand=library.U2Gate(0, 0),
duration=10, bits=[self.qubits[0]], bit_position=0),
types.ScheduledGate(t0=10, operand=library.CXGate(),
duration=50, bits=[self.qubits[0], self.qubits[1]],
bit_position=0),
types.ScheduledGate(t0=100, operand=library.U3Gate(0, 0, 0),
duration=20, bits=[self.qubits[0]], bit_position=0),
types.ScheduledGate(t0=120, operand=library.Barrier(2),
duration=0, bits=[self.qubits[0], self.qubits[1]],
bit_position=0),
types.ScheduledGate(t0=120, operand=library.CXGate(),
duration=50, bits=[self.qubits[1], self.qubits[0]],
bit_position=1),
types.ScheduledGate(t0=200, operand=library.Barrier(1),
duration=0, bits=[self.qubits[0]], bit_position=0),
types.ScheduledGate(t0=200, operand=library.Measure(),
duration=100, bits=[self.qubits[0], self.clbits[0]],
bit_position=0),
]
def test_gate_output(self):
"""Test gate output."""
bit_event = events.BitEvents(self.qubits[0], self.instructions, 300)
gates = list(bit_event.get_gates())
ref_list = [
types.ScheduledGate(t0=0, operand=library.U1Gate(0),
duration=0, bits=[self.qubits[0]], bit_position=0),
types.ScheduledGate(t0=0, operand=library.U2Gate(0, 0),
duration=10, bits=[self.qubits[0]], bit_position=0),
types.ScheduledGate(t0=10, operand=library.CXGate(),
duration=50, bits=[self.qubits[0], self.qubits[1]],
bit_position=0),
types.ScheduledGate(t0=100, operand=library.U3Gate(0, 0, 0),
duration=20, bits=[self.qubits[0]], bit_position=0),
types.ScheduledGate(t0=120, operand=library.CXGate(),
duration=50, bits=[self.qubits[1], self.qubits[0]],
bit_position=1),
types.ScheduledGate(t0=200, operand=library.Measure(),
duration=100, bits=[self.qubits[0], self.clbits[0]],
bit_position=0)
]
self.assertListEqual(gates, ref_list)
def test_barrier_output(self):
"""Test barrier output."""
bit_event = events.BitEvents(self.qubits[0], self.instructions, 200)
barriers = list(bit_event.get_barriers())
ref_list = [
types.Barrier(t0=120, bits=[self.qubits[0], self.qubits[1]], bit_position=0),
types.Barrier(t0=200, bits=[self.qubits[0]], bit_position=0)
]
self.assertListEqual(barriers, ref_list)
def test_bit_link_output(self):
"""Test link output."""
bit_event = events.BitEvents(self.qubits[0], self.instructions, 250)
links = list(bit_event.get_gate_links())
ref_list = [
types.GateLink(t0=35.0, opname=library.CXGate().name,
bits=[self.qubits[0], self.qubits[1]]),
types.GateLink(t0=250.0, opname=library.Measure().name,
bits=[self.qubits[0], self.clbits[0]])
]
self.assertListEqual(links, ref_list)
|
[
"[email protected]"
] | |
baeb46c0e91b8e8bacdcba90f5e8125da06e03ae
|
82b946da326148a3c1c1f687f96c0da165bb2c15
|
/sdk/python/pulumi_azure_native/notificationhubs/_inputs.py
|
35bb77487b128973f2ef800c02659554c2331d8b
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
morrell/pulumi-azure-native
|
3916e978382366607f3df0a669f24cb16293ff5e
|
cd3ba4b9cb08c5e1df7674c1c71695b80e443f08
|
refs/heads/master
| 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 |
Apache-2.0
| 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null |
UTF-8
|
Python
| false | false | 20,002 |
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from ._enums import *
__all__ = [
'AdmCredentialArgs',
'ApnsCredentialArgs',
'BaiduCredentialArgs',
'GcmCredentialArgs',
'MpnsCredentialArgs',
'SharedAccessAuthorizationRulePropertiesArgs',
'SkuArgs',
'WnsCredentialArgs',
]
@pulumi.input_type
class AdmCredentialArgs:
def __init__(__self__, *,
auth_token_url: Optional[pulumi.Input[str]] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret: Optional[pulumi.Input[str]] = None):
"""
Description of a NotificationHub AdmCredential.
:param pulumi.Input[str] auth_token_url: The URL of the authorization token.
:param pulumi.Input[str] client_id: The client identifier.
:param pulumi.Input[str] client_secret: The credential secret access key.
"""
if auth_token_url is not None:
pulumi.set(__self__, "auth_token_url", auth_token_url)
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if client_secret is not None:
pulumi.set(__self__, "client_secret", client_secret)
@property
@pulumi.getter(name="authTokenUrl")
def auth_token_url(self) -> Optional[pulumi.Input[str]]:
"""
The URL of the authorization token.
"""
return pulumi.get(self, "auth_token_url")
@auth_token_url.setter
def auth_token_url(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "auth_token_url", value)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> Optional[pulumi.Input[str]]:
"""
The client identifier.
"""
return pulumi.get(self, "client_id")
@client_id.setter
def client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_id", value)
@property
@pulumi.getter(name="clientSecret")
def client_secret(self) -> Optional[pulumi.Input[str]]:
"""
The credential secret access key.
"""
return pulumi.get(self, "client_secret")
@client_secret.setter
def client_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_secret", value)
@pulumi.input_type
class ApnsCredentialArgs:
def __init__(__self__, *,
apns_certificate: Optional[pulumi.Input[str]] = None,
app_id: Optional[pulumi.Input[str]] = None,
app_name: Optional[pulumi.Input[str]] = None,
certificate_key: Optional[pulumi.Input[str]] = None,
endpoint: Optional[pulumi.Input[str]] = None,
key_id: Optional[pulumi.Input[str]] = None,
thumbprint: Optional[pulumi.Input[str]] = None,
token: Optional[pulumi.Input[str]] = None):
"""
Description of a NotificationHub ApnsCredential.
:param pulumi.Input[str] apns_certificate: The APNS certificate. Specify if using Certificate Authentication Mode.
:param pulumi.Input[str] app_id: The issuer (iss) registered claim key. The value is a 10-character TeamId, obtained from your developer account. Specify if using Token Authentication Mode.
:param pulumi.Input[str] app_name: The name of the application or BundleId. Specify if using Token Authentication Mode.
:param pulumi.Input[str] certificate_key: The APNS certificate password if it exists.
:param pulumi.Input[str] endpoint: The APNS endpoint of this credential. If using Certificate Authentication Mode and Sandbox specify 'gateway.sandbox.push.apple.com'. If using Certificate Authentication Mode and Production specify 'gateway.push.apple.com'. If using Token Authentication Mode and Sandbox specify 'https://api.development.push.apple.com:443/3/device'. If using Token Authentication Mode and Production specify 'https://api.push.apple.com:443/3/device'.
:param pulumi.Input[str] key_id: A 10-character key identifier (kid) key, obtained from your developer account. Specify if using Token Authentication Mode.
:param pulumi.Input[str] thumbprint: The APNS certificate thumbprint. Specify if using Certificate Authentication Mode.
:param pulumi.Input[str] token: Provider Authentication Token, obtained through your developer account. Specify if using Token Authentication Mode.
"""
if apns_certificate is not None:
pulumi.set(__self__, "apns_certificate", apns_certificate)
if app_id is not None:
pulumi.set(__self__, "app_id", app_id)
if app_name is not None:
pulumi.set(__self__, "app_name", app_name)
if certificate_key is not None:
pulumi.set(__self__, "certificate_key", certificate_key)
if endpoint is not None:
pulumi.set(__self__, "endpoint", endpoint)
if key_id is not None:
pulumi.set(__self__, "key_id", key_id)
if thumbprint is not None:
pulumi.set(__self__, "thumbprint", thumbprint)
if token is not None:
pulumi.set(__self__, "token", token)
@property
@pulumi.getter(name="apnsCertificate")
def apns_certificate(self) -> Optional[pulumi.Input[str]]:
"""
The APNS certificate. Specify if using Certificate Authentication Mode.
"""
return pulumi.get(self, "apns_certificate")
@apns_certificate.setter
def apns_certificate(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "apns_certificate", value)
@property
@pulumi.getter(name="appId")
def app_id(self) -> Optional[pulumi.Input[str]]:
"""
The issuer (iss) registered claim key. The value is a 10-character TeamId, obtained from your developer account. Specify if using Token Authentication Mode.
"""
return pulumi.get(self, "app_id")
@app_id.setter
def app_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "app_id", value)
@property
@pulumi.getter(name="appName")
def app_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the application or BundleId. Specify if using Token Authentication Mode.
"""
return pulumi.get(self, "app_name")
@app_name.setter
def app_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "app_name", value)
@property
@pulumi.getter(name="certificateKey")
def certificate_key(self) -> Optional[pulumi.Input[str]]:
"""
The APNS certificate password if it exists.
"""
return pulumi.get(self, "certificate_key")
@certificate_key.setter
def certificate_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "certificate_key", value)
@property
@pulumi.getter
def endpoint(self) -> Optional[pulumi.Input[str]]:
"""
The APNS endpoint of this credential. If using Certificate Authentication Mode and Sandbox specify 'gateway.sandbox.push.apple.com'. If using Certificate Authentication Mode and Production specify 'gateway.push.apple.com'. If using Token Authentication Mode and Sandbox specify 'https://api.development.push.apple.com:443/3/device'. If using Token Authentication Mode and Production specify 'https://api.push.apple.com:443/3/device'.
"""
return pulumi.get(self, "endpoint")
@endpoint.setter
def endpoint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "endpoint", value)
@property
@pulumi.getter(name="keyId")
def key_id(self) -> Optional[pulumi.Input[str]]:
"""
A 10-character key identifier (kid) key, obtained from your developer account. Specify if using Token Authentication Mode.
"""
return pulumi.get(self, "key_id")
@key_id.setter
def key_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_id", value)
@property
@pulumi.getter
def thumbprint(self) -> Optional[pulumi.Input[str]]:
"""
The APNS certificate thumbprint. Specify if using Certificate Authentication Mode.
"""
return pulumi.get(self, "thumbprint")
@thumbprint.setter
def thumbprint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "thumbprint", value)
@property
@pulumi.getter
def token(self) -> Optional[pulumi.Input[str]]:
"""
Provider Authentication Token, obtained through your developer account. Specify if using Token Authentication Mode.
"""
return pulumi.get(self, "token")
@token.setter
def token(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "token", value)
@pulumi.input_type
class BaiduCredentialArgs:
def __init__(__self__, *,
baidu_api_key: Optional[pulumi.Input[str]] = None,
baidu_end_point: Optional[pulumi.Input[str]] = None,
baidu_secret_key: Optional[pulumi.Input[str]] = None):
"""
Description of a NotificationHub BaiduCredential.
:param pulumi.Input[str] baidu_api_key: Baidu Api Key.
:param pulumi.Input[str] baidu_end_point: Baidu Endpoint.
:param pulumi.Input[str] baidu_secret_key: Baidu Secret Key
"""
if baidu_api_key is not None:
pulumi.set(__self__, "baidu_api_key", baidu_api_key)
if baidu_end_point is not None:
pulumi.set(__self__, "baidu_end_point", baidu_end_point)
if baidu_secret_key is not None:
pulumi.set(__self__, "baidu_secret_key", baidu_secret_key)
@property
@pulumi.getter(name="baiduApiKey")
def baidu_api_key(self) -> Optional[pulumi.Input[str]]:
"""
Baidu Api Key.
"""
return pulumi.get(self, "baidu_api_key")
@baidu_api_key.setter
def baidu_api_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "baidu_api_key", value)
@property
@pulumi.getter(name="baiduEndPoint")
def baidu_end_point(self) -> Optional[pulumi.Input[str]]:
"""
Baidu Endpoint.
"""
return pulumi.get(self, "baidu_end_point")
@baidu_end_point.setter
def baidu_end_point(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "baidu_end_point", value)
@property
@pulumi.getter(name="baiduSecretKey")
def baidu_secret_key(self) -> Optional[pulumi.Input[str]]:
"""
Baidu Secret Key
"""
return pulumi.get(self, "baidu_secret_key")
@baidu_secret_key.setter
def baidu_secret_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "baidu_secret_key", value)
@pulumi.input_type
class GcmCredentialArgs:
def __init__(__self__, *,
gcm_endpoint: Optional[pulumi.Input[str]] = None,
google_api_key: Optional[pulumi.Input[str]] = None):
"""
Description of a NotificationHub GcmCredential.
:param pulumi.Input[str] gcm_endpoint: The FCM legacy endpoint. Default value is 'https://fcm.googleapis.com/fcm/send'
:param pulumi.Input[str] google_api_key: The Google API key.
"""
if gcm_endpoint is not None:
pulumi.set(__self__, "gcm_endpoint", gcm_endpoint)
if google_api_key is not None:
pulumi.set(__self__, "google_api_key", google_api_key)
@property
@pulumi.getter(name="gcmEndpoint")
def gcm_endpoint(self) -> Optional[pulumi.Input[str]]:
"""
The FCM legacy endpoint. Default value is 'https://fcm.googleapis.com/fcm/send'
"""
return pulumi.get(self, "gcm_endpoint")
@gcm_endpoint.setter
def gcm_endpoint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "gcm_endpoint", value)
@property
@pulumi.getter(name="googleApiKey")
def google_api_key(self) -> Optional[pulumi.Input[str]]:
"""
The Google API key.
"""
return pulumi.get(self, "google_api_key")
@google_api_key.setter
def google_api_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "google_api_key", value)
@pulumi.input_type
class MpnsCredentialArgs:
def __init__(__self__, *,
certificate_key: Optional[pulumi.Input[str]] = None,
mpns_certificate: Optional[pulumi.Input[str]] = None,
thumbprint: Optional[pulumi.Input[str]] = None):
"""
Description of a NotificationHub MpnsCredential.
:param pulumi.Input[str] certificate_key: The certificate key for this credential.
:param pulumi.Input[str] mpns_certificate: The MPNS certificate.
:param pulumi.Input[str] thumbprint: The MPNS certificate Thumbprint
"""
if certificate_key is not None:
pulumi.set(__self__, "certificate_key", certificate_key)
if mpns_certificate is not None:
pulumi.set(__self__, "mpns_certificate", mpns_certificate)
if thumbprint is not None:
pulumi.set(__self__, "thumbprint", thumbprint)
@property
@pulumi.getter(name="certificateKey")
def certificate_key(self) -> Optional[pulumi.Input[str]]:
"""
The certificate key for this credential.
"""
return pulumi.get(self, "certificate_key")
@certificate_key.setter
def certificate_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "certificate_key", value)
@property
@pulumi.getter(name="mpnsCertificate")
def mpns_certificate(self) -> Optional[pulumi.Input[str]]:
"""
The MPNS certificate.
"""
return pulumi.get(self, "mpns_certificate")
@mpns_certificate.setter
def mpns_certificate(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "mpns_certificate", value)
@property
@pulumi.getter
def thumbprint(self) -> Optional[pulumi.Input[str]]:
"""
The MPNS certificate Thumbprint
"""
return pulumi.get(self, "thumbprint")
@thumbprint.setter
def thumbprint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "thumbprint", value)
@pulumi.input_type
class SharedAccessAuthorizationRulePropertiesArgs:
def __init__(__self__, *,
rights: Optional[pulumi.Input[Sequence[pulumi.Input['AccessRights']]]] = None):
"""
SharedAccessAuthorizationRule properties.
:param pulumi.Input[Sequence[pulumi.Input['AccessRights']]] rights: The rights associated with the rule.
"""
if rights is not None:
pulumi.set(__self__, "rights", rights)
@property
@pulumi.getter
def rights(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AccessRights']]]]:
"""
The rights associated with the rule.
"""
return pulumi.get(self, "rights")
@rights.setter
def rights(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AccessRights']]]]):
pulumi.set(self, "rights", value)
@pulumi.input_type
class SkuArgs:
def __init__(__self__, *,
name: pulumi.Input[Union[str, 'SkuName']],
capacity: Optional[pulumi.Input[int]] = None,
family: Optional[pulumi.Input[str]] = None,
size: Optional[pulumi.Input[str]] = None,
tier: Optional[pulumi.Input[str]] = None):
"""
The Sku description for a namespace
:param pulumi.Input[Union[str, 'SkuName']] name: Name of the notification hub sku
:param pulumi.Input[int] capacity: The capacity of the resource
:param pulumi.Input[str] family: The Sku Family
:param pulumi.Input[str] size: The Sku size
:param pulumi.Input[str] tier: The tier of particular sku
"""
pulumi.set(__self__, "name", name)
if capacity is not None:
pulumi.set(__self__, "capacity", capacity)
if family is not None:
pulumi.set(__self__, "family", family)
if size is not None:
pulumi.set(__self__, "size", size)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def name(self) -> pulumi.Input[Union[str, 'SkuName']]:
"""
Name of the notification hub sku
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[Union[str, 'SkuName']]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def capacity(self) -> Optional[pulumi.Input[int]]:
"""
The capacity of the resource
"""
return pulumi.get(self, "capacity")
@capacity.setter
def capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "capacity", value)
@property
@pulumi.getter
def family(self) -> Optional[pulumi.Input[str]]:
"""
The Sku Family
"""
return pulumi.get(self, "family")
@family.setter
def family(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "family", value)
@property
@pulumi.getter
def size(self) -> Optional[pulumi.Input[str]]:
"""
The Sku size
"""
return pulumi.get(self, "size")
@size.setter
def size(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "size", value)
@property
@pulumi.getter
def tier(self) -> Optional[pulumi.Input[str]]:
"""
The tier of particular sku
"""
return pulumi.get(self, "tier")
@tier.setter
def tier(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tier", value)
@pulumi.input_type
class WnsCredentialArgs:
def __init__(__self__, *,
package_sid: Optional[pulumi.Input[str]] = None,
secret_key: Optional[pulumi.Input[str]] = None,
windows_live_endpoint: Optional[pulumi.Input[str]] = None):
"""
Description of a NotificationHub WnsCredential.
:param pulumi.Input[str] package_sid: The package ID for this credential.
:param pulumi.Input[str] secret_key: The secret key.
:param pulumi.Input[str] windows_live_endpoint: The Windows Live endpoint.
"""
if package_sid is not None:
pulumi.set(__self__, "package_sid", package_sid)
if secret_key is not None:
pulumi.set(__self__, "secret_key", secret_key)
if windows_live_endpoint is not None:
pulumi.set(__self__, "windows_live_endpoint", windows_live_endpoint)
@property
@pulumi.getter(name="packageSid")
def package_sid(self) -> Optional[pulumi.Input[str]]:
"""
The package ID for this credential.
"""
return pulumi.get(self, "package_sid")
@package_sid.setter
def package_sid(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "package_sid", value)
@property
@pulumi.getter(name="secretKey")
def secret_key(self) -> Optional[pulumi.Input[str]]:
"""
The secret key.
"""
return pulumi.get(self, "secret_key")
@secret_key.setter
def secret_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "secret_key", value)
@property
@pulumi.getter(name="windowsLiveEndpoint")
def windows_live_endpoint(self) -> Optional[pulumi.Input[str]]:
"""
The Windows Live endpoint.
"""
return pulumi.get(self, "windows_live_endpoint")
@windows_live_endpoint.setter
def windows_live_endpoint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "windows_live_endpoint", value)
|
[
"[email protected]"
] | |
4ae64eb893a2bc7e08bc9513e6d66cb09bb75910
|
d9c95cd0efad0788bf17672f6a4ec3b29cfd2e86
|
/disturbance/migrations/0028_auto_20200505_1219.py
|
64dd3427825f2d5b508840eb281397175b84262b
|
[
"Apache-2.0"
] |
permissive
|
Djandwich/disturbance
|
cb1d25701b23414cd91e3ac5b0207618cd03a7e5
|
b1ba1404b9ca7c941891ea42c00b9ff9bcc41237
|
refs/heads/master
| 2023-05-05T19:52:36.124923 | 2021-06-03T06:37:53 | 2021-06-03T06:37:53 | 259,816,629 | 1 | 1 |
NOASSERTION
| 2021-06-03T09:46:46 | 2020-04-29T03:39:33 |
Python
|
UTF-8
|
Python
| false | false | 1,225 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2020-05-05 04:19
from __future__ import unicode_literals
import disturbance.components.compliances.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('disturbance', '0027_auto_20200505_1156'),
]
operations = [
migrations.CreateModel(
name='OnSiteInformation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('period_from', models.DateField(blank=True, null=True)),
('period_to', models.DateField(blank=True, null=True)),
('comments', models.TextField(blank=True)),
('apiary_site', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='disturbance.ApiarySite')),
],
),
migrations.AlterField(
model_name='compliancedocument',
name='_file',
field=models.FileField(max_length=500, upload_to=disturbance.components.compliances.models.update_proposal_complaince_filename),
),
]
|
[
"[email protected]"
] | |
8583f5bb6cababfed20301df5b0a8835b294325b
|
c7330806e61bb03e69e859b2ed33ae42fc7916e6
|
/Discovery/Content/CollectionBook.py
|
74ca10f7499a72757dade2cde554ef893670d284
|
[] |
no_license
|
ixfalia/ZeroProjects
|
ec2f91000a5ce014f7413f32873b10fb01a3ed20
|
17bd2c0f9c3a5ef3705b008f6b128d589aef4168
|
refs/heads/master
| 2020-06-03T05:20:44.370134 | 2019-06-24T05:01:18 | 2019-06-24T05:01:18 | 191,456,493 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,520 |
py
|
import Zero
import Events
import Property
import VectorMath
class Marvel:
def __init__(self, name, text, discovered = False, color = None):
self.Name = name
self.Text = text
self.Discovered = discovered
#endclass
class Collectable:
def __init__(self, name, amount, text = None):
self.Name = name
self.Amount = amount
self.Text = text
class CollectionBook:
def DefineProperties(self):
#self.Lives = Property.Int(9)
self.CollectionResource = Property.ResourceTable()
pass
def Initialize(self, initializer):
self.CollectionTable = {}
self.MarvelTable = {}
self.Marvels = {}
Zero.Connect(self.Owner, "CollectionEvent", self.onCollection)
Zero.Connect(self.Owner, "MarvelEvent", self.onMarvel)
pass
def addMarvel(self, name, text, discovered = False):
self.MarvelTable[name] = Marvel(name, text, discovered)
m = self.getMarvel(name)
print(m.Name, m.Discovered, m.Text)
print(self.MarvelTable)
def discoverMarvel(self, name, discoverState = True):
last = self.MarvelTable[name].Discovered
self.Marvels[name] = discoverState
self.MarvelTable[name].Discovered = discoverState
if not last and discoverState:
self.makeMarvelMessage("marvel")
else:
self.makeMarvelMessage("gotit")
def makeMarvelMessage(self, name):
children = self.Owner.PlayerTracker.Camera.Children
for child in children:
if child.Name == name:
child.Celebration.onActivation(None)
def getMarvel(self, name):
if name in self.MarvelTable:
return self.MarvelTable[name]
else:
return None
def getMarvelCount(self):
count = 0
        for m in self.MarvelTable.values():
if m.Discovered:
count += 1
return count
def getTotalMarvels(self):
return len(self.MarvelTable)
def addCollection(self, name, amount = 1, description = None):
if not name in self.CollectionTable:
self.CollectionTable[name] = 0
self.CollectionTable[name] += amount #to just add entry put amount = 0, to subtract just put amount = -#
def getCollection(self, name):
if name in self.CollectionTable:
return self.CollectionTable[name]
else:
return None
def onCollection(self, CollectionEvent):
name = CollectionEvent.Name
if CollectionEvent.Amount:
amount = CollectionEvent.Amount
else:
amount = 1
        description = None
        if CollectionEvent.TextBlock:
            description = CollectionEvent.TextBlock
        self.addCollection(name, amount, description)
def onMarvel(self, MarvelEvent):
name = MarvelEvent.Name
discovered = MarvelEvent.Discovered
print("onMarvel:", name in self.MarvelTable, self.MarvelTable)
if not name in self.MarvelTable:
self.addMarvel(name, MarvelEvent.Text)
print("onMarvel after Add:", self.MarvelTable)
else:
self.discoverMarvel(name, discovered)
Zero.RegisterComponent("CollectionBook", CollectionBook)
|
[
"[email protected]"
] | |
326d0c7de68e073287f3a396f8a137a169102766
|
139af68b78734a6bc53bd942ffa05476baf3d71d
|
/PYTHON OOP/Previous Exams/Python OOP Exam - 10 Apr 2021/exam_skeleton/project/aquarium/aquarium/freshwater_aquarium.py
|
d117fdaea034b8266d55f12414f36f536bf8cbd0
|
[] |
no_license
|
MiroVatov/Python-SoftUni
|
7fe3fc0a3928848c5317fb120f789c773bfc117e
|
0d0d6f116281b4de8c413d254386e27d992d047b
|
refs/heads/main
| 2023-08-24T09:44:31.261137 | 2021-10-18T14:04:03 | 2021-10-18T14:04:03 | 317,510,574 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 234 |
py
|
from project.aquarium.base_aquarium import BaseAquarium
class FreshwaterAquarium(BaseAquarium):
def __init__(self, name):
super().__init__(name, capacity=50)
self.aquarium_fish_type = "FreshwaterFish"
|
[
"[email protected]"
] | |
60cfc9014c29b483d349ca6430bea75baef9e675
|
5164dc11387ac2bab5d4bcabf7c1ce0e63cdbaaa
|
/appenlight_client/ext/logging/logbook.py
|
5ec8ef3a02b434ec8892e436aead2f68d0adc3fe
|
[
"BSD-3-Clause"
] |
permissive
|
jpwilliams/appenlight-client-python
|
ba472035794556c6624284150b5724c280eae7b0
|
9f3b0fd7d7035bcafb11e686c218dd1393909912
|
refs/heads/master
| 2020-12-24T16:15:00.770966 | 2015-09-01T15:31:45 | 2015-09-01T15:31:45 | 41,746,574 | 0 | 0 | null | 2015-09-01T15:23:01 | 2015-09-01T15:23:00 | null |
UTF-8
|
Python
| false | false | 3,050 |
py
|
from __future__ import absolute_import
import logbook
import logging
import threading
import datetime
import time
from appenlight_client.ext.logging import EXCLUDED_LOG_VARS
from appenlight_client.timing import get_local_storage
from appenlight_client.utils import asbool, parse_tag, PY3
log = logging.getLogger(__name__)
class ThreadLocalHandler(logbook.Handler):
def __init__(self, client_config=None, *args, **kwargs):
logbook.Handler.__init__(self, *args, **kwargs)
self.ae_client_config = client_config
def emit(self, record):
appenlight_storage = get_local_storage()
r_dict = convert_record_to_dict(record, self.ae_client_config)
if r_dict:
if r_dict not in appenlight_storage.logs:
appenlight_storage.logs.append(r_dict)
def get_records(self, thread=None):
"""
Returns a list of records for the current thread.
"""
appenlight_storage = get_local_storage()
return appenlight_storage.logs
def clear_records(self, thread=None):
""" Clears ALL logs from AE storage """
appenlight_storage = get_local_storage()
appenlight_storage.logs = []
def convert_record_to_dict(record, client_config):
if record.channel in client_config.get('log_namespace_blacklist', []):
return None
if not getattr(record, 'time'):
time_string = datetime.datetime.utcnow().isoformat()
else:
time_string = record.time.isoformat()
try:
message = record.msg
tags_list = []
log_dict = {'log_level': record.level_name,
"namespace": record.channel,
'server': client_config.get('server_name', 'unknown'),
'date': time_string,
'request_id': None}
if PY3:
log_dict['message'] = '%s' % message
else:
msg = message.encode('utf8') if isinstance(message,
unicode) else message
log_dict['message'] = '%s' % msg
if client_config.get('logging_attach_exc_text'):
pass
# populate tags from extra
        for k, v in record.extra.items():
if k not in EXCLUDED_LOG_VARS:
try:
tags_list.append(parse_tag(k, v))
if k == 'ae_primary_key':
log_dict['primary_key'] = unicode(v)
if k == 'ae_permanent':
try:
log_dict['permanent'] = asbool(v)
except Exception:
log_dict['permanent'] = True
except Exception as e:
log.info(u'Couldn\'t convert attached tag %s' % e)
if tags_list:
log_dict['tags'] = tags_list
return log_dict
except (TypeError, UnicodeDecodeError, UnicodeEncodeError) as e:
# handle some weird case where record.getMessage() fails
log.warning(e)
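# Hypothetical usage sketch (added; not from the original module): bind the
# handler so records emitted inside the block are captured into appenlight's
# thread-local storage. The `client_config` keys shown are assumptions.
#
# handler = ThreadLocalHandler(client_config={'server_name': 'web01'})
# with handler.applicationbound():
#     logbook.Logger('app').warning('something happened')
# records = handler.get_records()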
|
[
"[email protected]"
] | |
432c617ff8df039ffb316cefcc9e72a422d2bb97
|
39b9a28d721ef3a06726004408bd9788ddcaf0bd
|
/omokAI/Omok.py
|
2559dcd299b977c26a2a32025df4e7c31faa3e05
|
[] |
no_license
|
handaeho/Alpha_Omokjomok
|
560de9c4a3376d4b3d46f43592bc38ec39fa630e
|
8760a9b4c5748715fd811e4c569701e63c7c6bc7
|
refs/heads/master
| 2023-03-19T04:11:34.762095 | 2023-03-13T14:16:55 | 2023-03-13T14:16:55 | 241,247,484 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,590 |
py
|
from rule import *
from pygame.locals import *
import random
import pygame, sys
bg_color = (128, 128, 128)
black = (0, 0, 0)
blue = (0, 50, 255)
white = (255, 255, 255)
red = (255, 0, 0)
green = (0, 200, 0)
window_width = 800
window_height = 500
board_width = 500
grid_size = 30
fps = 60
fps_clock = pygame.time.Clock()
class Omok(object):
def __init__(self, surface):
self.board = [[0 for i in range(board_size)] for j in range(board_size)]
self.menu = Menu(surface)
self.rule = Rule(self.board)
self.surface = surface
self.pixel_coords = []
self.set_coords()
self.set_image_font()
self.is_show = True
def init_game(self):
self.turn = black_stone
self.draw_board()
self.menu.show_msg(empty)
self.init_board()
self.coords = []
self.id = 1
self.is_gameover = False
self.is_forbidden = False
def set_image_font(self):
black_img = pygame.image.load('image/black.png')
white_img = pygame.image.load('image/white.png')
self.last_w_img = pygame.image.load('image/white_a.png')
self.last_b_img = pygame.image.load('image/black_a.png')
self.board_img = pygame.image.load('image/board.png')
self.forbidden_img = pygame.image.load('image/forbidden.png')
self.font = pygame.font.Font("freesansbold.ttf", 14)
self.black_img = pygame.transform.scale(black_img, (grid_size, grid_size))
self.white_img = pygame.transform.scale(white_img, (grid_size, grid_size))
def init_board(self):
for y in range(board_size):
for x in range(board_size):
self.board[y][x] = 0
def draw_board(self):
self.surface.blit(self.board_img, (0, 0))
def draw_image(self, img_index, x, y):
img = [self.black_img, self.white_img, self.last_b_img, self.last_w_img, self.forbidden_img]
self.surface.blit(img[img_index], (x, y))
def show_number(self, x, y, stone, number):
colors = [white, black, red, red]
color = colors[stone]
self.menu.make_text(self.font, str(number), color, None, y + 15, x + 15, 1)
def hide_numbers(self):
for i in range(len(self.coords)):
x, y = self.coords[i]
self.draw_image(i % 2, x, y)
if self.coords:
x, y = self.coords[-1]
self.draw_image(i % 2 + 2, x, y)
def show_numbers(self):
for i in range(len(self.coords)):
x, y = self.coords[i]
self.show_number(x, y, i % 2, i + 1)
if self.coords:
x, y = self.coords[-1]
self.draw_image(i % 2, x, y)
self.show_number(x, y, i % 2 + 2, i + 1)
def check_forbidden(self):
if self.turn == black_stone:
coords = self.rule.get_forbidden_points(self.turn)
while coords:
x, y = coords.pop()
x, y = x * grid_size + 25, y * grid_size + 25
self.draw_image(4, x, y)
self.is_forbidden = True
def draw_stone(self, coord, stone, increase):
if self.is_forbidden:
self.draw_board()
x, y = self.get_point(coord)
self.board[y][x] = stone
self.hide_numbers()
if self.is_show:
self.show_numbers()
self.id += increase
self.turn = white_stone
self.check_forbidden()
def draw_stone_w(self, coord, stone, increase):
if self.is_forbidden:
self.draw_board()
x, y = self.get_point(coord)
        self.board[y][x] = stone
self.hide_numbers()
if self.is_show:
self.show_numbers()
self.id += increase
self.turn = black_stone
self.check_forbidden()
def set_coords(self):
for y in range(board_size):
for x in range(board_size):
self.pixel_coords.append((x * grid_size + 25, y * grid_size + 25))
def get_pixel(self, x, y):
x1 = (x * 30) + 25
y1 = (y * 30) + 25
return x1, y1
def get_coord(self, pos):
for coord in self.pixel_coords:
x, y = coord
rect = pygame.Rect(x, y, grid_size, grid_size)
if rect.collidepoint(pos):
return coord
return None
def get_point(self, coord):
x, y = coord
x = (x - 25) // grid_size
y = (y - 25) // grid_size
return x, y
def check_board_black(self, pos):
coord = self.get_coord(pos)
if not coord:
return False
x, y = self.get_point(coord)
if self.board[y][x] != empty:
print("occupied")
return True
if self.turn == black_stone:
if self.rule.forbidden_point(x, y, self.turn):
print("forbidden point")
return True
self.coords.append(coord)
self.draw_stone(coord, self.turn, 1)
if self.check_gameover(coord, 3 - self.turn):
self.is_gameover = True
for x in range(0, 15):
print(self.board[x][0], self.board[x][1], self.board[x][2], self.board[x][3], self.board[x][4],
self.board[x][5], self.board[x][6], self.board[x][7], self.board[x][8],
self.board[x][9], self.board[x][10], self.board[x][11], self.board[x][12], self.board[x][13],
self.board[x][14])
print("\n")
return True
def check_board_white(self, y, x):
coord = self.get_pixel(x, y)
self.coords.append(coord)
self.draw_stone_w(coord, self.turn, 1)
if self.check_gameover(coord, 3 - self.turn):
self.is_gameover = True
for x in range(0, 15):
print(self.board[x][0], self.board[x][1], self.board[x][2], self.board[x][3], self.board[x][4],
self.board[x][5], self.board[x][6], self.board[x][7], self.board[x][8],
self.board[x][9], self.board[x][10], self.board[x][11], self.board[x][12], self.board[x][13],
self.board[x][14])
print("\n")
return True
def check_gameover(self, coord, stone):
x, y = self.get_point(coord)
if self.id > board_size * board_size:
self.show_winner_msg(stone)
return True
elif self.rule.is_gameover(x, y, stone):
self.show_winner_msg(stone)
return True
return False
def show_winner_msg(self, stone):
for i in range(3):
self.menu.show_msg(stone)
pygame.display.update()
pygame.time.delay(200)
self.menu.show_msg(empty)
pygame.display.update()
pygame.time.delay(200)
self.menu.show_msg(stone)
class Menu(object):
def __init__(self, surface):
self.font = pygame.font.Font('freesansbold.ttf', 20)
self.surface = surface
self.draw_menu()
def draw_menu(self):
top, left = window_height - 30, window_width - 200
self.new_rect = self.make_text(self.font, 'New Game', blue, None, top - 30, left)
self.quit_rect = self.make_text(self.font, 'Quit Game', blue, None, top, left)
def show_msg(self, msg_id):
msg = {
empty: ' ',
black_stone: 'Black win!!!',
white_stone: 'White win!!!',
tie: 'Tie',
}
center_x = window_width - (window_width - board_width) // 2
self.make_text(self.font, msg[msg_id], black, bg_color, 30, center_x, 1)
def make_text(self, font, text, color, bgcolor, top, left, position=0):
surf = font.render(text, False, color, bgcolor)
rect = surf.get_rect()
if position:
rect.center = (left, top)
else:
rect.topleft = (left, top)
self.surface.blit(surf, rect)
return rect
def check_rect(self, pos, omok):
if self.new_rect.collidepoint(pos):
return True
elif self.quit_rect.collidepoint(pos):
            self.terminate()
return False
def terminate(self):
pygame.quit()
sys.exit()
def is_continue(self, omok):
while True:
for event in pygame.event.get():
if event.type == QUIT:
self.terminate()
elif event.type == MOUSEBUTTONUP:
if (self.check_rect(event.pos, omok)):
return
pygame.display.update()
fps_clock.tick(fps)
|
[
"[email protected]"
] | |
da196e42ba4f3cf221f3ee5836971d6489da71ed
|
95d1dd5758076c0a9740d545a6ef2b5e5bb8c120
|
/PY/algorithm/expression_evaluation.py
|
897fa67d8b5d6359098f536be23c4e30788a29fd
|
[] |
no_license
|
icoding2016/study
|
639cb0ad2fe80f43b6c93c4415dc6e8a11390c85
|
11618c34156544f26b3b27886b55c771305b2328
|
refs/heads/master
| 2023-08-31T14:15:42.796754 | 2023-08-31T05:28:38 | 2023-08-31T05:28:38 | 117,061,872 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,254 |
py
|
# Expression Evaluation (infix)
# Medium
#
# to support basic calculation with:
# - operator: + - * / ^
# - operand: int/float
# - Parenthesis are permitted
# - blank(s) allowed in the expression
#
#
# Solution includes 2 parts:
# 1) parse the str, identify operators and operands
# 2) Expression Evaluate
# Method ref: http://csis.pace.edu/~murthy/ProgrammingProblems/Programming_Problems.htm#16_Evaluation_of_infix_expressions
#
# Expression Evaluate:
# Use two stacks:
# Operand stack: to keep values (numbers) and
#    Operator stack: to keep operators (+, -, *, / and ^).
# In the following, “process” means,
# (i) pop operand stack once (value2)
# (ii) pop operator stack once (operator)
# (iii) pop operand stack again (value1)
# (iv) compute value1 operator value2
# (v) push the value obtained in operand stack.
# Algorithm:
# Until the end of the expression is reached, get one character and perform only one of the steps (a) through (f):
# (a) If the character is an operand, push it onto the operand stack.
# (b) If the character is an operator, and the operator stack is empty then push it onto the operator stack.
# (c) If the character is an operator and the operator stack is not empty, and the character's precedence is
# greater than the precedence of the stack top of operator stack, then push the character onto the operator stack.
# (d) If the character is "(", then push it onto operator stack.
# (e) If the character is ")", then "process" as explained above until the corresponding "(" is encountered in operator stack.
#    At this stage POP the operator stack and ignore "(".
# (f) If cases (a), (b), (c), (d) and (e) do not apply, then process as explained above.
# When there are no more input characters, keep processing until the operator stack becomes empty.
# The values left in the operand stack is the final result of the expression.
#
# e.g.
# 10*3-((6+5)-2*4)^2+20/4 = 26.0
#
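# Worked trace (added illustration) of the two-stack algorithm on "3+5*2-1":
#   read 3 -> operands [3];  read + -> operators [+];  read 5 -> operands [3, 5]
#   read * -> * outranks +, push: operators [+, *];    read 2 -> operands [3, 5, 2]
#   read - -> - does not outrank *, so process 5*2=10 -> operands [3, 10];
#             - does not outrank +, so process 3+10=13 -> operands [13];
#             then push -: operators [-]
#   read 1 -> operands [13, 1];  end of input -> process 13-1=12
# Result: 12, matching the first test case at the bottom of this file.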
class InvalidInput(Exception):
pass
class ExpressionEvaluator(object):
OPERATOR = ['+', '-', '*', '/', '^', '(', ')']
PRIO = {'(':4, ')':4, '^':3, '*':2, '/':2, '+':1, '-':1}
def __init__(self):
#self._init_evaluator()
pass
def _init_evaluator(self):
self.operators = []
self.operands = []
def parse(self, expression: str) -> list[str]:
"""Prase the expression str, split the operators and operand into a list."""
output = []
number = ''
number_exp = [str(i) for i in range(0,10)] + ['.']
        expression = expression.replace(' ', '')  # remove blanks
for c in expression:
if c in self.OPERATOR:
if number:
output.append(number)
number = ''
output.append(c)
elif c in number_exp:
number += c
else:
raise InvalidInput('Expression contains invalid operator/number')
if number:
output.append(number)
# print(output)
return output
def evaluate(self, expression: str):
self._init_evaluator()
if len(expression) < 3:
raise InvalidInput('Invalid input.')
for c in self.parse(expression):
if c not in self.OPERATOR:
self.operands.append(c)
elif not self.operators:
self.operators.append(c)
elif c == '(':
self.operators.append(c)
elif c == ')':
# process until reach '('
while self.operators[-1]!='(':
self.process()
if self.operators[-1]=='(':
self.operators.pop() # pop '('
elif self.PRIO[c] > self.PRIO[self.operators[-1]]:
self.operators.append(c)
elif self.PRIO[c] <= self.PRIO[self.operators[-1]]:
while self.operators and self.operators[-1]!='(':
self.process()
self.operators.append(c)
while self.operators and self.operators[-1]!='(':
self.process()
return self.operands[0]
def process(self):
if not self.operands or not self.operators:
return
v2 = self.operands.pop()
op = self.operators.pop()
v1 = self.operands.pop()
if op == '^':
exp = f'int({v1})**int({v2})'
else:
exp = f'{v1}{op}{v2}'
self.operands.append(str(eval(exp)))
def test_fixture(s:ExpressionEvaluator):
testdata = [ # (input, expect),
(('3+5*2-1',), 12),
(('2*3-((6+5)-2*4)+2',), 5),
(('2+3-(2^2+3*(5-6/2)^2)*2+20',),-7),
(('5-6/2',),2.0),
(('3-2',),1),
(('4*5',),20),
(('3.0*2+(4.0+2.5*(4/2)+3.5*2)/2.0',),14.0),
]
for i in range(len(testdata)):
ret = s.evaluate(*testdata[i][0])
        exp = str(testdata[i][1])
        print("{} -> \t{} \t expect {}".format(testdata[i][0], ret, exp), end='\t')
print("{}".format('pass' if ret==exp else 'fail'))
import timeit
def test():
s = ExpressionEvaluator()
test_fixture(s)
test()
|
[
"[email protected]"
] | |
cd7fa9170f10eae6dfe847fde231d58eef59b1ef
|
db302f4f35f9c9df55ae9bbaf95f53116b77a7a8
|
/specviz/core/hub.py
|
d893da489fe0acb1960325d07b8ab8321fe02157
|
[
"BSD-3-Clause"
] |
permissive
|
imagineagents/specviz
|
c8c9ef033397a20029cfbd5972594a1d1cf0ee06
|
099e05ed40a7db56f338c3b89e3a8ec646586ac7
|
refs/heads/master
| 2020-04-04T22:57:21.131095 | 2018-11-05T21:46:48 | 2018-11-05T21:46:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,169 |
py
|
import logging
from .items import DataItem
class Hub:
def __init__(self, workspace, *args, **kwargs):
self._workspace = workspace
@property
def workspace(self):
"""The active workspace."""
return self._workspace
@property
def model(self):
"""The data item model of the active workspace."""
return self.workspace.model
@property
def proxy_model(self):
"""The proxy model of the active workspace."""
return self.workspace.proxy_model
@property
def plot_window(self):
"""The currently selected plot window of the workspace."""
return self.workspace.current_plot_window
@property
def plot_windows(self):
"""The currently selected plot window of the workspace."""
return self.workspace.mdi_area.subWindowList()
@property
def plot_widget(self):
"""The plot widget of the currently active plot window."""
return self.workspace.current_plot_window.plot_widget
@property
def plot_item(self):
"""The currently selected plot item."""
if self.workspace is not None:
return self.workspace.current_item
@property
def plot_items(self):
"""Returns the currently selected plot item."""
return self.proxy_model.items
@property
def visible_plot_items(self):
"""Plotted data that are currently visible."""
if self.plot_widget is not None:
return self.plot_widget.listDataItems()
@property
def selected_region(self):
"""The currently active ROI on the plot."""
return self.plot_window.plot_widget.selected_region
@property
def selected_region_bounds(self):
"""The bounds of currently active ROI on the plot."""
return self.plot_window.plot_widget.selected_region_bounds
@property
def data_item(self):
"""The data item of the currently selected plot item."""
if self.plot_item is not None:
return self.plot_item.data_item
@property
def data_items(self):
"""List of all data items held in the data item model."""
return self.model.items
def append_data_item(self, data_item):
"""
Adds a new data item object to appear in the left data list view.
Parameters
----------
data_item : :class:`~specviz.core.items.PlotDataItem`
The data item to be added to the list view.
"""
if isinstance(data_item, DataItem):
self.workspace.model.appendRow(data_item)
else:
logging.error("Data item model only accepts items of class "
"'DataItem', received '{}'.".format(type(data_item)))
def plot_data_item_from_data_item(self, data_item):
"""
Returns the PlotDataItem associated with the provided DataItem.
Parameters
----------
data_item : :class:`~specviz.core.items.PlotDataItem`
The DataItem from which the associated PlotDataItem will be
returned.
Returns
-------
plot_data_item : :class:`~specviz.core.items.PlotDataItem`
The PlotDataItem wrapping the DataItem.
"""
plot_data_item = self.workspace.proxy_model.item_from_id(
data_item.identifier)
return plot_data_item
def set_active_plugin_bar(self, name=None, index=None):
"""
Sets the currently displayed widget in the plugin side panel.
Parameters
----------
name : str, optional
The displayed name of the widget in the tab title.
index : int, optional
The index of the widget in the plugin tab widget.
"""
if name is None and index is None:
return
elif index is not None:
self.workspace.plugin_tab_widget.setCurrentIndex(index)
elif name is not None:
for i in range(self.workspace.plugin_tab_widget.count()):
if self.workspace.plugin_tab_widget.tabText(i) == name:
self.workspace.plugin_tab_widget.setCurrentIndex(i)
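# Minimal usage sketch (added for illustration; assumes a hypothetical
# `workspace` object exposing the attributes accessed above). Plugins are
# expected to read application state through the hub rather than touching
# the workspace directly:
#
#     hub = Hub(workspace)
#     for item in hub.data_items:
#         print(item)
#     hub.set_active_plugin_bar(name="Model Editor")  # hypothetical tab name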
|
[
"[email protected]"
] | |
2a45ff408fd370aab4729c41774fcc2437b0919b
|
62fe26b309b402d0c304624bf63a8e9b9c025148
|
/backend/src/food/migrations/0008_auto_20160526_1948.py
|
c5a13f42d2b89acff07771bcad8c733195ccddf5
|
[] |
no_license
|
brmley/fuchtard
|
20c9c8229debaf54897faabffa43d87bff1f0995
|
833bcb9655fff9ff733f798d19561d9b7e6c476c
|
refs/heads/master
| 2021-09-06T15:13:13.317894 | 2018-02-07T22:08:54 | 2018-02-07T22:08:54 | 120,173,994 | 0 | 0 | null | 2018-02-04T10:12:36 | 2018-02-04T10:12:35 | null |
UTF-8
|
Python
| false | false | 456 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-05-26 12:48
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('food', '0007_foodcategory_slug'),
]
operations = [
migrations.AlterModelOptions(
name='discount',
options={'verbose_name': 'Скидка', 'verbose_name_plural': 'Скидки'},
),
]
|
[
"[email protected]"
] | |
32089584dd27fac775fd61e0b1956be135b7f3ec
|
8ad9faa828ce54cddc38dc86eef30e6635babd0c
|
/RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/tlvprofile/type.py
|
be7ed87f72c7869b64748c8b290d96533fa14e13
|
[
"MIT"
] |
permissive
|
ralfjon/IxNetwork
|
d1a50069bc5a211f062b2b257cb6775e7cae8689
|
c0c834fbc465af69c12fd6b7cee4628baba7fff1
|
refs/heads/master
| 2020-04-04T00:36:24.956925 | 2018-10-26T16:37:13 | 2018-10-26T16:37:13 | 155,655,988 | 0 | 0 |
MIT
| 2018-11-01T03:19:30 | 2018-11-01T03:19:30 | null |
UTF-8
|
Python
| false | false | 2,820 |
py
|
# Copyright 1997 - 2018 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class Type(Base):
"""The Type class encapsulates a required type node in the ixnetwork hierarchy.
An instance of the class can be obtained by accessing the Type property from a parent instance.
The internal properties list will contain one and only one set of properties which is populated when the property is accessed.
"""
_SDM_NAME = 'type'
def __init__(self, parent):
super(Type, self).__init__(parent)
@property
def Object(self):
"""An instance of the Object class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.object.Object)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.object import Object
return Object(self)
@property
def IsEditable(self):
"""Indicates whether this is editable or not
Returns:
bool
"""
return self._get_attribute('isEditable')
@IsEditable.setter
def IsEditable(self, value):
self._set_attribute('isEditable', value)
@property
def IsRequired(self):
"""Indicates whether this is required or not
Returns:
bool
"""
return self._get_attribute('isRequired')
@IsRequired.setter
def IsRequired(self, value):
self._set_attribute('isRequired', value)
@property
def Name(self):
"""Name of the node
Returns:
str
"""
return self._get_attribute('name')
@Name.setter
def Name(self, value):
self._set_attribute('name', value)
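# Illustrative usage sketch (added; `tlv_profile` is a hypothetical parent
# instance from the same SDK hierarchy, per the class docstring above):
#
#     type_node = tlv_profile.Type
#     type_node.Name = 'example-type'
#     print(type_node.IsEditable, type_node.IsRequired)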
|
[
"[email protected]"
] | |
17524959bbf4502b8a5089c5ceb2e5302c989256
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/63/usersdata/189/29214/submittedfiles/swamee.py
|
68f4b3257c60ef461e7b8c0b735fb769787a3d60
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 396 |
py
|
import math
f= float(input('digite f:'))
l= float(input('digite l:'))
q= float(input('digite q:'))
deltaH= float(input('digite deltaH:'))
v= float(input('digite v:'))
d=((8*f*l*q*q)/((math.pi**2)*9.81*deltaH))**0.2  # D**5 = 8*f*L*Q**2/(pi**2*g*deltaH)
rey=(4*q)/(math.pi*d*v)
k=0.25/(math.log10(0.000002/(3.7*d)+5.74/rey**0.9))**2  # Swamee-Jain: eps/(3.7*D)
print('O valor de D é %.4f' %d)
print('O valor de Rey é %.4f' %rey)
print('O valor de K é %.4f' %k)
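# Illustrative check with hypothetical inputs (added; not part of the
# exercise): for eps = 0.000002 m, D = 0.5 m and Re = 1e5 the Swamee-Jain
# expression used above gives a friction factor of roughly 0.018.
f_check = 0.25/(math.log10(0.000002/(3.7*0.5) + 5.74/1e5**0.9))**2
print('f_check = %.4f' % f_check)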
|
[
"[email protected]"
] | |
736153dda8380835b19679e18d2ea20c3f6b2f72
|
ca41bc15576624f4be22c777833b6dbf80a3d5f9
|
/dolly/usuarios/migrations/0002_auto_20201217_0129.py
|
967d04b2b9a94870212d7fcd8ef2a34be0324eac
|
[] |
no_license
|
aris-osorio/dolly
|
74840477e01a020dfaaaf3a4e94c4f95f48f690e
|
256042bae4d4253fbc93f50aa125047e5090b68c
|
refs/heads/main
| 2023-02-01T14:48:19.840785 | 2020-12-17T07:30:34 | 2020-12-17T07:30:34 | 321,873,299 | 0 | 0 | null | 2020-12-17T06:51:59 | 2020-12-16T04:58:55 |
Python
|
UTF-8
|
Python
| false | false | 442 |
py
|
# Generated by Django 2.2.14 on 2020-12-17 07:29
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('usuarios', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='usuario',
name='id',
field=models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False),
),
]
|
[
"[email protected]"
] | |
6f8c0cd76c7b346759c4b53b13862f9053534529
|
7bed80964437032d9e1faf801153d79e089adff5
|
/_downloads/plot_haxby_anova_svm1.py
|
5d707d298495ba5eb28e885ad254c9dcd6e25dd0
|
[] |
no_license
|
MartinPerez/nilearn.github.io
|
56fccaf0997cbf34e7929fac1da7eac26b453537
|
f4f2438b5dae7aafe12ab96b24a941f5e0dde4b1
|
refs/heads/master
| 2021-01-22T10:25:49.288164 | 2015-07-13T21:31:42 | 2015-07-13T21:31:42 | 39,084,452 | 0 | 0 | null | 2015-07-14T15:46:53 | 2015-07-14T15:46:53 | null |
UTF-8
|
Python
| false | false | 4,392 |
py
|
"""
The haxby dataset: face vs house in object recognition
=======================================================
A significant part of the running time of this example is actually spent
in loading the data: we load all the data but only use the face and
houses conditions.
"""
### Load Haxby dataset ########################################################
from nilearn import datasets
import numpy as np
dataset_files = datasets.fetch_haxby_simple()
y, session = np.loadtxt(dataset_files.session_target).astype("int").T
conditions = np.recfromtxt(dataset_files.conditions_target)['f0']
mask = dataset_files.mask
# fmri_data.shape is (40, 64, 64, 1452)
# and mask.shape is (40, 64, 64)
### Preprocess data ###########################################################
### Restrict to faces and houses ##############################################
# Keep only data corresponding to faces or houses
condition_mask = np.logical_or(conditions == 'face', conditions == 'house')
y = y[condition_mask]
session = session[condition_mask]
conditions = conditions[condition_mask]
# We have 2 conditions
n_conditions = np.size(np.unique(y))
### Loading step ##############################################################
from nilearn.input_data import NiftiMasker
# For decoding, standardizing is often very important
nifti_masker = NiftiMasker(mask=mask, sessions=session, smoothing_fwhm=4,
standardize=True, memory="nilearn_cache",
memory_level=1)
X = nifti_masker.fit_transform(dataset_files.func)
# Apply our condition_mask
X = X[condition_mask]
### Prediction function #######################################################
### Define the prediction function to be used.
# Here we use a Support Vector Classification, with a linear kernel
from sklearn.svm import SVC
svc = SVC(kernel='linear')
### Dimension reduction #######################################################
from sklearn.feature_selection import SelectKBest, f_classif
### Define the dimension reduction to be used.
# Here we use a classical univariate feature selection based on F-test,
# namely Anova. We set the number of features to be selected to 500
feature_selection = SelectKBest(f_classif, k=500)
# We have our classifier (SVC), our feature selection (SelectKBest), and now,
# we can plug them together in a *pipeline* that performs the two operations
# successively:
from sklearn.pipeline import Pipeline
anova_svc = Pipeline([('anova', feature_selection), ('svc', svc)])
### Fit and predict ###########################################################
anova_svc.fit(X, y)
y_pred = anova_svc.predict(X)
### Visualisation #############################################################
### Look at the SVC's discriminating weights
coef = svc.coef_
# reverse feature selection
coef = feature_selection.inverse_transform(coef)
# reverse masking
weight_img = nifti_masker.inverse_transform(coef)
### Create the figure
from nilearn import image
import matplotlib.pyplot as plt
from nilearn.plotting import plot_stat_map
# Plot the mean image because we have no anatomic data
mean_img = image.mean_img(dataset_files.func)
plot_stat_map(weight_img, mean_img, title='SVM weights')
### Saving the results as a Nifti file may also be important
import nibabel
nibabel.save(weight_img, 'haxby_face_vs_house.nii')
### Cross validation ##########################################################
from sklearn.cross_validation import LeaveOneLabelOut
### Define the cross-validation scheme used for validation.
# Here we use a LeaveOneLabelOut cross-validation on the session label
# divided by 2, which corresponds to a leave-two-session-out
cv = LeaveOneLabelOut(session // 2)
### Compute the prediction accuracy for the different folds (i.e. session)
cv_scores = []
for train, test in cv:
anova_svc.fit(X[train], y[train])
y_pred = anova_svc.predict(X[test])
cv_scores.append(np.sum(y_pred == y[test]) / float(np.size(y[test])))
### Print results #############################################################
### Return the corresponding mean prediction accuracy
classification_accuracy = np.mean(cv_scores)
### Printing the results
print "=== ANOVA ==="
print "Classification accuracy: %f" % classification_accuracy, \
" / Chance level: %f" % (1. / n_conditions)
# Classification accuracy: 0.986111 / Chance level: 0.500000
plt.show()
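# Note on the cross-validation scheme above (illustrative; assumes session
# labels 0..11): integer-dividing the labels by 2 merges sessions into
# pairs, so LeaveOneLabelOut holds out two sessions per fold.
# np.unique(np.arange(12) // 2)  ->  array([0, 1, 2, 3, 4, 5])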
|
[
"[email protected]"
] | |
f694f3dae80fbbf984c2319b63e7c6a577940e56
|
33a0d5ec6ca440986f22b010ffb310bf34c4fcac
|
/Basic_grammar/文件读写/当前目录下文件.py
|
cb5b5c8f00f064aa786aafd82a96d58b531d59c2
|
[] |
no_license
|
zhaozongzhao/learngit
|
c3de619f07840839819ffee4bbacb590caba8dbe
|
a6471e6d63b298882ceed020cc3b56e457ed2ca0
|
refs/heads/master
| 2023-07-21T23:04:51.249300 | 2021-09-17T10:29:54 | 2021-09-17T10:29:54 | 100,707,131 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 575 |
py
|
# Find files whose names contain a given string in the current directory
# and all of its subdirectories, and print their paths relative to it.
import os

def get_walk(path):
    return [entry for entry in os.walk(path)]

def get_file_path(keyword):
    base = os.getcwd()
    for dirpath, dirnames, filenames in get_walk(base):
        for name in filenames:
            if keyword in name:
                # join the directory that actually contains the file, then
                # report the path relative to the starting directory
                print(os.path.relpath(os.path.join(dirpath, name), base))

get_file_path('file')
|
[
"[email protected]"
] | |
1120dbc166dbe15daef9175ed8f8fb4716705a95
|
0d0afd1dce972b4748ce8faccd992c019794ad9e
|
/integra/integra_crm/models/__init__.py
|
9975a438862c880f17b75251bec25d7b121421bd
|
[] |
no_license
|
danimaribeiro/odoo-erp
|
e2ca2cfe3629fbedf413e85f7c3c0453fd16941e
|
d12577bf7f5266b571cbedeb930720d653320e96
|
refs/heads/master
| 2020-01-23T21:32:16.149716 | 2016-11-05T15:35:40 | 2016-11-05T15:35:40 | 67,892,809 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 239 |
py
|
# -*- coding: utf-8 -*-
#from __future__ import division, print_function, unicode_literals
from crm_motivo import crm_motivo
from crm_lead import crm_lead
from crm_lead_report import crm_lead_report
from sale_report import sale_report
|
[
"[email protected]"
] | |
0f2e65ef53114eaa498af4a3c30172d850c94f92
|
1a59a9076c1e9f1eb98e24ff41a4c1c95e2b353e
|
/xcp2k/classes/_detailed_energy2.py
|
8b2ddffbe58ddb7a150d834a20b889d29d47662a
|
[] |
no_license
|
Roolthasiva/xcp2k
|
66b2f30ebeae1a946b81f71d22f97ea4076e11dc
|
fc3b5885503c6f6dc549efeb4f89f61c8b6b8242
|
refs/heads/master
| 2022-12-23T06:03:14.033521 | 2020-10-07T08:01:48 | 2020-10-07T08:01:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 692 |
py
|
from xcp2k.inputsection import InputSection
from xcp2k.classes._each181 import _each181
class _detailed_energy2(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Section_parameters = None
self.Add_last = None
self.Common_iteration_levels = None
self.Filename = None
self.Log_print_key = None
self.EACH = _each181()
self._name = "DETAILED_ENERGY"
self._keywords = {'Add_last': 'ADD_LAST', 'Common_iteration_levels': 'COMMON_ITERATION_LEVELS', 'Filename': 'FILENAME', 'Log_print_key': 'LOG_PRINT_KEY'}
self._subsections = {'EACH': 'EACH'}
self._attributes = ['Section_parameters']
|
[
"[email protected]"
] | |
e0b0740f3d4a3e66eeecad67f9aaf7a16848b39b
|
f1a3bd9ad5ef76204c24dc96f113c405ece21b6d
|
/workshop/migrations/0023_auto__add_field_projectversion_comment_node__add_field_projectversion_.py
|
2dede45c1223fc514d33ac4c6985f3965aba8ce4
|
[] |
no_license
|
JamesLinus/solidcomposer
|
02f83c3731774e8008d46b418f3bf4fb5d9dab36
|
ed75e576ce1c50487403437b5b537f9bfbb6397e
|
refs/heads/master
| 2020-12-28T23:50:06.745329 | 2014-01-24T02:34:41 | 2014-01-24T02:34:41 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 16,117 |
py
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ProjectVersion.comment_node'
db.add_column('workshop_projectversion', 'comment_node', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['main.SongCommentNode'], null=True, blank=True), keep_default=False)
# Adding field 'ProjectVersion.date_added'
db.add_column('workshop_projectversion', 'date_added', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2010, 7, 8, 1, 36, 12, 365269)), keep_default=False)
def backwards(self, orm):
# Deleting field 'ProjectVersion.comment_node'
db.delete_column('workshop_projectversion', 'comment_node_id')
# Deleting field 'ProjectVersion.date_added'
db.delete_column('workshop_projectversion', 'date_added')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.band': {
'Meta': {'object_name': 'Band'},
'abandon_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'concurrent_editing': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'openness': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'total_space': ('django.db.models.fields.BigIntegerField', [], {}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '110', 'unique': 'True', 'null': 'True'}),
'used_space': ('django.db.models.fields.BigIntegerField', [], {'default': '0'})
},
'main.song': {
'Meta': {'object_name': 'Song'},
'album': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'band': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Band']"}),
'comment_node': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'song_comment_node'", 'null': 'True', 'to': "orm['main.SongCommentNode']"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_open_source': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'length': ('django.db.models.fields.FloatField', [], {}),
'mp3_file': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'plugins': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'song_plugins'", 'blank': 'True', 'to': "orm['workshop.PluginDepenency']"}),
'source_file': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'studio': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workshop.Studio']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'waveform_img': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'})
},
'main.songcommentnode': {
'Meta': {'object_name': 'SongCommentNode'},
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {}),
'date_edited': ('django.db.models.fields.DateTimeField', [], {}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.SongCommentNode']", 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'reply_disabled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'song': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Song']"})
},
'main.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
'workshop.bandinvitation': {
'Meta': {'object_name': 'BandInvitation'},
'band': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Band']"}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'count': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'expire_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'invitee'", 'null': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'inviter'", 'to': "orm['auth.User']"}),
'role': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {})
},
'workshop.plugindepenency': {
'Meta': {'object_name': 'PluginDepenency'},
'comes_with_studio': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workshop.Studio']", 'null': 'True', 'blank': 'True'}),
'external_url': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'plugin_type': ('django.db.models.fields.IntegerField', [], {}),
'price': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'}),
'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'workshop.project': {
'Meta': {'object_name': 'Project'},
'band': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Band']"}),
'checked_out_to': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'checked_out_to'", 'null': 'True', 'to': "orm['auth.User']"}),
'date_activity': ('django.db.models.fields.DateTimeField', [], {}),
'forked_from': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'forked_from'", 'null': 'True', 'to': "orm['workshop.ProjectVersion']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latest_version': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'latest_version'", 'null': 'True', 'to': "orm['workshop.ProjectVersion']"}),
'merged_from': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'merged_from'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['workshop.ProjectVersion']"}),
'promote_voters': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'promote_voters'", 'blank': 'True', 'to': "orm['auth.User']"}),
'scrap_voters': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'scrap_voters'", 'blank': 'True', 'to': "orm['auth.User']"}),
'subscribers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'project_subscribers'", 'blank': 'True', 'to': "orm['auth.User']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Tag']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'})
},
'workshop.projectversion': {
'Meta': {'object_name': 'ProjectVersion'},
'comment_node': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.SongCommentNode']", 'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workshop.Project']"}),
'provided_samples': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'provided_samples'", 'blank': 'True', 'to': "orm['workshop.UploadedSample']"}),
'song': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Song']", 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.IntegerField', [], {})
},
'workshop.sampledependency': {
'Meta': {'object_name': 'SampleDependency'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'song': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Song']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'uploaded_sample': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workshop.UploadedSample']", 'null': 'True', 'blank': 'True'})
},
'workshop.samplefile': {
'Meta': {'object_name': 'SampleFile'},
'hex_digest': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '256'})
},
'workshop.studio': {
'Meta': {'object_name': 'Studio'},
'canMerge': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'canReadFile': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'canRender': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'external_url': ('django.db.models.fields.CharField', [], {'max_length': '500', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'logo_16x16': ('django.db.models.fields.files.ImageField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'logo_large': ('django.db.models.fields.files.ImageField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'workshop.uploadedsample': {
'Meta': {'object_name': 'UploadedSample'},
'band': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Band']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sample_file': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['workshop.SampleFile']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['workshop']
|
[
"[email protected]"
] | |
5afadda78affa9fc07b1ed5f8c8dfb417881723f
|
37433c8f7ec4ff9fded3c7bcc9403e2293436552
|
/blog/admin.py
|
247a86c6e199c98ee1a1d100ca4f05337c70cdf6
|
[] |
no_license
|
FreeGodCode/TYCarry_Blog
|
4420d896e735789ac9104568e7bf59a85b796373
|
9be47be8ff1e33980f237227786bc9d472155114
|
refs/heads/master
| 2023-03-22T09:36:35.476398 | 2021-03-17T07:29:44 | 2021-03-17T07:29:44 | 314,501,947 | 1 | 0 | null | 2021-03-17T07:29:45 | 2020-11-20T09:10:48 |
Python
|
UTF-8
|
Python
| false | false | 1,040 |
py
|
from django.contrib import admin
from blog.models import Article, Category, Tag, User, ArticleComment
# Register your models here.
from django_summernote.admin import SummernoteModelAdmin
class PostAdmin(SummernoteModelAdmin):
    summernote_fields = ('content',)  # enable the rich-text editor for the content field (note the tuple)
    list_display = ['article_id', 'title', 'created_time']  # columns shown in the change list
    search_fields = ['title']  # search box
    list_filter = ['created_time']  # list filter
"""
from tinymce.models import HTMLField
class Blog(models.Model):
sblog = HTMLField()
Add the following script to the page head:
<script>
tinyMCE.init({
'mode': 'textareas',
'theme': 'advanced',
'width': 800,
'height': 600,
})
</script>
"""
class CommentAdmin(admin.ModelAdmin):
list_display = ['username', 'body', 'title']
search_fields = ['title']
admin.site.register(Article, PostAdmin)
admin.site.register(Category)
admin.site.register(Tag)
admin.site.register(User)
admin.site.register(ArticleComment, CommentAdmin)
|
[
"[email protected]"
] | |
e02980246ab4a0bcc0d1250d698e64d718bd6708
|
687928e5bc8d5cf68d543005bb24c862460edcfc
|
/nssrc/com/citrix/netscaler/nitro/resource/config/vpn/vpnglobal_authenticationpolicy_binding.py
|
2d75c963ec3e23f5f802c94939e475d7b9c85737
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
] |
permissive
|
mbs91/nitro
|
c6c81665d6abd04de8b9f09554e5e8e541f4a2b8
|
be74e1e177f5c205c16126bc9b023f2348788409
|
refs/heads/master
| 2021-05-29T19:24:04.520762 | 2015-06-26T02:03:09 | 2015-06-26T02:03:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,462 |
py
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class vpnglobal_authenticationpolicy_binding(base_resource) :
""" Binding class showing the authenticationpolicy that can be bound to vpnglobal.
"""
def __init__(self) :
self._policyname = ""
self._priority = 0
self._secondary = False
self._groupextraction = False
self.___count = 0
@property
def priority(self) :
"""The priority of the policy.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
"""The priority of the policy.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def policyname(self) :
"""The name of the policy.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
"""The name of the policy.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def secondary(self) :
"""Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only to a primary authentication server but also to a secondary authentication server. User groups are aggregated across both authentication servers. The user name must be exactly the same on both authentication servers, but the authentication servers can require different passwords.
"""
try :
return self._secondary
except Exception as e:
raise e
@secondary.setter
def secondary(self, secondary) :
"""Bind the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only to a primary authentication server but also to a secondary authentication server. User groups are aggregated across both authentication servers. The user name must be exactly the same on both authentication servers, but the authentication servers can require different passwords.
"""
try :
self._secondary = secondary
except Exception as e:
raise e
@property
def groupextraction(self) :
"""Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called it primary and/or secondary authentication has succeeded.
"""
try :
return self._groupextraction
except Exception as e:
raise e
@groupextraction.setter
def groupextraction(self, groupextraction) :
"""Bind the Authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called it primary and/or secondary authentication has succeeded.
"""
try :
self._groupextraction = groupextraction
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(vpnglobal_authenticationpolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.vpnglobal_authenticationpolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = vpnglobal_authenticationpolicy_binding()
updateresource.policyname = resource.policyname
updateresource.secondary = resource.secondary
updateresource.groupextraction = resource.groupextraction
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [vpnglobal_authenticationpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].policyname = resource[i].policyname
updateresources[i].secondary = resource[i].secondary
updateresources[i].groupextraction = resource[i].groupextraction
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = vpnglobal_authenticationpolicy_binding()
deleteresource.policyname = resource.policyname
deleteresource.secondary = resource.secondary
deleteresource.groupextraction = resource.groupextraction
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [vpnglobal_authenticationpolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].policyname = resource[i].policyname
deleteresources[i].secondary = resource[i].secondary
deleteresources[i].groupextraction = resource[i].groupextraction
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service) :
""" Use this API to fetch a vpnglobal_authenticationpolicy_binding resources.
"""
try :
obj = vpnglobal_authenticationpolicy_binding()
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, filter_) :
""" Use this API to fetch filtered set of vpnglobal_authenticationpolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = vpnglobal_authenticationpolicy_binding()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service) :
""" Use this API to count vpnglobal_authenticationpolicy_binding resources configued on NetScaler.
"""
try :
obj = vpnglobal_authenticationpolicy_binding()
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, filter_) :
""" Use this API to count the filtered set of vpnglobal_authenticationpolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = vpnglobal_authenticationpolicy_binding()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Staaddresstype:
IPV4 = "IPV4"
IPV6 = "IPV6"
class vpnglobal_authenticationpolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.vpnglobal_authenticationpolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.vpnglobal_authenticationpolicy_binding = [vpnglobal_authenticationpolicy_binding() for _ in range(length)]
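# Illustrative usage sketch (added; `ns_session` is a hypothetical nitro
# service session obtained from the SDK's login flow):
#
#     bindings = vpnglobal_authenticationpolicy_binding.get(ns_session)
#     for b in bindings:
#         print(b.policyname, b.priority, b.secondary)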
|
[
"[email protected]"
] | |
6ebc87ec541d5938b401e1d177cedd46d683529b
|
ded0b1a8a91fd9af7bae98cce3cfadbb6d03f84d
|
/examples/upload.py
|
6c3908b418a0cf28ac0b73c0d7e2511f435ff923
|
[
"MIT"
] |
permissive
|
rjw57/bdfu
|
d38b3871ff60703f971b6cef4fae298dfa3faf73
|
386d800738e6943ed9063f1bf904ece86410c7c7
|
refs/heads/master
| 2021-01-10T21:05:08.602572 | 2015-02-26T13:58:35 | 2015-02-26T13:58:35 | 31,363,498 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 549 |
py
|
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
from bdfu.client import Client
def main():
if len(sys.argv) != 4:
sys.stderr.write('usage: {0} <endpoint> <token> <file>\n'.format(
os.path.basename(sys.argv[0])))
return 1
endpoint, token, filename = sys.argv[1:]
c = Client(endpoint, token)
with open(filename, 'rb') as f:
file_id = c.upload(f)
print('uploaded file with id: ' + str(file_id))
return 0
if __name__ == '__main__':
sys.exit(main())
|
[
"[email protected]"
] | |
2c5c84d785bf038db82bc52ff81160bed04bcb1f
|
6d37c05de7d73e04f87c6ed796c77144cd8fa187
|
/Chapter6/Challenge10.py
|
4be5c2267309861fe5aace2efeae214d95106301
|
[] |
no_license
|
eizin6389/The-Self-Taught-Programmer
|
edc37ed6d95e8b24f590a6cbb9c75c0e5bd4e2e3
|
9c23612dfb11d5302cb26a359d02c88886cf986c
|
refs/heads/master
| 2022-12-08T06:47:37.541256 | 2020-08-30T07:16:01 | 2020-08-30T07:16:01 | 286,423,906 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 151 |
py
|
word = "四月の晴れた寒い日で、時計がどれも十二時を打っていた。"
index = word.index("、")
print(index)
print(word[0:index])
|
[
"[email protected]"
] | |
b1e3f6259a636db56b537dc1e6d558ffccfe0925
|
e2f9d506dcc3fee7dbbbce370c7e2c3f48275828
|
/tests/test_helpers.py
|
f817a336beaeddf6adbd9c6a2bf097ba88fc2d9f
|
[
"MIT"
] |
permissive
|
MacHu-GWU/s3splitmerge-project
|
d33829f1ff6aed9cc77c9b4bec30601ce4570f60
|
873892158f4a2d0ee20f291e5d3b2a80f0bae1ba
|
refs/heads/main
| 2023-08-30T09:07:32.312453 | 2021-11-07T16:08:24 | 2021-11-07T16:08:24 | 394,803,306 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,914 |
py
|
# -*- coding: utf-8 -*-
import pytest
from pytest import raises, approx
from s3splitmerge import exc
from s3splitmerge.helpers import (
b64encode_str, b64decode_str,
get_s3_object_metadata,
is_s3_object_exists,
count_lines_in_s3_object,
)
from s3splitmerge.tests import s3_client, bucket, prefix
def test_b64_encode_decode():
s = "s3://bucket/key"
    assert b64decode_str(b64encode_str(s)) == s  # encode/decode must round-trip
def test_get_s3_object_metadata():
with raises(exc.S3ObjectNotFound):
get_s3_object_metadata(
s3_client=s3_client,
bucket=bucket,
key=f"{prefix}/helpers/{get_s3_object_metadata.__name__}/not-exists-object.json",
)
s3_client.put_object(
Bucket=bucket,
Key=f"{prefix}/helpers/{get_s3_object_metadata.__name__}/existing-object.json",
Body='{"id": 1}',
)
metadata = get_s3_object_metadata(
s3_client=s3_client,
bucket=bucket,
key=f"{prefix}/helpers/{get_s3_object_metadata.__name__}existing-object.json",
)
assert metadata.size == 9
def test_is_s3_object_exists():
assert is_s3_object_exists(
s3_client=s3_client,
bucket=bucket,
key=f"{prefix}/helpers/{is_s3_object_exists.__name__}/not-exists-object.json",
) is False
s3_client.put_object(
Bucket=bucket,
Key=f"{prefix}/helpers/{is_s3_object_exists.__name__}/existing-object.json",
Body='{"id": 1}',
)
    assert is_s3_object_exists(
        s3_client=s3_client,
        bucket=bucket,
        key=f"{prefix}/helpers/{is_s3_object_exists.__name__}/existing-object.json",
    ) is True
# def test_count_lines_in_s3_object():
# count_lines_in_s3_object(
# s3_client=s3_client,
# bucket=bucket
# )
if __name__ == "__main__":
import os
basename = os.path.basename(__file__)
pytest.main([basename, "-s", "--tb=native"])
|
[
"[email protected]"
] | |
e39b793d65bd411519cedbdc9c917e80ada47a62
|
7868c0496a96f51e602641de99e2c5d85f478c09
|
/src/anomalydetection/inqmeasurement.py
|
f060bd647e518bc4865e6293d42d71f6665de1dd
|
[
"Apache-2.0"
] |
permissive
|
Joaggi/Incremental-Anomaly-Detection-using-Quantum-Measurements
|
7e05a13f62bb867ded02f4bbfad075693bbddca4
|
c53cba3691f6a7af8b4b061be4a03f05121a2db9
|
refs/heads/main
| 2023-07-08T05:48:58.483713 | 2023-06-30T17:00:31 | 2023-06-30T17:00:31 | 522,575,822 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,371 |
py
|
import jax
from jax import jit
import jax.numpy as jnp
import numpy as np
from functools import partial
from sklearn.kernel_approximation import RBFSampler
from tqdm import tqdm
class QFeatureMap_rff():
def __init__(
self,
input_dim: int,
dim: int = 100,
gamma: float = 1,
random_state=None,
**kwargs
):
super().__init__(**kwargs)
self.input_dim = input_dim
self.dim = dim
self.gamma = gamma
self.random_state = random_state
self.vmap_compute = jax.jit(jax.vmap(self.compute, in_axes=(0, None, None, None), out_axes=0))
def build(self):
rbf_sampler = RBFSampler(
gamma=self.gamma,
n_components=self.dim,
random_state=self.random_state)
x = np.zeros(shape=(1, self.input_dim))
rbf_sampler.fit(x)
self.rbf_sampler = rbf_sampler
self.weights = jnp.array(rbf_sampler.random_weights_)
self.offset = jnp.array(rbf_sampler.random_offset_)
self.dim = rbf_sampler.get_params()['n_components']
def update_rff(self, weights, offset):
self.weights = jnp.array(weights)
self.offset = jnp.array(offset)
def get_dim(self, num_features):
return self.dim
@staticmethod
def compute(X, weights, offset, dim):
vals = jnp.dot(X, weights) + offset
#vals = jnp.einsum('i,ik->k', X, weights) + offset
vals = jnp.cos(vals)
vals *= jnp.sqrt(2.) / jnp.sqrt(dim)
return vals
@partial(jit, static_argnums=(0,))
def __call__(self, X):
vals = self.vmap_compute(X, self.weights, self.offset, self.dim)
norms = jnp.linalg.norm(vals, axis=1)
psi = vals / norms[:, jnp.newaxis]
return psi
class InqMeasurement():
def __init__(self, input_shape, dim_x, gamma, random_state=None, batch_size = 300):
self.gamma = gamma
self.dim_x = dim_x
self.fm_x = QFeatureMap_rff( input_dim=input_shape, dim = dim_x, gamma = gamma, random_state = random_state)
self.fm_x.build()
self.num_samples = 0
self.train_pure_batch = jax.jit(jax.vmap(self.train_pure, in_axes=(0)))
self.collapse_batch = jax.jit(jax.vmap(self.collapse, in_axes=(0, None)))
self.sum_batch = jax.jit(self.sum)
self.key = jax.random.PRNGKey(random_state)
self.batch_size = batch_size
@staticmethod
def train_pure(inputs):
oper = jnp.einsum(
'...i,...j->...ij',
inputs, jnp.conj(inputs),
optimize='optimal') # shape (b, nx, nx)
return oper
@staticmethod
def sum(rho_res):
return jnp.sum(rho_res, axis=0)
@staticmethod
@partial(jit, static_argnums=(1,2,3,4))
def compute_training_jit(batch, alpha, fm_x, train_pure_batch, sum_batch, rho):
inputs = fm_x(batch)
rho_res = train_pure_batch(inputs)
rho_res = sum_batch(rho_res)
return jnp.add((alpha)*rho_res, (1-alpha)*rho) if rho is not None else rho_res
#return jnp.add(rho_res, rho) if rho is not None else rho_res
@staticmethod
def compute_training(values, alpha, perm, i, batch_size, fm_x, train_pure_batch, sum_batch, rho, compute_training_jit):
batch_idx = perm[i * batch_size: (i + 1)*batch_size]
batch = values[batch_idx, :]
return compute_training_jit(batch, alpha, fm_x, train_pure_batch, sum_batch, rho)
def initial_train(self, values, alpha):
num_batches = InqMeasurement.obtain_params_batches(values, self.batch_size, self.key)
num_train = values.shape[0]
perm = jnp.arange(num_train)
for i in range(num_batches):
if hasattr(self, "rho_res"):
self.rho_res = self.compute_training(values, alpha, perm, i, self.batch_size, self.fm_x,
self.train_pure_batch, self.sum_batch, self.rho_res, self.compute_training_jit)
else:
self.rho_res = self.compute_training(values, alpha, perm, i, self.batch_size, self.fm_x,
self.train_pure_batch, self.sum_batch, None, self.compute_training_jit)
self.num_samples += values.shape[0]
@staticmethod
def collapse(inputs, rho_res):
rho_h = jnp.matmul(jnp.conj(inputs), rho_res)
rho_res = jnp.einsum(
'...i, ...i -> ...',
rho_h, jnp.conj(rho_h),
optimize='optimal') # shape (b,)
#rho_res = jnp.dot(rho_h, jnp.conj(rho_h))
return rho_res
@staticmethod
def obtain_params_batches(values, batch_size, key):
num_train = values.shape[0]
num_complete_batches, leftover = divmod(num_train, batch_size)
num_batches = num_complete_batches + bool(leftover)
return num_batches
@partial(jit, static_argnums=(0,))
def predict(self, values):
num_batches = InqMeasurement.obtain_params_batches(values, self.batch_size, self.key)
results = None
rho_res = self.rho_res / self.num_samples
num_train = values.shape[0]
perm = jnp.arange(num_train)
for i in range(num_batches):
batch_idx = perm[i * self.batch_size: (i + 1)*self.batch_size]
batch = values[batch_idx, :]
inputs = self.fm_x(batch)
batch_probs = self.collapse_batch(inputs, rho_res)
results = jnp.concatenate([results, batch_probs], axis=0) if results is not None else batch_probs
return results
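# Sanity sketch (added for illustration, hypothetical data): sklearn's
# RBFSampler draws random Fourier features for the RBF kernel, so before
# the extra per-sample normalisation in __call__ the raw features satisfy
# compute(x) . compute(y) ~= exp(-gamma * ||x - y||**2).
if __name__ == "__main__":
    fm_demo = QFeatureMap_rff(input_dim=3, dim=4000, gamma=0.5, random_state=0)
    fm_demo.build()
    x = jnp.array([0.1, 0.2, 0.3])
    y = jnp.array([0.2, 0.1, 0.4])
    vx = QFeatureMap_rff.compute(x, fm_demo.weights, fm_demo.offset, fm_demo.dim)
    vy = QFeatureMap_rff.compute(y, fm_demo.weights, fm_demo.offset, fm_demo.dim)
    # the two printed numbers should agree to within RFF sampling error
    print(float(jnp.dot(vx, vy)), float(jnp.exp(-0.5 * jnp.sum((x - y) ** 2))))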
|
[
"[email protected]"
] | |
89a55f90597435c511a95f43dd03808662a4461f
|
85a9ffeccb64f6159adbd164ff98edf4ac315e33
|
/pysnmp/SONOMASYSTEMS-SONOMA-SLIP-MIB.py
|
37c94cdf6f1275135ced4093657535300758d95a
|
[
"Apache-2.0"
] |
permissive
|
agustinhenze/mibs.snmplabs.com
|
5d7d5d4da84424c5f5a1ed2752f5043ae00019fb
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
refs/heads/master
| 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 |
Apache-2.0
| 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null |
UTF-8
|
Python
| false | false | 4,019 |
py
|
#
# PySNMP MIB module SONOMASYSTEMS-SONOMA-SLIP-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SONOMASYSTEMS-SONOMA-SLIP-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:01:32 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Unsigned32, Counter32, Counter64, ModuleIdentity, NotificationType, Integer32, TimeTicks, ObjectIdentity, iso, IpAddress, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "Counter32", "Counter64", "ModuleIdentity", "NotificationType", "Integer32", "TimeTicks", "ObjectIdentity", "iso", "IpAddress", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "MibIdentifier")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
sonomaApplications, = mibBuilder.importSymbols("SONOMASYSTEMS-SONOMA-MIB", "sonomaApplications")
slip = MibIdentifier((1, 3, 6, 1, 4, 1, 2926, 25, 8, 2))
slipSpeed = MibScalar((1, 3, 6, 1, 4, 1, 2926, 25, 8, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("sp2400", 1), ("sp4800", 2), ("sp9600", 3), ("sp19200", 4), ("sp38400", 5), ("sp57600", 6), ("sp115200", 7))).clone('sp9600')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slipSpeed.setStatus('mandatory')
slipDataBits = MibScalar((1, 3, 6, 1, 4, 1, 2926, 25, 8, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3))).clone(namedValues=NamedValues(("db8", 3))).clone('db8')).setMaxAccess("readonly")
if mibBuilder.loadTexts: slipDataBits.setStatus('mandatory')
slipParity = MibScalar((1, 3, 6, 1, 4, 1, 2926, 25, 8, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3))).clone(namedValues=NamedValues(("none", 3))).clone('none')).setMaxAccess("readonly")
if mibBuilder.loadTexts: slipParity.setStatus('mandatory')
slipStopBits = MibScalar((1, 3, 6, 1, 4, 1, 2926, 25, 8, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("sb1", 1), ("sb2", 2))).clone('sb1')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slipStopBits.setStatus('mandatory')
slipFlowControl = MibScalar((1, 3, 6, 1, 4, 1, 2926, 25, 8, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3))).clone(namedValues=NamedValues(("none", 3))).clone('none')).setMaxAccess("readonly")
if mibBuilder.loadTexts: slipFlowControl.setStatus('mandatory')
slipType = MibScalar((1, 3, 6, 1, 4, 1, 2926, 25, 8, 2, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("uncompressed", 1), ("compressed", 2))).clone('uncompressed')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slipType.setStatus('mandatory')
slipMtu = MibScalar((1, 3, 6, 1, 4, 1, 2926, 25, 8, 2, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(64, 1518)).clone(1006)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: slipMtu.setStatus('mandatory')
mibBuilder.exportSymbols("SONOMASYSTEMS-SONOMA-SLIP-MIB", slip=slip, slipSpeed=slipSpeed, slipParity=slipParity, slipStopBits=slipStopBits, slipDataBits=slipDataBits, slipFlowControl=slipFlowControl, slipMtu=slipMtu, slipType=slipType)
|
[
"[email protected]"
] | |
feca5ccc14854f43ec13b95a54fae7811bed008c
|
227a48922280ba3e1f47d54a81ddbcc8ad7b2c89
|
/BigCode/crawler-master/GetGithubInfo.py
|
fcabc199c220e52e8a1ab6534d3e27687ea92688
|
[] |
no_license
|
zk467701860/MyWorkingProject
|
948f652c26886282be2afaddddd18f069271946c
|
b81e7cccf6f951442b177bb0cfdc68180e9b9011
|
refs/heads/master
| 2020-03-22T03:17:24.299582 | 2018-07-30T02:20:57 | 2018-07-30T02:20:57 | 139,423,215 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 11,570 |
py
|
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
from redis import Redis
import MySQLdb
from github import Github
from git import Repo
import subprocess
import datetime
import traceback
## redis and mysql connection
r = Redis(host='10.131.252.156',port=6379)
while(r.llen('ids')!=0):
id = r.lpop('ids')
sourcecode = r.lpop('sourcecodes')
print " "+id+" "+sourcecode
if(sourcecode.find(r"https://github.com/")!=-1):
conn = MySQLdb.connect(host='localhost', user='root', passwd='root', port=3306,use_unicode=True, charset="utf8")
cur = conn.cursor()
conn.select_db('fdroid')
try:
## connecting github
ACCESS_USERNAME = '[email protected]'
ACCESS_PWD = "abcd123456"
client = Github(ACCESS_USERNAME, ACCESS_PWD)
githubId = sourcecode[19:]
repo = client.get_repo(githubId)
localAdress = r'/home/fdse/BigCode/FdroidRepo/' + githubId
# ## download github repository
# try:
# localAdress = r'/home/fdse/BigCode/FdroidRepo/' + githubId
# #localAdress = r'D:\test/' + githubId
# subprocess.call(['git', 'clone', sourcecode, localAdress])
# except Exception, e:
# print 'download fail!' + sourcecode
# r.rpush('ids', id)
# r.rpush('sourcecodes', sourcecode)
# print e.message
# print traceback.print_exc()
# continue
## get repository info
repoDis = None
if( repo.description):
repoDis = repo.description.encode('utf-8')
try:
cur.execute(
"insert into repository(repository_name,git_address,issue_address,local_address,description,added_date) values(%s,%s,%s,%s,%s,%s)",
(repo.name, sourcecode, repo.issues_url, localAdress,
repoDis,
repo.created_at))
conn.commit()
except Exception, e:
print e.message
print traceback.print_exc()
print 'insert repo fail!' + repo.name
r.rpush('ids', id)
r.rpush('sourcecodes', sourcecode)
cur.close()
conn.close()
conn = MySQLdb.connect(host='localhost', user='root', passwd='root', port=3306,use_unicode=True, charset="utf8")
cur = conn.cursor()
conn.select_db('fdroid')
continue
aa = cur.execute('select repository_id from repository where git_address = %s', sourcecode)
repoId = int(cur.fetchone()[0])
issueList = repo.get_issues()
commitList = repo.get_commits()
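            ## get_issues()/get_commits() return lazily-paginated lists; pages are
            ## fetched from the GitHub API on demand as the loops below iterate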
## get issue info from github, including event and comment
for issue in issueList:
try:
issueEvents = issue.get_events()
issueComments = issue.get_comments()
issueLabels = ""
for issueLabel in issue.labels:
issueLabels += "; " + issueLabel.name
## add issue info into mysql
assigneeName = None
assigneeId = -1
issueClosedTime = None
if (issue.assignee):
assigneeId = issue.assignee.id
assigneeName = issue.assignee.name
if(issue.closed_at):
issueClosedTime = issue.closed_at.strftime("%Y-%m-%d %H:%M:%S")
cur.execute(
"insert into issue(repository_id,issue_id,created_at,closed_at,assignee_name,assignee_id,state,number,title,content,labels) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
(repoId, issue.id, issue.created_at.strftime("%Y-%m-%d %H:%M:%S"),
issueClosedTime, assigneeName, assigneeId, issue.state,
issue.number, issue.title.encode('utf-8'), issue.body.encode('utf-8'), issueLabels))
conn.commit()
print 'issue'
## add issueEvent into mysql
for issueEvent in issueEvents:
try:
issueEventActId = -1
issueEventActName = None
if (issueEvent.actor):
issueEventActId = issueEvent.actor.id
issueEventActName = issueEvent.actor.name
cur.execute(
"insert into issueevent(repository_id,issue_id,event_id,about_commit_id,event,created_at,actor_id,actor_name) values(%s,%s,%s,%s,%s,%s,%s,%s)",
(
repoId, issue.id, issueEvent.id, issueEvent.commit_id, issueEvent.event,
issueEvent.created_at.strftime("%Y-%m-%d %H:%M:%S"), issueEventActId,
issueEventActName))
conn.commit()
except Exception, e:
print e.message
print traceback.print_exc()
print 'insert issueEvent fail!' + repo.name
cur.close()
conn.close()
conn = MySQLdb.connect(host='localhost', user='root', passwd='root', port=3306,use_unicode=True, charset="utf8")
cur = conn.cursor()
conn.select_db('fdroid')
continue
print 'event'
for issueComment in issueComments:
issueCommentName = None
issueCommentId = -1
if (issueComment.user):
issueCommentId = issueComment.user.id
issueCommentName = issueComment.user.name
try:
cur.execute(
"insert into issuecomment(repository_id,issue_id,comment_id,author_id,author_name,content,created_at,updated_at) values(%s,%s,%s,%s,%s,%s,%s,%s)",
(repoId, issue.id, issueComment.id, issueCommentId, issueCommentName,
issueComment.body,
issueComment.created_at.strftime("%Y-%m-%d %H:%M:%S"),
issueComment.updated_at))
conn.commit()
except Exception, e:
print e.message
print traceback.print_exc()
print 'insert issueComment fail!' + repo.name
cur.close()
conn.close()
conn = MySQLdb.connect(host='localhost', user='root', passwd='root', port=3306,use_unicode=True, charset="utf8")
cur = conn.cursor()
conn.select_db('fdroid')
continue
print 'comment'
except Exception, e:
print e.message
print traceback.print_exc()
cur.close()
conn.close()
conn = MySQLdb.connect(host='localhost', user='root', passwd='root', port=3306,use_unicode=True, charset="utf8")
cur = conn.cursor()
conn.select_db('fdroid')
print 'insert issue fail!' + repo.name
continue
## get commit info from github
for commit in commitList:
try:
committerName = None
committerId = -1
if (commit.committer):
committerId = commit.committer.id
committerName = commit.committer.name
cur.execute(
"insert into gitcommit(commit_id,repository_id,author_id,author_name,message,additions,deletions) values(%s,%s,%s,%s,%s,%s,%s)",
(commit.sha, repoId, committerId, committerName, commit.commit.message.encode('utf-8'),
commit.stats.additions, commit.stats.deletions))
for commitParent in commit.parents:
cur.execute(
"insert into commitparent(commit_id,parent_id) values(%s,%s)",
(commit.sha, commitParent.sha))
conn.commit()
except Exception, e:
print e.message
print traceback.print_exc()
cur.close()
conn.close()
conn = MySQLdb.connect(host='localhost', user='root', passwd='root', port=3306,use_unicode=True, charset="utf8")
cur = conn.cursor()
conn.select_db('fdroid')
print 'insert commitinfo fail!' + repo.name
continue
print 'commit'
## get commit diff
# repo = Repo(localAdress)
# commit = repo.commit('master~0')
# prepared = [commit]
# i = 1
# committed_ids = []
# while len(prepared) > 0:
# commit = prepared.pop()
# committed_ids.append(commit.hexsha)
# try:
# commit_id = commit.hexsha
# diff = repo.git.diff(commit.parents[0], commit).encode()
# time = commit.committed_date
# cur.execute(
# "update gitcommit set commit_date=%s,diff=%s where commit_id = %s",
# (datetime.datetime.fromtimestamp(time), diff, commit_id))
# conn.commit()
# print i, commit_id
# except Exception, e:
# print e.message
# print traceback.print_exc()
# # print commit.message
# print i
# cur.close()
# conn.close()
# conn = MySQLdb.connect(host='localhost', user='root', passwd='root', port=3306,use_unicode=True, charset="utf8")
# cur = conn.cursor()
# conn.select_db('fdroid')
# for parent in commit.parents:
# if (parent not in prepared and parent.hexsha not in committed_ids):
# prepared.append(parent)
# prepared.sort(key=lambda x: x.committed_date)
# i = i + 1
# print i, "......."
conn.commit()
cur.close()
conn.close()
print 'complete'
except Exception,e:
print '------------------error'
ACCESS_USERNAME = '[email protected]'
ACCESS_PWD = "abcd123456"
client = Github(ACCESS_USERNAME, ACCESS_PWD)
r.rpush('ids', id)
r.rpush('sourcecodes', sourcecode)
print e.message
print traceback.print_exc()
|
[
"[email protected]"
] | |
bea7606e2dce71427ecd585839a735739be94357
|
615f83418985b80f2a2a47200acb08dfa9418fc7
|
/sales/forms.py
|
cd25ae516223049a1713bca426d4795827089c9f
|
[
"MIT"
] |
permissive
|
alejo8591/maker
|
a42b89ddc426da326a397765dc091db45dd50d8e
|
001e85eaf489c93b565efe679eb159cfcfef4c67
|
refs/heads/master
| 2016-09-06T19:36:01.864526 | 2013-03-23T06:54:21 | 2013-03-23T06:54:21 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 43,303 |
py
|
# encoding: utf-8
# Copyright 2013 maker
# License
# -*- coding: utf-8 -*-
"""
Sales module forms
"""
from django.shortcuts import get_object_or_404
from django import forms
from django.db.models import Q
from maker.sales.models import Product, SaleOrder, SaleSource, Lead, Opportunity, \
SaleStatus, OrderedProduct, Subscription, Currency
from maker.finance.models import Transaction
from maker.identities.models import Contact
from maker.core.models import Object, ModuleSetting, User, UpdateRecord
from django.core.urlresolvers import reverse
from maker.core.decorators import preprocess_form
from django.utils.translation import ugettext as _
preprocess_form()
standard_currencies = (
("AED","AED United Arab Emirates, Dirhams"),
("AFN","AFN Afghanistan, Afghanis"),
("ALL","ALL Albania, Leke"),
("AMD","AMD Armenia, Drams"),
("ANG","ANG Netherlands Antilles, Guilders (also called Florins)"),
("AOA","AOA Angola, Kwanza"),
("ARS","ARS Argentina, Pesos"),
("AUD","AUD Australia, Dollars"),
("AWG","AWG Aruba, Guilders (also called Florins)"),
("AZN","AZN Azerbaijan, New Manats"),
("BAM","BAM Bosnia and Herzegovina, Convertible Marka"),
("BBD","BBD Barbados, Dollars"),
("BDT","BDT Bangladesh, Taka"),
("BGN","BGN Bulgaria, Leva"),
("BHD","BHD Bahrain, Dinars"),
("BIF","BIF Burundi, Francs"),
("BMD","BMD Bermuda, Dollars"),
("BND","BND Brunei Darussalam, Dollars"),
("BOB","BOB Bolivia, Bolivianos"),
("BRL","BRL Brazil, Brazil Real"),
("BSD","BSD Bahamas, Dollars"),
("BTN","BTN Bhutan, Ngultrum"),
("BWP","BWP Botswana, Pulas"),
("BYR","BYR Belarus, Rubles"),
("BZD","BZD Belize, Dollars"),
("CAD","CAD Canada, Dollars"),
("CDF","CDF Congo/Kinshasa, Congolese Francs"),
("CHF","CHF Switzerland, Francs"),
("CLP","CLP Chile, Pesos"),
("CNY","CNY China, Yuan Renminbi"),
("COP","COP Colombia, Pesos"),
("CRC","CRC Costa Rica, Colones"),
("CUP","CUP Cuba, Pesos"),
("CVE","CVE Cape Verde, Escudos"),
("CZK","CZK Czech Republic, Koruny"),
("DJF","DJF Djibouti, Francs"),
("DKK","DKK Denmark, Kroner"),
("DOP","DOP Dominican Republic, Pesos"),
("DZD","DZD Algeria, Algeria Dinars"),
("EGP","EGP Egypt, Pounds"),
("ERN","ERN Eritrea, Nakfa"),
("ETB","ETB Ethiopia, Birr"),
("EUR","EUR Euro Member Countries, Euro"),
("FJD","FJD Fiji, Dollars"),
("FKP","FKP Falkland Islands (Malvinas), Pounds"),
("GBP","GBP United Kingdom, Pounds"),
("GEL","GEL Georgia, Lari"),
("GGP","GGP Guernsey, Pounds"),
("GHS","GHS Ghana, Cedis"),
("GIP","GIP Gibraltar, Pounds"),
("GMD","GMD Gambia, Dalasi"),
("GNF","GNF Guinea, Francs"),
("GTQ","GTQ Guatemala, Quetzales"),
("GYD","GYD Guyana, Dollars"),
("HKD","HKD Hong Kong, Dollars"),
("HNL","HNL Honduras, Lempiras"),
("HRK","HRK Croatia, Kuna"),
("HTG","HTG Haiti, Gourdes"),
("HUF","HUF Hungary, Forint"),
("IDR","IDR Indonesia, Rupiahs"),
("ILS","ILS Israel, New Shekels"),
("IMP","IMP Isle of Man, Pounds"),
("INR","INR India, Rupees"),
("IQD","IQD Iraq, Dinars"),
("IRR","IRR Iran, Rials"),
("ISK","ISK Iceland, Kronur"),
("JEP","JEP Jersey, Pounds"),
("JMD","JMD Jamaica, Dollars"),
("JOD","JOD Jordan, Dinars"),
("JPY","JPY Japan, Yen"),
("KES","KES Kenya, Shillings"),
("KGS","KGS Kyrgyzstan, Soms"),
("KHR","KHR Cambodia, Riels"),
("KMF","KMF Comoros, Francs"),
("KPW","KPW Korea (North), Won"),
("KRW","KRW Korea (South), Won"),
("KWD","KWD Kuwait, Dinars"),
("KYD","KYD Cayman Islands, Dollars"),
("KZT","KZT Kazakhstan, Tenge"),
("LAK","LAK Laos, Kips"),
("LBP","LBP Lebanon, Pounds"),
("LKR","LKR Sri Lanka, Rupees"),
("LRD","LRD Liberia, Dollars"),
("LSL","LSL Lesotho, Maloti"),
("LTL","LTL Lithuania, Litai"),
("LVL","LVL Latvia, Lati"),
("LYD","LYD Libya, Dinars"),
("MAD","MAD Morocco, Dirhams"),
("MDL","MDL Moldova, Lei"),
("MGA","MGA Madagascar, Ariary"),
("MKD","MKD Macedonia, Denars"),
("MMK","MMK Myanmar (Burma), Kyats"),
("MNT","MNT Mongolia, Tugriks"),
("MOP","MOP Macau, Patacas"),
("MRO","MRO Mauritania, Ouguiyas"),
("MUR","MUR Mauritius, Rupees"),
("MVR","MVR Maldives (Maldive Islands), Rufiyaa"),
("MWK","MWK Malawi, Kwachas"),
("MXN","MXN Mexico, Pesos"),
("MYR","MYR Malaysia, Ringgits"),
("MZN","MZN Mozambique, Meticais"),
("NAD","NAD Namibia, Dollars"),
("NGN","NGN Nigeria, Nairas"),
("NIO","NIO Nicaragua, Cordobas"),
("NOK","NOK Norway, Krone"),
("NPR","NPR Nepal, Nepal Rupees"),
("NZD","NZD New Zealand, Dollars"),
("OMR","OMR Oman, Rials"),
("PAB","PAB Panama, Balboa"),
("PEN","PEN Peru, Nuevos Soles"),
("PGK","PGK Papua New Guinea, Kina"),
("PHP","PHP Philippines, Pesos"),
("PKR","PKR Pakistan, Rupees"),
("PLN","PLN Poland, Zlotych"),
("PYG","PYG Paraguay, Guarani"),
("QAR","QAR Qatar, Rials"),
("RON","RON Romania, New Lei"),
("RSD","RSD Serbia, Dinars"),
("RUB","RUB Russia, Rubles"),
("RWF","RWF Rwanda, Rwanda Francs"),
("SAR","SAR Saudi Arabia, Riyals"),
("SBD","SBD Solomon Islands, Dollars"),
("SCR","SCR Seychelles, Rupees"),
("SDG","SDG Sudan, Pounds"),
("SEK","SEK Sweden, Kronor"),
("SGD","SGD Singapore, Dollars"),
("SHP","SHP Saint Helena, Pounds"),
("SLL","SLL Sierra Leone, Leones"),
("SOS","SOS Somalia, Shillings"),
("SPL","SPL Seborga, Luigini"),
("SRD","SRD Suriname, Dollars"),
("STD","STD Sao Tome and Principe, Dobras"),
("SVC","SVC El Salvador, Colones"),
("SYP","SYP Syria, Pounds"),
("SZL","SZL Swaziland, Emalangeni"),
("THB","THB Thailand, Baht"),
("TJS","TJS Tajikistan, Somoni"),
("TMM","TMM Turkmenistan, Manats"),
("TND","TND Tunisia, Dinars"),
("TOP","TOP Tonga, Pa'anga"),
("TRY","TRY Turkey, New Lira"),
("TTD","TTD Trinidad and Tobago, Dollars"),
("TVD","TVD Tuvalu, Tuvalu Dollars"),
("TWD","TWD Taiwan, New Dollars"),
("TZS","TZS Tanzania, Shillings"),
("UAH","UAH Ukraine, Hryvnia"),
("UGX","UGX Uganda, Shillings"),
("USD","USD United States of America, Dollars"),
("UYU","UYU Uruguay, Pesos"),
("UZS","UZS Uzbekistan, Sums"),
("VEF","VEF Venezuela, Bolivares Fuertes"),
("VND","VND Viet Nam, Dong"),
("VUV","VUV Vanuatu, Vatu"),
("WST","WST Samoa, Tala"),
("XAF","XAF Communaute Financiere Africaine BEAC, Francs"),
("XAG","XAG Silver, Ounces"),
("XAU","XAU Gold, Ounces"),
("XCD","XCD East Caribbean Dollars"),
("XDR","XDR International Monetary Fund (IMF) Special Drawing Rights"),
("XOF","XOF Communaute Financiere Africaine BCEAO, Francs"),
("XPD","XPD Palladium Ounces"),
("XPF","XPF Comptoirs Francais du Pacifique Francs"),
("XPT","XPT Platinum, Ounces"),
("YER","YER Yemen, Rials"),
("ZAR","ZAR South Africa, Rand"),
("ZMK","ZMK Zambia, Kwacha"),
("ZWD","ZWD Zimbabwe, Zimbabwe Dollars")
)
dict_currencies = dict(standard_currencies)
class SettingsForm(forms.Form):
""" Administration settings form """
default_currency = forms.ModelChoiceField(label=_('Base Currency'), queryset=Currency.objects)
default_lead_status = forms.ModelChoiceField(label=_('Default Lead Status'), queryset=[])
default_opportunity_status = forms.ModelChoiceField(label=_('Default Opportunity Status'), queryset=[])
default_order_status = forms.ModelChoiceField(label=_('Default Order Status'), queryset=[])
default_order_source = forms.ModelChoiceField(label=_('Default Order Source'), queryset=[])
default_order_product = forms.ModelChoiceField(label=_('Default Order Product'), queryset=[], required=False)
order_fulfil_status = forms.ModelChoiceField(label=_('Order Fulfilment Status'), queryset=[])
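    # the queryset=[] values above are placeholders; the real permission-filtered
    # querysets are assigned per user in __init__ below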
def __init__(self, user, *args, **kwargs):
"Sets choices and initial value"
super(SettingsForm, self).__init__(*args, **kwargs)
self.fields['default_lead_status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_leads=True))
self.fields['default_opportunity_status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_opportunities=True))
self.fields['default_order_status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_sales=True))
self.fields['default_order_source'].queryset = Object.filter_permitted(user,
SaleSource.objects.all())
self.fields['order_fulfil_status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_sales=True))
self.fields['default_order_product'].queryset = Object.filter_permitted(user,
Product.objects.filter(active=True))
# Translation
self.fields['default_currency'].label = _('Base Currency')
self.fields['default_lead_status'].label = _('Default Lead Status')
self.fields['default_opportunity_status'].label = _('Default Opportunity Status')
self.fields['default_order_status'].label = _('Default Order Status')
self.fields['default_order_source'].label = _('Default Order Source')
self.fields['default_order_product'].label = _('Default Order Product')
self.fields['order_fulfil_status'].label = _('Order Fulfilment Status')
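        # each default below is read back from ModuleSetting; the bare try/except
        # blocks fall back silently when a setting has not been saved yet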
try:
self.fields['default_currency'].queryset = Currency.objects
self.fields['default_currency'].initial = Currency.objects.get(is_default__exact=True)
self.fields['default_currency'].widget.attrs.update({'popuplink': reverse('sales_currency_add')})
except:
pass
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_opportunity_status')[0]
default_opportunity_status = SaleStatus.objects.get(pk=long(conf.value))
self.fields['default_opportunity_status'].initial = default_opportunity_status.id
except:
pass
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_lead_status')[0]
default_lead_status = SaleStatus.objects.get(pk=long(conf.value))
self.fields['default_lead_status'].initial = default_lead_status.id
except:
pass
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_order_status')[0]
default_order_status = SaleStatus.objects.get(pk=long(conf.value))
self.fields['default_order_status'].initial = default_order_status.id
except:
pass
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_order_source')[0]
default_order_source = SaleSource.objects.get(pk=long(conf.value))
self.fields['default_order_source'].initial = default_order_source.id
except:
pass
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_order_product')[0]
default_order_product = Product.objects.get(pk=long(conf.value))
self.fields['default_order_product'].initial = default_order_product.id
except:
pass
try:
conf = ModuleSetting.get_for_module('maker.sales', 'order_fulfil_status')[0]
order_fulfil_status = SaleStatus.objects.get(pk=long(conf.value))
self.fields['order_fulfil_status'].initial = order_fulfil_status.id
except:
pass
def save(self):
"Form processor"
fields = self.fields
try:
for field in fields:
if self.cleaned_data[field]:
if field == 'default_currency':
ModuleSetting.set_for_module('default_currency',
self.cleaned_data['default_currency'],
'maker.sales')
currency = Currency.objects.get(pk=self.cleaned_data['default_currency'])
currency.is_default = True
currency.save()
else:
ModuleSetting.set_for_module(field, self.cleaned_data[field].id,
'maker.sales')
return True
except:
return False
class MassActionForm(forms.Form):
""" Mass action form for Orders """
status = forms.ModelChoiceField(queryset=[], required=False)
assignedto = forms.ModelChoiceField(queryset=[], required=False )
delete = forms.ChoiceField(label=_("Delete"), choices=(('', '-----'), ('delete', _('Delete Completely')),
('trash', _('Move to Trash'))), required=False)
instance = None
def __init__(self, user, *args, **kwargs):
if 'instance' in kwargs:
self.instance = kwargs['instance']
del kwargs['instance']
super(MassActionForm, self).__init__(*args, **kwargs)
self.fields['status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_sales=True),
mode='x')
self.fields['status'].label = _("Status:")
self.fields['delete'] = forms.ChoiceField(label=_("Delete"), choices=(('', '-----'),
('delete', _('Delete Completely')),
('trash', _('Move to Trash'))), required=False)
self.fields['assignedto'].queryset = User.objects
self.fields['assignedto'].label = _("Assign To:")
#self.fields['assignedto'].widget.attrs.update({'class': 'autocomplete',
# 'callback': reverse('identities_ajax_user_lookup')})
def save(self, *args, **kwargs):
"Process form"
if self.instance:
if self.is_valid():
if self.cleaned_data['status']:
self.instance.status = self.cleaned_data['status']
if self.cleaned_data['assignedto']:
self.instance.assigned.add(self.cleaned_data['assignedto'])
self.instance.save()
if self.cleaned_data['delete']:
if self.cleaned_data['delete'] == 'delete':
self.instance.delete()
if self.cleaned_data['delete'] == 'trash':
self.instance.trash = True
self.instance.save()
class LeadMassActionForm(forms.Form):
""" Mass action form for Orders """
status = forms.ModelChoiceField(queryset=[], required=False)
assignedto = forms.ModelChoiceField(queryset=[], required=False )
instance = None
def __init__(self, user, *args, **kwargs):
if 'instance' in kwargs:
self.instance = kwargs['instance']
del kwargs['instance']
super(LeadMassActionForm, self).__init__(*args, **kwargs)
self.fields['status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_leads=True),
mode='x')
self.fields['status'].label = _("Status:")
self.fields['assignedto'].queryset = User.objects
self.fields['assignedto'].label = _("Assign To:")
# self.fields['assignedto'].widget.attrs.update({'class': 'autocomplete',
# 'callback': reverse('identities_ajax_user_lookup')})
def save(self, *args, **kwargs):
"Process form"
if self.instance:
if self.is_valid():
if self.cleaned_data['status']:
self.instance.status = self.cleaned_data['status']
if self.cleaned_data['assignedto']:
self.instance.assigned.add(self.cleaned_data['assignedto'])
self.instance.save()
class OpportunityMassActionForm(forms.Form):
""" Mass action form for Orders """
status = forms.ModelChoiceField(queryset=[], required=False)
assignedto = forms.ModelChoiceField(queryset=[], required=False )
instance = None
def __init__(self, user, *args, **kwargs):
if 'instance' in kwargs:
self.instance = kwargs['instance']
del kwargs['instance']
super(OpportunityMassActionForm, self).__init__(*args, **kwargs)
self.fields['status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_opportunities=True),
mode='x')
self.fields['status'].label = _("Status:")
self.fields['assignedto'].queryset = User.objects
self.fields['assignedto'].label = _("Assign To:")
# self.fields['assignedto'].widget.attrs.update({'class': 'autocomplete',
# 'callback': reverse('identities_ajax_user_lookup')})
def save(self, *args, **kwargs):
"Process form"
if self.instance:
if self.is_valid():
if self.cleaned_data['status']:
self.instance.status = self.cleaned_data['status']
if self.cleaned_data['assignedto']:
self.instance.assigned.add(self.cleaned_data['assignedto'])
self.instance.save()
class ProductMassActionForm(forms.Form):
""" Mass action form for Products """
active = forms.ChoiceField(label=_("Action"), choices=(('', '-------'),('active','Mark as Active'),
('inactive','Mark as Inactive')), required=False)
instance = None
def __init__(self, user, *args, **kwargs):
if 'instance' in kwargs:
self.instance = kwargs['instance']
del kwargs['instance']
super(ProductMassActionForm, self).__init__(*args, **kwargs)
# Translation
self.fields['active'].label = _("Action")
def save(self, *args, **kwargs):
"Process form"
if self.instance:
if self.is_valid():
if self.cleaned_data['active'] == 'active':
self.instance.active = True
if self.cleaned_data['active'] == 'inactive':
self.instance.active = False
self.instance.save()
class SaleStatusForm(forms.ModelForm):
""" Status form """
name = forms.CharField(widget=forms.TextInput(attrs={'size':'40'}))
def __init__(self, user, *args, **kwargs):
super(SaleStatusForm, self).__init__(*args, **kwargs)
self.fields['name'].label = _("Name")
self.fields['use_leads'].label = _("Enabled for Leads")
self.fields['use_opportunities'].label = _("Enabled for Opportunities")
self.fields['use_sales'].label = _("Enabled for Sales")
self.fields['active'].label = _("Active")
self.fields['hidden'].label = _("Hidden")
self.fields['details'].label = _("Details")
self.fields['active'].initial = True
class Meta:
"Sales Status Form"
model = SaleStatus
fields = ('name', 'use_leads', 'use_opportunities', 'use_sales', 'active', 'hidden', 'details')
class SaleSourceForm(forms.ModelForm):
""" Status form """
name = forms.CharField(widget=forms.TextInput(attrs={'size':'40'}))
def __init__(self, user, *args, **kwargs):
super(SaleSourceForm, self).__init__(*args, **kwargs)
self.fields['active'].initial = True
self.fields['name'].label = _("Name")
self.fields['active'].label = _("Active")
self.fields['details'].label = _("Details")
class Meta:
"Sale Source Form"
model = SaleSource
fields = ('name', 'active', 'details')
class ProductForm(forms.ModelForm):
""" Product form """
name = forms.CharField(widget=forms.TextInput(attrs={'size':'40'}))
def __init__(self, user, parent=None, *args, **kwargs):
super(ProductForm, self).__init__(*args, **kwargs)
self.fields['supplier'].queryset = Object.filter_permitted(user, Contact.objects)
self.fields['supplier'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('identities_ajax_contact_lookup')})
self.fields['supplier'].widget.attrs.update({'popuplink': reverse('identities_contact_add')})
self.fields['supplier'].label = _("Supplier")
self.fields['active'].initial = True
self.fields['active'].label = _("Active")
manager = Product.objects.filter(active=True)
if 'instance' in kwargs:
instance = kwargs['instance']
manager = manager.exclude(Q(parent=instance) & Q(pk=instance.id))
self.fields['parent'].queryset = Object.filter_permitted(user, manager, mode='x')
if parent:
self.fields['parent'].initial = get_object_or_404(Product, pk=parent)
self.fields['parent'].label = _("Parent")
self.fields['product_type'].label = _("Product type")
self.fields['code'].label = _("Code")
self.fields['supplier_code'].label = _("Supplier code")
self.fields['buy_price'].label = _("Buy price")
self.fields['sell_price'].label = _("Sell price")
self.fields['stock_quantity'].label = _("Stock quantity")
self.fields['runout_action'].label = _("Runout action")
self.fields['details'].label = _("Details")
class Meta:
"ProductForm"
model = Product
fields = ('name', 'parent', 'product_type', 'code', 'supplier', 'supplier_code', 'buy_price',
'sell_price', 'stock_quantity', 'active', 'runout_action', 'details')
class ProductFilterForm(forms.ModelForm):
""" Ticket Filters definition """
def __init__(self, user, skip=[], *args, **kwargs):
super(ProductFilterForm, self).__init__(*args, **kwargs)
self.fields['product_type'].queryset = Object.filter_permitted(user,
Product.objects.filter(active=True))
self.fields['product_type'].required = False
self.fields['product_type'].label = _("Product type")
self.fields['supplier'].queryset = Object.filter_permitted(user, Contact.objects)
self.fields['supplier'].required = False
self.fields['supplier'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('identities_ajax_contact_lookup')})
self.fields['supplier'].label = _("Supplier")
self.fields['active'].required = False
self.fields['active'].initial = True
self.fields['active'].label = _("Active")
class Meta:
"Product Filter Form"
model = Product
fields = ('product_type', 'supplier', 'active')
class UpdateRecordForm(forms.ModelForm):
"UpdateRecord form"
def __init__(self, *args, **kwargs):
super(UpdateRecordForm, self).__init__(*args, **kwargs)
self.fields['body'].label = _("Details")
self.fields['body'].required = True
class Meta:
"UpdateRecordForm"
model = UpdateRecord
fields = ['body']
class OrderedProductForm(forms.ModelForm):
""" Add New Ordered Product """
def __init__(self, user, order, *args, **kwargs):
super(OrderedProductForm, self).__init__(*args, **kwargs)
self.fields['subscription'].queryset = Object.filter_permitted(user, Subscription.objects)
self.fields['subscription'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('sales_ajax_subscription_lookup')})
self.fields['subscription'].widget.attrs.update({'popuplink': reverse('sales_subscription_add')})
self.fields['subscription'].label = _("Subscription")
self.fields['product'].queryset = Object.filter_permitted(user, Product.objects.filter(active=True))
if user.is_admin('maker.sales'):
self.fields['product'].widget.attrs.update({'popuplink': reverse('sales_product_add')})
self.fields['product'].label = _("Product")
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_order_product')[0]
# AJAX to set the initial rate as the currency converted value of product sell price
self.fields['product'].initial = long(conf.value)
except:
pass
# Tax
self.fields['tax'].widget.attrs.update({'popuplink': reverse('finance_tax_add')})
# TODO: rate
# self.fields['rate_display'].label = _("Rate")
# self.fields['rate_display'].help_text = order.currency.code
self.fields['quantity'].label = _("Quantity")
self.fields['quantity'].initial = 1
self.fields['discount'].label = _("Discount")
self.fields['discount'].help_text = "%"
def save(self, *args, **kwargs):
"Set Rate"
instance = super(OrderedProductForm, self).save(commit=False)
if 'product' in self.cleaned_data and self.cleaned_data['product']:
instance.rate = self.cleaned_data['product'].sell_price
instance.rate_display = instance.rate
return instance
class Meta:
"OrderedProductForm"
model = OrderedProduct
fields = ('product', 'quantity', 'subscription', 'tax', 'discount', 'description')
class SubscriptionForm(forms.ModelForm):
""" Add New Subscription """
def __init__(self, user, *args, **kwargs):
super(SubscriptionForm, self).__init__(*args, **kwargs)
del self.fields['cycle_end']
self.fields['product'].queryset = Object.filter_permitted(user, Product.objects)
self.fields['product'].label = _("Product")
self.fields['client'].queryset = Object.filter_permitted(user, Contact.objects)
self.fields['client'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('identities_ajax_contact_lookup')})
self.fields['client'].widget.attrs.update({'popuplink': reverse('identities_contact_add')})
self.fields['client'].label = _("Client")
self.fields['start'].widget.attrs.update({'class': 'datepicker'})
self.fields['start'].label = _("Start")
self.fields['expiry'].widget.attrs.update({'class': 'datepicker'})
self.fields['expiry'].label = _("Expiry")
if 'instance' in kwargs:
self.instance = kwargs['instance']
self.fields['start'].widget.attrs['readonly'] = True
del kwargs['instance']
self.fields['active'].initial = True
self.fields['active'].label = _("Active")
self.fields['cycle_period'].label = _("Cycle period")
self.fields['details'].label = _("Details")
class Meta:
"Subscription Form"
model = Subscription
fields = ('client', 'product', 'start', 'expiry', 'cycle_period', 'cycle_end', 'active', 'details')
class OrderForm(forms.ModelForm):
""" Order form """
def __init__(self, user, lead = None, opportunity = None, *args, **kwargs):
super(OrderForm, self).__init__(*args, **kwargs)
self.fields['reference'].required = False
self.fields['reference'].label = _("Reference")
if hasattr(self, 'instance') and not self.instance.reference:
next_ref = self.instance.get_next_reference()
if next_ref:
self.fields['reference'].initial = next_ref
self.fields['client'].queryset = Object.filter_permitted(user, Contact.objects)
self.fields['client'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('identities_ajax_contact_lookup')})
self.fields['client'].widget.attrs.update({'popuplink': reverse('identities_contact_add')})
self.fields['client'].label = _("Client")
self.fields['source'].queryset = Object.filter_permitted(user, SaleSource.objects.filter(active=True))
self.fields['source'].label = _("Source")
# Currency
self.fields['currency'].label = _('Currency')
instance = getattr(self, 'instance', None)
if instance and instance.id:
del self.fields['currency']
else:
self.fields['currency'].widget.attrs.update({'popuplink': reverse('finance_currency_add')})
self.fields['currency'].initial = Currency.objects.get(is_default=True)
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_order_source')[0]
self.fields['source'].initial = long(conf.value)
except:
pass
self.fields['status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_sales=True))
self.fields['status'].label = _("Status")
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_order_status')[0]
self.fields['status'].initial = long(conf.value)
except:
pass
if opportunity:
self.fields['opportunity'].queryset = Object.filter_permitted(user, Opportunity.objects)
self.fields['opportunity'].label = _("Opportunity")
self.fields['opportunity'].initial = opportunity.id
self.fields['client'].initial = opportunity.contact_id
self.fields['source'].initial = opportunity.source_id
self.fields['assigned'].initial = [i.id for i in opportunity.assigned.only('id')]
else:
del self.fields['opportunity']
if lead:
self.fields['client'].initial = lead.contact_id
self.fields['source'].initial = lead.source_id
self.fields['assigned'].initial = [i.id for i in lead.assigned.only('id')]
self.fields['assigned'].help_text = ""
self.fields['assigned'].label = _("Assigned to")
self.fields['assigned'].widget.attrs.update({'class': 'multicomplete',
'callback': reverse('identities_ajax_user_lookup')})
self.fields['datetime'].label = _("Date")
self.fields['datetime'].widget.attrs.update({'class': 'datetimepicker'})
self.fields['details'].label = _("Details")
class Meta:
"Sale Order Form"
model = SaleOrder
fields = ('reference', 'client', 'opportunity', 'currency', 'source',
'assigned', 'status', 'datetime', 'details')
class OrderFilterForm(forms.ModelForm):
""" Order Filters definition """
paid = forms.ChoiceField(choices=((None,'-----'), ('paid', _("Paid in full")), ('unpaid', _("Pending Payments"))), required=False)
def __init__(self, user, skip=[], *args, **kwargs):
super(OrderFilterForm, self).__init__(*args, **kwargs)
if 'status' in skip:
del self.fields['status']
else:
self.fields['status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_sales=True))
self.fields['status'].required = False
self.fields['status'].label = _("Status")
self.fields['paid'].label = _("Payment Status")
self.fields['client'].queryset = Object.filter_permitted(user, Contact.objects)
self.fields['client'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('identities_ajax_contact_lookup')})
self.fields['client'].required = False
self.fields['client'].label = _("Client")
self.fields['source'].queryset = Object.filter_permitted(user, SaleSource.objects.filter(active=True))
self.fields['source'].required = False
self.fields['source'].label = _("Source")
self.fields['assigned'].label = _("Assigned")
self.fields['assigned'].widget.attrs.update({'class': 'multicomplete',
'callback': reverse('identities_ajax_user_lookup')})
if 'assigned' in skip:
del self.fields['assigned']
else:
self.fields['assigned'].help_text = ""
class Meta:
"Order Filter Form"
model = SaleOrder
fields = ('client', 'source', 'assigned', 'status')
class LeadForm(forms.ModelForm):
""" Lead form """
def __init__(self, user, *args, **kwargs):
super(LeadForm, self).__init__(*args, **kwargs)
self.fields['contact'].queryset = Object.filter_permitted(user, Contact.objects)
self.fields['contact'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('identities_ajax_contact_lookup')})
self.fields['contact'].widget.attrs.update({'popuplink': reverse('identities_contact_add')})
self.fields['contact'].label = _("Contact")
self.fields['source'].queryset = Object.filter_permitted(user, SaleSource.objects.filter(active=True))
self.fields['source'].label = _("Source")
self.fields['products_interested'].queryset = Object.filter_permitted(user, Product.objects)
self.fields['products_interested'].help_text = ""
self.fields['products_interested'].widget.attrs.update({'popuplink': reverse('sales_product_add')})
self.fields['products_interested'].label = _("Products interested")
self.fields['assigned'].help_text = ""
self.fields['assigned'].label = _("Assigned to")
self.fields['assigned'].widget.attrs.update({'class': 'multicomplete',
'callback': reverse('identities_ajax_user_lookup')})
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_order_product')[0]
self.fields['products_interested'].initial = [long(conf.value)]
except:
pass
self.fields['status'].queryset = Object.filter_permitted(user, SaleStatus.objects.filter(use_leads=True))
self.fields['status'].label = _("Status")
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_lead_status')[0]
self.fields['status'].initial = long(conf.value)
except:
pass
self.fields['contact_method'].label = _("Contact method")
self.fields['details'].label = _("Details")
class Meta:
"Lead Form"
model = Lead
fields = ('contact', 'source', 'products_interested', 'contact_method',
'assigned', 'status', 'details')
class LeadFilterForm(forms.ModelForm):
""" Ticket Filters definition """
def __init__(self, user, skip=[], *args, **kwargs):
super(LeadFilterForm, self).__init__(*args, **kwargs)
self.fields['contact'].queryset = Object.filter_permitted(user, Contact.objects)
self.fields['contact'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('identities_ajax_contact_lookup')})
self.fields['contact'].required = False
self.fields['contact'].label = _("Contact")
self.fields['products_interested'].queryset = Object.filter_permitted(user, Product.objects)
self.fields['products_interested'].required = False
self.fields['products_interested'].help_text = ""
self.fields['products_interested'].label = _("Products interested")
self.fields['source'].queryset = Object.filter_permitted(user,
SaleSource.objects.filter(active=True))
self.fields['source'].required = False
self.fields['source'].label = _("Source")
self.fields['status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_leads=True))
self.fields['status'].required = False
self.fields['status'].label = _("Status")
self.fields['contact_method'].required = False
self.fields['contact_method'].label = _("Contact method")
class Meta:
"Lead Filter Form"
model = Lead
fields = ('contact', 'source', 'products_interested', 'contact_method', 'status')
class OpportunityForm(forms.ModelForm):
""" Opportunity form """
def __init__(self, user, lead, *args, **kwargs):
super(OpportunityForm, self).__init__(*args, **kwargs)
self.fields['lead'].queryset = Object.filter_permitted(user, Lead.objects)
self.fields['contact'].queryset = Object.filter_permitted(user, Contact.objects)
self.fields['contact'].widget.attrs.update({'popuplink': reverse('identities_contact_add')})
self.fields['contact'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('identities_ajax_contact_lookup')})
self.fields['products_interested'].queryset = Object.filter_permitted(user, Product.objects)
self.fields['products_interested'].widget.attrs.update({'popuplink': reverse('sales_product_add')})
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_order_product')[0]
self.fields['products_interested'].initial = [long(conf.value)]
except:
pass
self.fields['source'].queryset = Object.filter_permitted(user,
SaleSource.objects.filter(active=True))
self.fields['status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_opportunities=True))
self.fields['assigned'].widget.attrs.update({'class': 'multicomplete',
'callback': reverse('identities_ajax_user_lookup')})
try:
conf = ModuleSetting.get_for_module('maker.sales', 'default_opportunity_status')[0]
self.fields['status'].initial = long(conf.value)
except:
pass
if lead:
self.fields['lead'].initial = lead.id
self.fields['contact'].initial = lead.contact_id
self.fields['products_interested'].initial = [i.id for i in lead.products_interested.only('id')]
self.fields['source'].initial = lead.source_id
self.fields['assigned'].initial = [i.id for i in lead.assigned.only('id')]
else:
del self.fields['lead']
self.fields['products_interested'].help_text = ""
self.fields['assigned'].help_text = ""
self.fields['expected_date'].widget.attrs.update({'class': 'datepicker'})
self.fields['closed_date'].widget.attrs.update({'class': 'datepicker'})
self.fields['contact'].label = _("Contact")
self.fields['products_interested'].label = _("Products interested")
self.fields['source'].label = _("Source")
self.fields['expected_date'].label = _("Expected date")
self.fields['closed_date'].label = _("Closed date")
self.fields['assigned'].label = _("Assigned to")
self.fields['amount_display'].label = _("Amount")
self.fields['amount_currency'].label = _("Currency")
self.fields['amount_currency'].widget.attrs.update({'popuplink': reverse('finance_currency_add')})
self.fields['amount_currency'].initial = Currency.objects.get(is_default=True)
self.fields['probability'].label = _("Probability")
self.fields['status'].label = _("Status")
self.fields['details'].label = _("Details")
class Meta:
"Opportunity Form"
model = Opportunity
fields = ('lead', 'contact', 'products_interested', 'source',
'expected_date', 'closed_date', 'assigned', 'amount_currency', 'amount_display', 'probability', 'status', 'details')
class OpportunityFilterForm(forms.ModelForm):
""" Opportunity Filters """
def __init__(self, user, skip=[], *args, **kwargs):
super(OpportunityFilterForm, self).__init__(*args, **kwargs)
self.fields['contact'].queryset = Object.filter_permitted(user, Contact.objects)
self.fields['contact'].widget.attrs.update({'class': 'autocomplete',
'callback': reverse('identities_ajax_contact_lookup')})
self.fields['contact'].required = False
self.fields['contact'].label = _("Contact")
self.fields['source'].queryset = Object.filter_permitted(user,
SaleSource.objects.filter(active=True))
self.fields['source'].required = False
self.fields['source'].label = _("Source")
self.fields['products_interested'].queryset = Object.filter_permitted(user,
Product.objects.filter(active=True))
self.fields['products_interested'].required = False
self.fields['products_interested'].help_text = ""
self.fields['products_interested'].label = _("Products interested")
self.fields['status'].queryset = Object.filter_permitted(user,
SaleStatus.objects.filter(use_opportunities=True))
self.fields['status'].required = False
self.fields['status'].label = _("Status")
class Meta:
"Opportunity Filter Form"
model = Opportunity
fields = ('contact', 'products_interested', 'source', 'status')
|
[
"[email protected]"
] | |
296867493cf551a5404fef89aabc4a5b7d4f615b
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/tree-big-2596.py
|
697a6e67ab606c514e991a7cf4f6bf70f29f748e
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 23,292 |
py
|
# Binary-search trees
class TreeNode(object):
value:int = 0
left:"TreeNode" = None
right:"TreeNode" = None
def insert(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode(x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode(x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode2(object):
value:int = 0
value2:int = 0
left:"TreeNode2" = None
left2:"TreeNode2" = None
right:"TreeNode2" = None
right2:"TreeNode2" = None
def insert(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode3(object):
value:int = 0
value2:int = 0
value3:int = 0
left:"TreeNode3" = None
left2:"TreeNode3" = None
left3:"TreeNode3" = None
right:"TreeNode3" = None
right2:"TreeNode3" = None
right3:"TreeNode3" = None
def insert(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode4(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
left:"TreeNode4" = None
left2:"TreeNode4" = None
left3:"TreeNode4" = None
left4:"TreeNode4" = None
right:"TreeNode4" = None
right2:"TreeNode4" = None
right3:"TreeNode4" = None
right4:"TreeNode4" = None
def insert(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
                return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode5(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
value5:int = 0
left:"TreeNode5" = None
left2:"TreeNode5" = None
left3:"TreeNode5" = None
left4:"TreeNode5" = None
left5:"TreeNode5" = None
right:"TreeNode5" = None
right2:"TreeNode5" = None
right3:"TreeNode5" = None
right4:"TreeNode5" = None
right5:"TreeNode5" = None
def insert(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class Tree(object):
root:TreeNode = None
size:int = 0
def insert(self:"Tree", x:int) -> object:
if self.root is None:
self.root = makeNode(x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree2(object):
root:TreeNode2 = None
root2:TreeNode2 = None
size:int = 0
size2:int = 0
def insert(self:"Tree2", x:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree2", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree2", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree2", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree3(object):
root:TreeNode3 = None
root2:TreeNode3 = None
root3:TreeNode3 = None
size:int = 0
size2:int = 0
size3:int = 0
def insert(self:"Tree3", x:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree3", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree3", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree3", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree4(object):
root:TreeNode4 = None
root2:TreeNode4 = None
root3:TreeNode4 = None
root4:TreeNode4 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
def insert(self:"Tree4", x:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree4", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree4", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree4", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree5(object):
root:TreeNode5 = None
root2:TreeNode5 = None
root3:TreeNode5 = None
root4:TreeNode5 = None
root5:TreeNode5 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
size5:int = 0
def insert(self:"Tree5", x:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree5", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree5", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree5", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
b:TreeNode = None
b = TreeNode()
b.value = x
return b
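# the makeNode2..makeNode5 constructors below ignore their extra arguments and
# store only the first value (this is generated benchmark code)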
def makeNode2(x: int, x2: int) -> TreeNode2:
b:TreeNode2 = None
b2:TreeNode2 = None
b = TreeNode2()
b.value = x
return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
b:TreeNode3 = None
b2:TreeNode3 = None
b3:TreeNode3 = None
b = TreeNode3()
b.value = x
return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
b:TreeNode4 = None
b2:TreeNode4 = None
b3:TreeNode4 = None
b4:TreeNode4 = None
b = TreeNode4()
b.value = x
return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
b:TreeNode5 = None
b2:TreeNode5 = None
b3:TreeNode5 = None
b4:TreeNode5 = None
b5:TreeNode5 = None
b = TreeNode5()
b.value = x
return b
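# Note: the numbered makeNodeN/insertN/containsN variants ignore every
# argument past the first, and the extra locals (b2..b5) are never read;
# the duplication appears to be deliberate padding that scales the
# benchmark's code size rather than its behaviour.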
# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
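# The driver below generates pseudo-random keys with a multiplicative
# congruential step, k <- (k * 37813) % 37831, so it needs no random module.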
# Crunch
t = Tree()
while i < n:
t.insert(k)
k = (k * 37813) % 37831
if i % c != 0:
t.insert(i)
i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
if t.contains(i):
print(i)
|
[
"[email protected]"
] | |
b1c8b3133b7b734b65f124e5a32c497835087c81
|
a6b1f5ac26861dc7efd002cf3795e70f58eb76fe
|
/train/train_mnist.py
|
053bd48b3ce0c55507dc14da6b01f32919eed938
|
[] |
no_license
|
BingzheWu/BayesNet
|
6ad0e2acb1f901aaa4cd758fc815bf6cfb03742b
|
7c3f87486c67e42d3c2a64548dde4a0edcb73bb3
|
refs/heads/master
| 2020-03-29T22:34:58.261930 | 2019-07-12T09:31:07 | 2019-07-12T09:31:07 | 150,428,694 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,009 |
py
|
import torch
import sys
sys.path.append('.')
from utils import write_loss_scalars
from utils import write_weight_histograms
from models.minist_models import BayesLeNet
from dataset import data_factory
from train.train_utils import parse_args, bayes_batch_processor, get_logger
from mmcv import Config
from mmcv.runner import Runner
from torch.nn.parallel import DataParallel
from dataset.data_factory import make_dataset, make_dataloader
from models.model_factory import model_creator
def main():
args = parse_args()
cfg = Config.fromfile(args.cfg_file)
print(cfg)
logger = get_logger(cfg.log_level)
    if args.launcher == 'none':
        dist = False
    else:
        # distributed launchers are not implemented; fail loudly rather than
        # leaving `dist` undefined for the check below
        raise NotImplementedError('only --launcher none is supported')
if dist:
pass
else:
num_workers = cfg.data_workers
batch_size = cfg.batch_size
train_sampler = None
val_sampler = None
shuffle = True
train_dataset = make_dataset(cfg, True)
train_loader = make_dataloader(train_dataset, batch_size, num_workers, shuffle, train_sampler)
val_dataset = make_dataset(cfg, False)
val_loader = make_dataloader(val_dataset, batch_size, num_workers, shuffle, val_sampler)
model = model_creator(cfg)
if dist:
pass
else:
#model = DataParallel(model, device_ids=[0, 1]).cuda()
device = 'cuda'
model = model.to(device)
#model = DataParallel(model, device_ids=[0,1]).cuda()
runner = Runner(
model,
bayes_batch_processor,
None,
cfg.work_dir,
log_level=cfg.log_level
)
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
]
)
runner.register_training_hooks(
lr_config=cfg.lr_config,
optimizer_config=None,
checkpoint_config=cfg.checkpoint_config,
log_config=log_config
)
workflow = [('train', 1), ('val', 1)]
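    # mmcv workflow semantics: each (mode, n) tuple runs n epochs in that
    # mode, so this alternates 1 train epoch with 1 val epoch until
    # cfg.total_epochs training epochs have completed.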
runner.run([train_loader, val_loader], workflow, cfg.total_epochs)
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
887b1abd80116fb103cbdd1166213d2360b0017e
|
0e3d1d4107a2664663c6906697faa3a41c7d4f57
|
/src/ui/view/opalview/aui2.py
|
2f91af53c015e51a85cd8dc3d4d5b752f2743740
|
[] |
no_license
|
struts2spring/Opal
|
c0915452fd4eab4c0bd4690cac346be8e6dc3f60
|
c59c03baa10c915ca7c05196ed411da4a26ff49d
|
refs/heads/master
| 2021-01-23T21:01:39.977412 | 2017-07-09T16:49:44 | 2017-07-09T16:49:44 | 48,582,382 | 0 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,353 |
py
|
import wx
import wx.aui
# import images # contains toolbar icons
class MyFrame(wx.Frame):
def __init__(self):
wx.Frame.__init__(self, None, wx.ID_ANY,
"AUI Tutorial",
size=(600,400))
self._mgr = wx.aui.AuiManager()
self._mgr.SetManagedWindow(self)
notebook = wx.aui.AuiNotebook(self)
nb_panel = TabPanel(notebook)
my_panel = MyPanel(self)
notebook.AddPage(nb_panel, "First Tab", False)
self._mgr.AddPane(notebook,
wx.aui.AuiPaneInfo().Name("notebook-content").
CenterPane().PaneBorder(False))
self._mgr.AddPane(my_panel,
wx.aui.AuiPaneInfo().Name("txtctrl-content").
CenterPane().PaneBorder(False))
self._mgr.GetPane("notebook-content").Show().Top().Layer(0).Row(0).Position(0)
self._mgr.GetPane("txtctrl-content").Show().Bottom().Layer(1).Row(0).Position(0)
self._mgr.Update()
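# AUI layout note: both panes are first added as CenterPane(), then the
# GetPane(...).Top()/.Bottom() calls above re-dock them so the notebook sits
# over the text panel; Layer/Row/Position order the docked panes.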
class MyPanel(wx.Panel):
"""
My panel with a toolbar and richtextctrl
"""
def __init__(self,parent):
wx.Panel.__init__(self,parent=parent,id=wx.ID_ANY)
sizer = wx.BoxSizer(wx.VERTICAL)
toolbar = wx.ToolBar(self,-1)
        # stock quit art stands in for the icon from the (commented-out)
        # images module
        toolbar.AddLabelTool(wx.ID_EXIT, '', wx.ArtProvider.GetBitmap(wx.ART_QUIT))
self.Bind(wx.EVT_TOOL, self.OnExit, id=wx.ID_EXIT)
toolbar.Realize()
sizer.Add(toolbar,proportion=0,flag=wx.ALL | wx.ALIGN_TOP)
text = ""
txtctrl = wx.TextCtrl(self,-1, text, wx.Point(0, 0), wx.Size(150, 90),
wx.NO_BORDER | wx.TE_MULTILINE | wx.TE_READONLY|wx.HSCROLL)
sizer.Add(txtctrl,proportion=0,flag=wx.EXPAND)
self.SetSizer(sizer)
def OnExit(self,event):
self.Close()
class TabPanel(wx.Panel):
def __init__(self,parent):
wx.Panel.__init__(self,parent=parent,id=wx.ID_ANY)
sizer = wx.BoxSizer(wx.VERTICAL)
txtOne = wx.TextCtrl(self, wx.ID_ANY, "")
txtTwo = wx.TextCtrl(self, wx.ID_ANY, "")
sizer.Add(txtOne, 0, wx.ALL, 5)
sizer.Add(txtTwo, 0, wx.ALL, 5)
self.SetSizer(sizer)
if __name__ == "__main__":
app = wx.PySimpleApp()
frame = MyFrame()
frame.Show()
app.MainLoop()
|
[
"[email protected]"
] | |
f5b0d6e2ad6c275feacd9fd0fdd544bd3a7e7b88
|
12e27bcea0c43655f3c0c4690c67de2feaf8edad
|
/apptools/apptools-android-tests/apptools/manifest_xwalk_permissions.py
|
db57069587b5188da4e69dc2d3524a9f0d699666
|
[
"BSD-3-Clause"
] |
permissive
|
xzhan96/crosswalk-test-suite
|
b24288443463698cd60f74ff25b0e9b262d8d640
|
47710b138f4ed3498b40c2480811e24ff8d0435a
|
refs/heads/master
| 2021-01-20T23:05:45.268897 | 2016-02-04T06:34:19 | 2016-02-04T06:34:19 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,960 |
py
|
#!/usr/bin/env python
#
# Copyright (c) 2016 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Yun, Liu<[email protected]>
import unittest
import os
import comm
import shutil
from xml.etree import ElementTree
import json
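# Each test below packages an app with crosswalk-pkg, parses the generated
# AndroidManifest.xml, and collects the android:name attribute of every
# <uses-permission> element, e.g.:
#     <uses-permission android:name="android.permission.INTERNET"/>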
class TestCrosswalkApptoolsFunctions(unittest.TestCase):
def test_permission_default(self):
comm.setUp()
os.chdir(comm.XwalkPath)
comm.clear("org.xwalk.test")
os.mkdir("org.xwalk.test")
os.chdir('org.xwalk.test')
cmd = comm.HOST_PREFIX + comm.PackTools + \
"crosswalk-pkg --platforms=android --android=" + comm.ANDROID_MODE + ' --manifest="org.xwalk.test" --keep --crosswalk=' + comm.crosswalkzip + " ./"
(return_code, output) = comm.getstatusoutput(cmd)
projectDir = output[0].split(" * " + os.linesep)[-1].split(' ')[-1].strip(os.linesep)
root = ElementTree.parse(projectDir + "/prj/android/AndroidManifest.xml").getroot()
permission_attributes = root.findall('uses-permission')
name = []
for x in permission_attributes:
name.append(x.attrib.items()[0][1])
comm.clear("org.xwalk.test")
self.assertEquals(return_code, 0)
self.assertEquals(len(permission_attributes), 3)
self.assertIn("android.permission.ACCESS_NETWORK_STATE", name)
self.assertIn("android.permission.ACCESS_WIFI_STATE", name)
self.assertIn("android.permission.INTERNET", name)
def test_permission_name(self):
comm.setUp()
os.chdir(comm.XwalkPath)
comm.clear("org.xwalk.test")
os.mkdir("org.xwalk.test")
os.chdir('org.xwalk.test')
cmd = comm.HOST_PREFIX + comm.PackTools + \
"crosswalk-pkg --platforms=android --android=" + comm.ANDROID_MODE + " --keep --crosswalk=" + comm.crosswalkzip + " " + comm.ConstPath + "/../testapp/camera_permissions_enable/"
(return_code, output) = comm.getstatusoutput(cmd)
projectDir = output[0].split(" * " + os.linesep)[-1].split(' ')[-1].strip(os.linesep)
root = ElementTree.parse(projectDir + "/prj/android/AndroidManifest.xml").getroot()
permission_attributes = root.findall('uses-permission')
name = []
for x in permission_attributes:
name.append(x.attrib.items()[0][1])
comm.clear("org.xwalk.test")
self.assertEquals(return_code, 0)
self.assertEquals(len(permission_attributes), 4)
self.assertIn("android.permission.CAMERA", name)
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
fa9e8b9e1d471cf9ad80bcbe1a6ef93e1748b3e1
|
f1a9769b3589d802a4c26adfbe67915b920f3b49
|
/dj_rest_security/rest_demo/views.py
|
ba976ed64928bb0036c3eb37278aa73f5fbb6bd1
|
[] |
no_license
|
chavhanpunamchand/Django_REST_framework
|
ac8b04102439f153ee77f8572dded20aac02121f
|
c4d602c201dff4caec645049c733127f20c0fa57
|
refs/heads/main
| 2023-03-18T07:42:37.072865 | 2021-03-04T13:34:05 | 2021-03-04T13:34:05 | 336,339,394 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 457 |
py
|
#ReadOnlyModelViewSet
from rest_framework.viewsets import ModelViewSet
from .models import Emp
from .empserializer import EmpToJson
# from rest_framework.viewsets import GenericViewSet
# from rest_framework.mixins import DestroyModelMixin,UpdateModelMixin
# class MyOwnViewSet(GenericViewSet,DestroyModelMixin,UpdateModelMixin):
# pass
# model to json
class EmpCrudAPIs(ModelViewSet):
queryset = Emp.objects.all()
serializer_class = EmpToJson
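# A minimal sketch of how this viewset could be wired up in urls.py; the
# module placement and the 'emps' prefix are assumptions, not part of this
# repo:
#
#     from rest_framework.routers import DefaultRouter
#     router = DefaultRouter()
#     router.register('emps', EmpCrudAPIs)
#     urlpatterns = router.urls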
|
[
"[email protected]"
] | |
f940d633eef8179b017a5b3b1f99d2d04f3b6def
|
0e1e643e864bcb96cf06f14f4cb559b034e114d0
|
/Exps_7_v3/doc3d/Wyx_w_M_w_Sob_to_Wz_focus/IN_Sob_k15_EroM/Sob_k05_s001_EroM/pyr_Tcrop255_p20_j15/pyr_5s/L5/step10_a.py
|
365f3ade28e90b9af7f43cbf404b02405444c548
|
[] |
no_license
|
KongBOy/kong_model2
|
33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307
|
1af20b168ffccf0d5293a393a40a9fa9519410b2
|
refs/heads/master
| 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 139,845 |
py
|
#############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__)                                   ### path of the currently executing step10_b.py
code_exe_path_element = code_exe_path.split("\\")                            ### split the path; next we find which level kong_model2 sits at
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2")                      ### find which level kong_model2 sits at
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])          ### locate the kong_model2 dir
import sys                                                                   ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" code_dir:", code_dir)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer               ### the -1 converts a length into an index
# print(" kong_to_py_layer:", kong_to_py_layer)
if (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:]  ### [7:] was meant to strip the "step1x_" prefix; keeping a meaningful name seemed fine too, so it was changed to 0
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:]  ### [5:] was meant to strip the leading "mask_" (added only because a Python module name cannot start with a digit); the automatic ordering turned out acceptable, so it was changed to 0
elif(kong_to_py_layer >  3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print("    template_dir:", template_dir)                                   ### e.g. template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
exp_dir = template_dir
#############################################################################################################################################################################################################
from step06_a_datas_obj import *
from step09_5side_L5 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_moduless = [module for module in sys.modules if "step09" in module]
for rm_module in rm_moduless: del sys.modules[rm_module]
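# The two loops above drop this script's own dir from sys.path and evict any
# already-imported step09 modules, presumably so each step10 variant
# re-imports the step09 definitions local to it rather than a sibling
# experiment's copy.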
#############################################################################################################################################################################################################
'''
exp_dir is the name of the folder "one level above" result_dir! A nested exp_dir is fine too~
For example, with exp_dir = "6_mask_unet/your_chosen_name", every result_dir ends up under:
    6_mask_unet/your_chosen_name/result_a
    6_mask_unet/your_chosen_name/result_b
    6_mask_unet/your_chosen_name/...
'''
use_db_obj = type8_blender_kong_doc3d_in_W_gt_W_ch_norm_v2
use_loss_obj = [G_sobel_k5_erose_M_loss_info_builder.set_loss_target("UNet_Wz").copy()]
#############################################################
### Build an empty Exp_builder so result_analyze can draw blank plots
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
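# Every experiment below repeats the same builder chain; only the pyramid
# model object changes.  Schematically (model_obj stands for the
# per-experiment pyramid object; all other names are taken from the lines
# below):
#
#     exp = (Exp_builder()
#            .set_basic("train", use_db_obj, model_obj, use_loss_obj,
#                       exp_dir=exp_dir, code_exe_path=code_exe_path,
#                       describe_end=model_obj.kong_model.model_describe)
#            .set_train_args(epochs=1)
#            .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2,
#                                 it_down_step="half", it_down_fq=900)
#            .set_train_in_gt_use_range(use_in_range=Range(0, 1),
#                                       use_gt_range=Range(0, 1))
#            .set_result_name(result_name=""))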
##################################
### 1side1
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_1__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
##################################
### 1side2
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_2__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_2__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
##################################
### 1side3
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_3__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_3__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_3__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
##################################
### 1side4
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_4__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_4__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_4__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_1side_4__2side_4__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
##################################
### 1side5
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_5__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_5__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_5__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_1side_5__2side_4__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35  (see the count-check sketch after this block)
ch032_1side_5__2side_5__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5_4side_5_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
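# --- Editor's sketch: where the "OK n" counts in the comments above come from. ---
# Assumption (not stated in this file): each 2side=k block enumerates every valid
# (3side, 4side, 5side) triple with 1 <= 5side <= 4side <= 3side <= k, so the block
# sizes 1, 4, 10, 20, 35 are the tetrahedral numbers C(k+2, 3), i.e. the running
# sums of the triangular numbers 1, 3, 6, 10, 15 quoted in the "1 3 6 10 ..." rows.
def _expected_block_size(k):  # hypothetical helper, used only for this sanity check
    return k * (k + 1) * (k + 2) // 6  # C(k+2, 3)
assert [_expected_block_size(k) for k in range(1, 6)] == [1, 4, 10, 20, 35]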
##################################
### 5side6
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_1side_6__2side_1__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_1__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_1__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_1side_6__2side_2__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_1side_6__2side_3__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_1side_6__2side_4__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_1side_6__2side_5__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5_4side_5_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
# Triangular numbers 1 3 6 10 15 "21" 28 36 45 55 (the first six sum to 56)
# 2side6 OK: the 2side_6 block below defines 56 experiments
ch032_1side_6__2side_6__3side_1_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_1_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_1_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_2_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_2_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_2_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_2_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_2_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5_4side_5_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_1_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_1_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_1_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_2_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_2_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_3_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_3_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_3_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_4_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_4_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_4_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_4_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_5_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s1, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s1.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s2, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s2.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s3, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s3.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s4, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s4.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s5, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s5.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6_4side_6_5s6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900, it_save_fq=900 * 2, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="")
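# Naming scheme (an inference from the enumeration above, not documented here):
# ch032 appears to mean 32 base channels, and 1side_a__2side_b__3side_c_4side_d_5s<e>
# records the block count for each of the five pyramid levels, enumerated so
# that a >= b >= c >= d >= e.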
#############################################################
if __name__ == "__main__":
print("build exps cost time:", time.time() - start_time)
if len(sys.argv) < 2:
############################################################################################################
        ### Press F5 directly, or run python step10_b1_exp_obj_load_and_train_and_test.py with nothing after it! This keeps execution from falling through to the code below, which is meant for step10_b_subprocess.py
ch032_1side_1__2side_1__3side_1_4side_1_5s1.build().run()
# print('no argument')
sys.exit()
    ### The code below is for step10_b_subprocess.py; it is equivalent to typing python step10_b1_exp_obj_load_and_train_and_test.py <some_exp>.build().run() on the command line
eval(sys.argv[1])
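    ### Example invocation (a sketch; any Exp_builder name defined above works,
    ### e.g. the last one in the 2side_6 block):
    ###   python step10_b1_exp_obj_load_and_train_and_test.py "ch032_1side_6__2side_6__3side_6_4side_6_5s6.build().run()"
    ### eval() simply executes that string against the exp objects built in this module.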
|
[
"[email protected]"
] | |
892dc7ef6d0384a18b2c2d612fb7032dad0acffe
|
03a9facf029d3b63ef996f34395e407b2a9bafe0
|
/3rdParty/waf/waflib/extras/cabal.py
|
1604c753e68af733d796904002b8181b13744cd2
|
[
"MIT"
] |
permissive
|
MDudek-ICS/peach
|
737b29346685106c50035177f3e656661342d5b6
|
2bea2d524707a98d007ca39455a3db175b44e8ed
|
refs/heads/main
| 2023-03-06T19:10:48.312671 | 2021-02-17T22:00:09 | 2021-02-17T22:00:09 | 372,488,843 | 1 | 0 |
NOASSERTION
| 2021-05-31T11:51:38 | 2021-05-31T11:51:37 | null |
UTF-8
|
Python
| false | false | 5,307 |
py
|
#!/usr/bin/env python
# encoding: utf-8
# Anton Feldmann, 2012
# "Base for cabal"
import re
import time
from waflib import TaskGen, Task, Utils
from waflib.Configure import conf
from waflib.Task import always_run
from waflib.TaskGen import extension, feature, after, before, before_method
from waflib.Utils import threading
from shutil import rmtree
lock = threading.Lock()
registering = False
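# configure() locates the cabal and ghc-pkg programs and bootstraps a
# build-local GHC package database under <build>/package.conf.d, deleting and
# re-initialising it when the package.cache file is missing (a sign of a
# corrupt or partially-written database).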
def configure(self):
self.find_program('cabal', var='CABAL')
self.find_program('ghc-pkg', var='GHCPKG')
pkgconfd = self.bldnode.abspath() + '/package.conf.d'
self.env.PREFIX = self.bldnode.abspath() + '/dist'
self.env.PKGCONFD = pkgconfd
if self.root.find_node(pkgconfd + '/package.cache'):
self.msg('Using existing package database', pkgconfd, color='CYAN')
else:
pkgdir = self.root.find_dir(pkgconfd)
if pkgdir:
            self.msg('Deleting corrupt package database', pkgdir.abspath(), color='RED')
rmtree(pkgdir.abspath())
pkgdir = None
self.cmd_and_log([self.env.GHCPKG, 'init', pkgconfd])
        self.msg('Created package database', pkgconfd, color='YELLOW' if pkgdir else 'GREEN')
@extension('.cabal')
def process_cabal(self, node):
out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
package_node = node.change_ext('.package')
package_node = out_dir_node.find_or_declare(package_node.name)
build_node = node.parent.get_bld()
build_path = build_node.abspath()
config_node = build_node.find_or_declare('setup-config')
inplace_node = build_node.find_or_declare('package.conf.inplace')
config_task = self.create_task('cabal_configure', node)
config_task.cwd = node.parent.abspath()
config_task.depends_on = getattr(self, 'depends_on', '')
config_task.build_path = build_path
config_task.set_outputs(config_node)
build_task = self.create_task('cabal_build', config_node)
build_task.cwd = node.parent.abspath()
build_task.build_path = build_path
build_task.set_outputs(inplace_node)
copy_task = self.create_task('cabal_copy', inplace_node)
copy_task.cwd = node.parent.abspath()
copy_task.depends_on = getattr(self, 'depends_on', '')
copy_task.build_path = build_path
last_task = copy_task
task_list = [config_task, build_task, copy_task]
    if getattr(self, 'register', False):
register_task = self.create_task('cabal_register', inplace_node)
register_task.cwd = node.parent.abspath()
register_task.set_run_after(copy_task)
register_task.build_path = build_path
pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
pkgreg_task.cwd = node.parent.abspath()
pkgreg_task.set_run_after(register_task)
pkgreg_task.build_path = build_path
last_task = pkgreg_task
task_list += [register_task, pkgreg_task]
touch_task = self.create_task('cabal_touch', inplace_node)
touch_task.set_run_after(last_task)
touch_task.set_outputs(package_node)
touch_task.build_path = build_path
task_list += [touch_task]
return task_list
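# A minimal wscript sketch (the file and dependency names are hypothetical) of
# how a build script might drive this tool -- list the .cabal file as a source
# and optionally request registration into the shared package database:
#
#   def build(bld):
#       bld(source='mylib.cabal', register=True, depends_on='otherlib')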
def get_all_src_deps(node):
hs_deps = node.ant_glob('**/*.hs')
hsc_deps = node.ant_glob('**/*.hsc')
lhs_deps = node.ant_glob('**/*.lhs')
c_deps = node.ant_glob('**/*.c')
cpp_deps = node.ant_glob('**/*.cpp')
proto_deps = node.ant_glob('**/*.proto')
return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], [])
class Cabal(Task.Task):
def scan(self):
return (get_all_src_deps(self.generator.path), ())
class cabal_configure(Cabal):
run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
shell = True
def scan(self):
out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
return (deps, ())
class cabal_build(Cabal):
run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
shell = True
class cabal_copy(Cabal):
run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
shell = True
class cabal_register(Cabal):
run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
shell = True
class ghcpkg_register(Cabal):
run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
shell = True
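    # ghc-pkg updates against the shared package database are not safe to run
    # concurrently, so runnable_status()/post_run() use a module-level lock to
    # serialise registrations: while one register task runs, the others are
    # told ASK_LATER.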
def runnable_status(self):
global lock, registering
val = False
lock.acquire()
val = registering
lock.release()
if val:
return Task.ASK_LATER
ret = Task.Task.runnable_status(self)
if ret == Task.RUN_ME:
lock.acquire()
registering = True
lock.release()
return ret
def post_run(self):
global lock, registering
lock.acquire()
registering = False
lock.release()
return Task.Task.post_run(self)
class cabal_touch(Cabal):
run_str = 'touch ${TGT}'
|
[
"[email protected]"
] | |
beefceea334dfe704bf83329bb46863e9452c439
|
4d9bb8c45b2c65a0083bc7c0f6c8c9905ee09dd2
|
/flexx/pyscript/tests/test_parser1.py
|
bd3e6184d50acdb446111116eab7992d17da6a17
|
[
"BSD-2-Clause"
] |
permissive
|
fangbei/flexx
|
d291da4a58abe64f101a0adc5f7aaae965b35505
|
8a48e8800cbe79725d641ae53fcaad58e3f75d8a
|
refs/heads/master
| 2021-01-21T16:48:07.660320 | 2015-11-30T16:13:08 | 2015-11-30T16:13:08 | 47,130,369 | 0 | 0 | null | 2015-11-30T16:10:26 | 2015-11-30T16:10:26 | null |
UTF-8
|
Python
| false | false | 14,752 |
py
|
from pytest import raises
from flexx.util.testing import run_tests_if_main
from flexx.pyscript import JSError, py2js, evaljs, evalpy, Parser
from flexx import pyscript
def nowhitespace(s):
return s.replace('\n', '').replace('\t', '').replace(' ', '')
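# TestParser demonstrates the Parser hook convention exercised below: a method
# named function_<name> intercepts plain calls to <name>(), while
# method_<name> intercepts attribute calls like obj.<name>() and receives the
# base object.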
class TestParser(Parser):
def function_foo_foo(self, node):
return 'xxx'
def method_bar_bar(self, node, base):
return base
class TestTheParser:
def test_special_functions(self):
assert TestParser("foo_foo()").dump() == 'xxx;'
assert TestParser("bar_bar()").dump() == 'bar_bar();'
assert TestParser("xxx.bar_bar()").dump() == 'xxx;'
assert TestParser("xxx.foo_foo()").dump() == 'xxx.foo_foo();'
def test_exceptions(self):
raises(JSError, py2js, "foo(**kwargs)")
class TestExpressions:
""" Tests for single-line statements/expressions
"""
def test_special(self):
assert py2js('') == ''
assert py2js(' \n') == ''
def test_ops(self):
# Test code
assert py2js('2+3') == '2 + 3;' # Binary
assert py2js('2/3') == '2 / 3;'
assert py2js('not 2') == '!2;' # Unary
assert py2js('-(2+3)') == '-(2 + 3);'
assert py2js('True and False') == 'true && false;' # Boolean
        # No parentheses around names, numbers and strings
assert py2js('foo - bar') == "foo - bar;"
assert py2js('_foo3 - _bar4') == "_foo3 - _bar4;"
assert py2js('3 - 4') == "3 - 4;"
assert py2js('"abc" - "def"') == "'abc' - 'def';"
assert py2js("'abc' - 'def'") == "'abc' - 'def';"
assert py2js("'abc' - \"'def\"") == "'abc' - \"'def\";"
# But they should be if it gets more complex
assert py2js('foo - bar > 4') == "(foo - bar) > 4;"
# Test outcome
assert evalpy('2+3') == '5' # Binary
assert evalpy('6/3') == '2'
assert evalpy('4//3') == '1'
assert evalpy('2**8') == '256'
assert evalpy('not True') == 'false' # Unary
assert evalpy('- 3') == '-3'
assert evalpy('True and False') == 'false' # Boolean
assert evalpy('True or False') == 'true'
# Bug
assert evalpy('(9-3-3)/3') == '1'
# string formatting
assert evalpy('"%s" % "bar"') == 'bar'
assert evalpy('"-%s-" % "bar"') == '-bar-'
assert evalpy('"foo %s foo" % "bar"') == 'foo bar foo'
assert evalpy('"x %i" % 6') == 'x 6'
assert evalpy('"x %f" % 6') == 'x 6'
assert evalpy('"%s: %f" % ("value", 6)') == 'value: 6'
assert evalpy('"%r: %r" % ("value", 6)') == '"value": 6'
def test_overloaded_list_ops(self):
assert evalpy('[1, 2] + [3, 4]') == '[ 1, 2, 3, 4 ]'
assert evalpy('[3, 4] + [1, 2]') == '[ 3, 4, 1, 2 ]'
assert evalpy('"ab" + "cd"') == 'abcd'
assert evalpy('[3, 4] * 2') == '[ 3, 4, 3, 4 ]'
assert evalpy('2 * [3, 4]') == '[ 3, 4, 3, 4 ]'
assert evalpy('"ab" * 2') == 'abab'
assert evalpy('2 * "ab"') == 'abab'
assert evalpy('a = [1, 2]; a += [3, 4]; a') == '[ 1, 2, 3, 4 ]'
assert evalpy('a = [3, 4]; a += [1, 2]; a') == '[ 3, 4, 1, 2 ]'
assert evalpy('a = [3, 4]; a *= 2; a') == '[ 3, 4, 3, 4 ]'
assert evalpy('a = "ab"; a *= 2; a') == 'abab'
def test_comparisons(self):
assert py2js('4 > 3') == '4 > 3;'
assert py2js('4 is 3') == '4 === 3;'
assert evalpy('4 > 4') == 'false'
assert evalpy('4 >= 4') == 'true'
assert evalpy('4 < 3') == 'false'
assert evalpy('4 <= 4') == 'true'
assert evalpy('4 == 3') == 'false'
assert evalpy('4 != 3') == 'true'
assert evalpy('4 == "4"') == 'true' # yuck!
assert evalpy('4 is "4"') == 'false'
assert evalpy('4 is not "4"') == 'true'
assert evalpy('"c" in "abcd"') == 'true'
assert evalpy('"x" in "abcd"') == 'false'
assert evalpy('"x" not in "abcd"') == 'true'
assert evalpy('3 in [1,2,3,4]') == 'true'
assert evalpy('9 in [1,2,3,4]') == 'false'
assert evalpy('9 not in [1,2,3,4]') == 'true'
assert evalpy('"bar" in {"foo": 3}') == 'false'
assert evalpy('"foo" in {"foo": 3}') == 'true'
# was a bug
assert evalpy('not (1 is null and 1 is null)') == 'true'
def test_deep_comparisons(self):
# List
arr = '[(1,2), (3,4), (5,6), (1,2), (7,8)]\n'
assert evalpy('a=' + arr + '(1,2) in a') == 'true'
assert evalpy('a=' + arr + '(7,8) in a') == 'true'
assert evalpy('a=' + arr + '(3,5) in a') == 'false'
assert evalpy('a=' + arr + '3 in a') == 'false'
assert evalpy('(2, 3) == (2, 3)') == 'true'
assert evalpy('[2, 3] == [2, 3]') == 'true'
assert evalpy('a=' + arr + 'b=' + arr + 'a==b') == 'true'
# Dict
dct = '{"a":7, 3:"foo", "bar": 1, "9": 3}\n'
assert evalpy('d=' + dct + '"a" in d') == 'true'
assert evalpy('d=' + dct + '"3" in d') == 'true'
assert evalpy('d=' + dct + '3 in d') == 'true'
assert evalpy('d=' + dct + '"bar" in d') == 'true'
assert evalpy('d=' + dct + '9 in d') == 'true'
assert evalpy('d=' + dct + '"9" in d') == 'true'
assert evalpy('d=' + dct + '7 in d') == 'false'
assert evalpy('d=' + dct + '"1" in d') == 'false'
assert evalpy('{2: 3} == {"2": 3}') == 'true'
assert evalpy('dict(foo=7) == {"foo": 7}') == 'true'
assert evalpy('a=' + dct + 'b=' + dct + 'a==b') == 'true'
assert evalpy('{"foo": 1, "bar": 2}=={"bar": 2, "foo": 1}') == 'true'
assert evalpy('{"bar": 2, "foo": 1}=={"foo": 1, "bar": 2}') == 'true'
# Deeper
d1 = 'd1={"foo": [2, 3, {1:2,3:4,5:["aa", "bb"]}], "bar": None}\n'
d2 = 'd2={"bar": None, "foo": [2, 3, {5:["aa", "bb"],1:2,3:4}]}\n' # same
d3 = 'd3={"foo": [2, 3, {1:2,3:4,5:["aa", "b"]}], "bar": None}\n' # minus b
assert evalpy(d1+d2+d3+'d1 == d2') == 'true'
assert evalpy(d1+d2+d3+'d2 == d1') == 'true'
assert evalpy(d1+d2+d3+'d1 != d2') == 'false'
assert evalpy(d1+d2+d3+'d1 == d3') == 'false'
assert evalpy(d1+d2+d3+'d1 != d3') == 'true'
#
assert evalpy(d1+d2+d3+'d2 in [2, d1, 4]') == 'true'
assert evalpy(d1+d2+d3+'d2 in ("xx", d2, None)') == 'true'
assert evalpy(d1+d2+d3+'d2 not in (1, d3, 2)') == 'true'
assert evalpy(d1+d2+d3+'4 in [2, d1, 4]') == 'true'
def test_truthfulness_of_basic_types(self):
# Numbers
assert evalpy('"T" if (1) else "F"') == 'T'
assert evalpy('"T" if (0) else "F"') == 'F'
# Strings
assert evalpy('"T" if ("a") else "F"') == 'T'
assert evalpy('"T" if ("") else "F"') == 'F'
# None - undefined
assert evalpy('None is null') == 'true'
assert evalpy('None is undefined') == 'false'
assert evalpy('undefined is undefined') == 'true'
def test_truthfulness_of_array_and_dict(self):
# Arrays
assert evalpy('bool([1])') == 'true'
assert evalpy('bool([])') == 'false'
#
assert evalpy('"T" if ([1, 2, 3]) else "F"') == 'T'
assert evalpy('"T" if ([]) else "F"') == 'F'
#
assert evalpy('if [1]: "T"\nelse: "F"') == 'T'
assert evalpy('if []: "T"\nelse: "F"') == 'F'
#
assert evalpy('if [1] and 1: "T"\nelse: "F"') == 'T'
assert evalpy('if [] and 1: "T"\nelse: "F"') == 'F'
assert evalpy('if [] or 1: "T"\nelse: "F"') == 'T'
#
assert evalpy('[2] or 42') == '[ 2 ]'
assert evalpy('[] or 42') == '42'
# Dicts
assert evalpy('bool({1:2})') == 'true'
assert evalpy('bool({})') == 'false'
#
assert evalpy('"T" if ({"foo": 3}) else "F"') == 'T'
assert evalpy('"T" if ({}) else "F"') == 'F'
#
assert evalpy('if {1:2}: "T"\nelse: "F"') == 'T'
assert evalpy('if {}: "T"\nelse: "F"') == 'F'
#
assert evalpy('if {1:2} and 1: "T"\nelse: "F"') == 'T'
assert evalpy('if {} and 1: "T"\nelse: "F"') == 'F'
assert evalpy('if {} or 1: "T"\nelse: "F"') == 'T'
#
assert evalpy('{1:2} or 42') == "{ '1': 2 }"
assert evalpy('{} or 42') == '42'
# Eval extra types
assert evalpy('null or 42') == '42'
assert evalpy('ArrayBuffer(4) or 42') != '42'
# No bools
assert py2js('if foo: pass').count('_truthy')
assert py2js('if foo.length: pass').count('_truthy') == 0
assert py2js('if 3: pass').count('_truthy') == 0
assert py2js('if True: pass').count('_truthy') == 0
assert py2js('if a == 3: pass').count('_truthy') == 0
assert py2js('if a is 3: pass').count('_truthy') == 0
def test_indexing_and_slicing(self):
c = 'a = [1, 2, 3, 4, 5]\n'
# Indexing
assert evalpy(c + 'a[2]') == '3'
assert evalpy(c + 'a[-2]') == '4'
# Slicing
assert evalpy(c + 'a[:]') == '[ 1, 2, 3, 4, 5 ]'
assert evalpy(c + 'a[1:-1]') == '[ 2, 3, 4 ]'
def test_assignments(self):
assert py2js('foo = 3') == 'var foo;\nfoo = 3;' # with var
assert py2js('foo.bar = 3') == 'foo.bar = 3;' # without var
code = py2js('foo = 3; bar = 4') # define both
assert code.count('var') == 1
code = py2js('foo = 3; foo = 4') # only define first time
assert code.count('var') == 1
code = py2js('foo = bar = 3') # multiple assignment
assert 'foo = bar = 3' in code
assert 'var bar, foo' in code # alphabetic order
# self -> this
assert py2js('self') == 'this;'
assert py2js('self.foo') == 'this.foo;'
# Indexing
assert evalpy('a=[0,0]\na[0]=2\na[1]=3\na', False) == '[2,3]'
# Tuple unpacking
        assert evalpy('x=[1,2,3]\na, b, c = x\nb', False) == '2'
        assert evalpy('a,b,c = [1,2,3]\nc,b,a = a,b,c\n[a,b,c]', False) == '[3,2,1]'
# Class variables don't get a var
code = py2js('class Foo:\n bar=3\n bar = bar + 1')
assert code.count('bar') == 3
assert code.count('Foo.prototype.bar') == 3
def test_aug_assignments(self):
# assign + bin op
assert evalpy('x=5; x+=1; x') == '6'
assert evalpy('x=5; x/=2; x') == '2.5'
assert evalpy('x=5; x**=2; x') == '25'
assert evalpy('x=5; x//=2; x') == '2'
def test_basic_types(self):
assert py2js('True') == 'true;'
assert py2js('False') == 'false;'
assert py2js('None') == 'null;'
assert py2js('"bla\\"bla"') == "'bla\"bla';"
assert py2js('3') == '3;'
assert py2js('3.1415') == '3.1415;'
assert py2js('[1,2,3]') == '[1, 2, 3];'
assert py2js('(1,2,3)') == '[1, 2, 3];'
assert py2js('{foo: 3, bar: 4}') == '{foo: 3, bar: 4};'
def test_ignore_import_of_compiler(self):
modname = pyscript.__name__
assert py2js('from %s import x, y, z\n42' % modname) == '42;'
def test_import(self):
# time
import time
assert abs(float(evalpy('import time; time.time()')) - time.time()) < 0.5
assert abs(float(evalpy('from time import time; time()')) - time.time()) < 0.5
assert evalpy('import time; t0=time.perf_counter(); t1=time.perf_counter(); (t1-t0)').startswith('0.0')
# sys
assert 'pyscript' in evalpy('import sys; sys.version').lower()
    def test_function_call(self):
jscode = 'var foo = function (x, y) {return x+y;};'
assert evaljs(jscode + py2js('foo(2,2)')) == '4'
assert evaljs(jscode + py2js('foo("so ", True)')) == 'so true'
assert evaljs(jscode + py2js('a=[1,2]; foo(*a)')) == '3'
assert evaljs(jscode + py2js('a=[1,2]; foo(7, *a)')) == '8'
        # Test super (is tested for real in test_parser3.py)
assert evalpy('d={"_base_class": console};d._base_class.log(4)') == '4'
assert evalpy('d={"_base_class": console};d._base_class.log()') == ''
jscode = 'var foo = function () {return this.val};'
jscode += 'var d = {"foo": foo, "val": 7};\n'
assert evaljs(jscode + py2js('d["foo"]()')) == '7'
assert evaljs(jscode + py2js('d["foo"](*[3, 4])')) == '7'
def test_instantiation(self):
# Test creating instances
assert 'new' in py2js('a = Foo()')
assert 'new' in py2js('a = x.Foo()')
assert 'new' not in py2js('a = foo()')
assert 'new' not in py2js('a = _foo()')
assert 'new' not in py2js('a = _Foo()')
assert 'new' not in py2js('a = this.Foo()')
assert 'new' not in py2js('a = JSON.stringify(x)')
jscode = 'function Foo() {this.x = 3}\nvar x=1;\n'
assert evaljs(jscode + py2js('a=Foo()\nx')) == '1'
# Existing classes and functions are used to determine if a
# call is an instantiation
assert 'new' in py2js('class foo:pass\na = foo()')
assert 'new' not in py2js('class foo:pass\ndef foo():pass\na = foo()')
assert 'new' not in py2js('def foo():pass\nclass foo:pass\na = foo()')
#
assert 'new' not in py2js('def Foo():pass\na = Foo()')
assert 'new' in py2js('def Foo():pass\nclass Foo:pass\na = Foo()')
assert 'new' in py2js('class Foo:pass\ndef Foo():pass\na = Foo()')
def test_pass(self):
assert py2js('pass') == ''
def test_delete(self):
assert evalpy('d={}\nd.foo=3\n\nd') == "{ foo: 3 }"
assert evalpy('d={}\nd.foo=3\ndel d.foo\nd') == '{}'
assert evalpy('d={}\nd.foo=3\nd.bar=3\ndel d.foo\nd') == '{ bar: 3 }'
assert evalpy('d={}\nd.foo=3\nd.bar=3\ndel d.foo, d["bar"]\nd') == '{}'
class TestModules:
def test_module(self):
code = Parser('"docstring"\nfoo=3;bar=4;_priv=0;', 'mymodule').dump()
# Has docstring
assert code.count('// docstring') == 1
# Test that global variables exist
assert evaljs(code+'mymodule.foo+mymodule.bar') == '7'
# And privates do not
assert evaljs(code+'mymodule._priv===undefined') == 'true'
run_tests_if_main()
# if __name__ == '__main__':
# t = TestClasses()
# t.test_class()
# t.test_inheritance()
|
[
"[email protected]"
] | |
b463ae2a7833d5227feb616aa4858b1c0db24b6d
|
82b946da326148a3c1c1f687f96c0da165bb2c15
|
/sdk/python/pulumi_azure_native/servicefabricmesh/outputs.py
|
f3e65901f80b57e9814384a33bad4839d7cc9054
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
morrell/pulumi-azure-native
|
3916e978382366607f3df0a669f24cb16293ff5e
|
cd3ba4b9cb08c5e1df7674c1c71695b80e443f08
|
refs/heads/master
| 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 |
Apache-2.0
| 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null |
UTF-8
|
Python
| false | false | 90,709 |
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
__all__ = [
'AddRemoveReplicaScalingMechanismResponse',
'ApplicationScopedVolumeCreationParametersServiceFabricVolumeDiskResponse',
'ApplicationScopedVolumeResponse',
'AutoScalingPolicyResponse',
'AutoScalingResourceMetricResponse',
'AverageLoadScalingTriggerResponse',
'AzureInternalMonitoringPipelineSinkDescriptionResponse',
'ContainerCodePackagePropertiesResponse',
'ContainerEventResponse',
'ContainerInstanceViewResponse',
'ContainerLabelResponse',
'ContainerStateResponse',
'DiagnosticsDescriptionResponse',
'DiagnosticsRefResponse',
'EndpointPropertiesResponse',
'EndpointRefResponse',
'EnvironmentVariableResponse',
'GatewayDestinationResponse',
'HttpConfigResponse',
'HttpHostConfigResponse',
'HttpRouteConfigResponse',
'HttpRouteMatchHeaderResponse',
'HttpRouteMatchPathResponse',
'HttpRouteMatchRuleResponse',
'ImageRegistryCredentialResponse',
'NetworkRefResponse',
'NetworkResourcePropertiesResponse',
'ReliableCollectionsRefResponse',
'ResourceLimitsResponse',
'ResourceRequestsResponse',
'ResourceRequirementsResponse',
'SecretResourcePropertiesResponse',
'ServiceResourceDescriptionResponse',
'SettingResponse',
'TcpConfigResponse',
'VolumeProviderParametersAzureFileResponse',
'VolumeReferenceResponse',
]
@pulumi.output_type
class AddRemoveReplicaScalingMechanismResponse(dict):
"""
Describes the horizontal auto scaling mechanism that adds or removes replicas (containers or container groups).
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "maxCount":
suggest = "max_count"
elif key == "minCount":
suggest = "min_count"
elif key == "scaleIncrement":
suggest = "scale_increment"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AddRemoveReplicaScalingMechanismResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AddRemoveReplicaScalingMechanismResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AddRemoveReplicaScalingMechanismResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
kind: str,
max_count: int,
min_count: int,
scale_increment: int):
"""
Describes the horizontal auto scaling mechanism that adds or removes replicas (containers or container groups).
:param str kind: Enumerates the mechanisms for auto scaling.
Expected value is 'AddRemoveReplica'.
:param int max_count: Maximum number of containers (scale up won't be performed above this number).
:param int min_count: Minimum number of containers (scale down won't be performed below this number).
:param int scale_increment: Each time auto scaling is performed, this number of containers will be added or removed.
"""
pulumi.set(__self__, "kind", 'AddRemoveReplica')
pulumi.set(__self__, "max_count", max_count)
pulumi.set(__self__, "min_count", min_count)
pulumi.set(__self__, "scale_increment", scale_increment)
@property
@pulumi.getter
def kind(self) -> str:
"""
Enumerates the mechanisms for auto scaling.
Expected value is 'AddRemoveReplica'.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="maxCount")
def max_count(self) -> int:
"""
Maximum number of containers (scale up won't be performed above this number).
"""
return pulumi.get(self, "max_count")
@property
@pulumi.getter(name="minCount")
def min_count(self) -> int:
"""
Minimum number of containers (scale down won't be performed below this number).
"""
return pulumi.get(self, "min_count")
@property
@pulumi.getter(name="scaleIncrement")
def scale_increment(self) -> int:
"""
Each time auto scaling is performed, this number of containers will be added or removed.
"""
return pulumi.get(self, "scale_increment")
@pulumi.output_type
class ApplicationScopedVolumeCreationParametersServiceFabricVolumeDiskResponse(dict):
"""
Describes parameters for creating application-scoped volumes provided by Service Fabric Volume Disks
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sizeDisk":
suggest = "size_disk"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ApplicationScopedVolumeCreationParametersServiceFabricVolumeDiskResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ApplicationScopedVolumeCreationParametersServiceFabricVolumeDiskResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ApplicationScopedVolumeCreationParametersServiceFabricVolumeDiskResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
kind: str,
size_disk: str,
description: Optional[str] = None):
"""
Describes parameters for creating application-scoped volumes provided by Service Fabric Volume Disks
:param str kind: Specifies the application-scoped volume kind.
Expected value is 'ServiceFabricVolumeDisk'.
        :param str size_disk: Volume size.
        :param str description: User-readable description of the volume.
"""
pulumi.set(__self__, "kind", 'ServiceFabricVolumeDisk')
pulumi.set(__self__, "size_disk", size_disk)
if description is not None:
pulumi.set(__self__, "description", description)
@property
@pulumi.getter
def kind(self) -> str:
"""
Specifies the application-scoped volume kind.
Expected value is 'ServiceFabricVolumeDisk'.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="sizeDisk")
def size_disk(self) -> str:
"""
Volume size
"""
return pulumi.get(self, "size_disk")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
        User-readable description of the volume.
"""
return pulumi.get(self, "description")
@pulumi.output_type
class ApplicationScopedVolumeResponse(dict):
"""
Describes a volume whose lifetime is scoped to the application's lifetime.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "creationParameters":
suggest = "creation_parameters"
elif key == "destinationPath":
suggest = "destination_path"
elif key == "readOnly":
suggest = "read_only"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ApplicationScopedVolumeResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ApplicationScopedVolumeResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ApplicationScopedVolumeResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
creation_parameters: 'outputs.ApplicationScopedVolumeCreationParametersServiceFabricVolumeDiskResponse',
destination_path: str,
name: str,
read_only: Optional[bool] = None):
"""
Describes a volume whose lifetime is scoped to the application's lifetime.
:param 'ApplicationScopedVolumeCreationParametersServiceFabricVolumeDiskResponse' creation_parameters: Describes parameters for creating application-scoped volumes.
:param str destination_path: The path within the container at which the volume should be mounted. Only valid path characters are allowed.
:param str name: Name of the volume being referenced.
:param bool read_only: The flag indicating whether the volume is read only. Default is 'false'.
"""
pulumi.set(__self__, "creation_parameters", creation_parameters)
pulumi.set(__self__, "destination_path", destination_path)
pulumi.set(__self__, "name", name)
if read_only is not None:
pulumi.set(__self__, "read_only", read_only)
@property
@pulumi.getter(name="creationParameters")
def creation_parameters(self) -> 'outputs.ApplicationScopedVolumeCreationParametersServiceFabricVolumeDiskResponse':
"""
Describes parameters for creating application-scoped volumes.
"""
return pulumi.get(self, "creation_parameters")
@property
@pulumi.getter(name="destinationPath")
def destination_path(self) -> str:
"""
The path within the container at which the volume should be mounted. Only valid path characters are allowed.
"""
return pulumi.get(self, "destination_path")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the volume being referenced.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="readOnly")
def read_only(self) -> Optional[bool]:
"""
The flag indicating whether the volume is read only. Default is 'false'.
"""
return pulumi.get(self, "read_only")
@pulumi.output_type
class AutoScalingPolicyResponse(dict):
"""
Describes the auto scaling policy
"""
def __init__(__self__, *,
mechanism: 'outputs.AddRemoveReplicaScalingMechanismResponse',
name: str,
trigger: 'outputs.AverageLoadScalingTriggerResponse'):
"""
Describes the auto scaling policy
:param 'AddRemoveReplicaScalingMechanismResponse' mechanism: The mechanism that is used to scale when auto scaling operation is invoked.
:param str name: The name of the auto scaling policy.
:param 'AverageLoadScalingTriggerResponse' trigger: Determines when auto scaling operation will be invoked.
"""
pulumi.set(__self__, "mechanism", mechanism)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "trigger", trigger)
@property
@pulumi.getter
def mechanism(self) -> 'outputs.AddRemoveReplicaScalingMechanismResponse':
"""
The mechanism that is used to scale when auto scaling operation is invoked.
"""
return pulumi.get(self, "mechanism")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the auto scaling policy.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def trigger(self) -> 'outputs.AverageLoadScalingTriggerResponse':
"""
Determines when auto scaling operation will be invoked.
"""
return pulumi.get(self, "trigger")
@pulumi.output_type
class AutoScalingResourceMetricResponse(dict):
"""
Describes the resource that is used for triggering auto scaling.
"""
def __init__(__self__, *,
kind: str,
name: str):
"""
Describes the resource that is used for triggering auto scaling.
:param str kind: Enumerates the metrics that are used for triggering auto scaling.
Expected value is 'Resource'.
:param str name: Name of the resource.
"""
pulumi.set(__self__, "kind", 'Resource')
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def kind(self) -> str:
"""
Enumerates the metrics that are used for triggering auto scaling.
Expected value is 'Resource'.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the resource.
"""
return pulumi.get(self, "name")
@pulumi.output_type
class AverageLoadScalingTriggerResponse(dict):
"""
Describes the average load trigger used for auto scaling.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "lowerLoadThreshold":
suggest = "lower_load_threshold"
elif key == "scaleIntervalInSeconds":
suggest = "scale_interval_in_seconds"
elif key == "upperLoadThreshold":
suggest = "upper_load_threshold"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AverageLoadScalingTriggerResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AverageLoadScalingTriggerResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AverageLoadScalingTriggerResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
kind: str,
lower_load_threshold: float,
metric: 'outputs.AutoScalingResourceMetricResponse',
scale_interval_in_seconds: int,
upper_load_threshold: float):
"""
Describes the average load trigger used for auto scaling.
:param str kind: Enumerates the triggers for auto scaling.
Expected value is 'AverageLoad'.
        :param float lower_load_threshold: Lower load threshold (if the average load is below this threshold, the service will scale down).
        :param 'AutoScalingResourceMetricResponse' metric: Description of the metric that is used for scaling.
        :param int scale_interval_in_seconds: Scale interval that indicates how often this trigger will be checked.
        :param float upper_load_threshold: Upper load threshold (if the average load is above this threshold, the service will scale up).
"""
pulumi.set(__self__, "kind", 'AverageLoad')
pulumi.set(__self__, "lower_load_threshold", lower_load_threshold)
pulumi.set(__self__, "metric", metric)
pulumi.set(__self__, "scale_interval_in_seconds", scale_interval_in_seconds)
pulumi.set(__self__, "upper_load_threshold", upper_load_threshold)
@property
@pulumi.getter
def kind(self) -> str:
"""
Enumerates the triggers for auto scaling.
Expected value is 'AverageLoad'.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="lowerLoadThreshold")
def lower_load_threshold(self) -> float:
"""
        Lower load threshold (if the average load is below this threshold, the service will scale down).
"""
return pulumi.get(self, "lower_load_threshold")
@property
@pulumi.getter
def metric(self) -> 'outputs.AutoScalingResourceMetricResponse':
"""
Description of the metric that is used for scaling.
"""
return pulumi.get(self, "metric")
@property
@pulumi.getter(name="scaleIntervalInSeconds")
def scale_interval_in_seconds(self) -> int:
"""
        Scale interval that indicates how often this trigger will be checked.
"""
return pulumi.get(self, "scale_interval_in_seconds")
@property
@pulumi.getter(name="upperLoadThreshold")
def upper_load_threshold(self) -> float:
"""
        Upper load threshold (if the average load is above this threshold, the service will scale up).
"""
return pulumi.get(self, "upper_load_threshold")
@pulumi.output_type
class AzureInternalMonitoringPipelineSinkDescriptionResponse(dict):
"""
Diagnostics settings for Geneva.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "accountName":
suggest = "account_name"
elif key == "autoKeyConfigUrl":
suggest = "auto_key_config_url"
elif key == "fluentdConfigUrl":
suggest = "fluentd_config_url"
elif key == "maConfigUrl":
suggest = "ma_config_url"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureInternalMonitoringPipelineSinkDescriptionResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureInternalMonitoringPipelineSinkDescriptionResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureInternalMonitoringPipelineSinkDescriptionResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
kind: str,
account_name: Optional[str] = None,
auto_key_config_url: Optional[str] = None,
description: Optional[str] = None,
fluentd_config_url: Optional[Any] = None,
ma_config_url: Optional[str] = None,
name: Optional[str] = None,
namespace: Optional[str] = None):
"""
Diagnostics settings for Geneva.
:param str kind: The kind of DiagnosticsSink.
Expected value is 'AzureInternalMonitoringPipeline'.
:param str account_name: Azure Internal monitoring pipeline account.
:param str auto_key_config_url: Azure Internal monitoring pipeline autokey associated with the certificate.
:param str description: A description of the sink.
:param Any fluentd_config_url: Azure Internal monitoring agent fluentd configuration.
:param str ma_config_url: Azure Internal monitoring agent configuration.
        :param str name: Name of the sink. This value is referenced by DiagnosticsReferenceDescription.
:param str namespace: Azure Internal monitoring pipeline account namespace.
"""
pulumi.set(__self__, "kind", 'AzureInternalMonitoringPipeline')
if account_name is not None:
pulumi.set(__self__, "account_name", account_name)
if auto_key_config_url is not None:
pulumi.set(__self__, "auto_key_config_url", auto_key_config_url)
if description is not None:
pulumi.set(__self__, "description", description)
if fluentd_config_url is not None:
pulumi.set(__self__, "fluentd_config_url", fluentd_config_url)
if ma_config_url is not None:
pulumi.set(__self__, "ma_config_url", ma_config_url)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
@property
@pulumi.getter
def kind(self) -> str:
"""
The kind of DiagnosticsSink.
Expected value is 'AzureInternalMonitoringPipeline'.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="accountName")
def account_name(self) -> Optional[str]:
"""
Azure Internal monitoring pipeline account.
"""
return pulumi.get(self, "account_name")
@property
@pulumi.getter(name="autoKeyConfigUrl")
def auto_key_config_url(self) -> Optional[str]:
"""
Azure Internal monitoring pipeline autokey associated with the certificate.
"""
return pulumi.get(self, "auto_key_config_url")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
A description of the sink.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="fluentdConfigUrl")
def fluentd_config_url(self) -> Optional[Any]:
"""
Azure Internal monitoring agent fluentd configuration.
"""
return pulumi.get(self, "fluentd_config_url")
@property
@pulumi.getter(name="maConfigUrl")
def ma_config_url(self) -> Optional[str]:
"""
Azure Internal monitoring agent configuration.
"""
return pulumi.get(self, "ma_config_url")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
        Name of the sink. This value is referenced by DiagnosticsReferenceDescription.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def namespace(self) -> Optional[str]:
"""
Azure Internal monitoring pipeline account namespace.
"""
return pulumi.get(self, "namespace")
@pulumi.output_type
class ContainerCodePackagePropertiesResponse(dict):
"""
Describes a container and its runtime properties.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "instanceView":
suggest = "instance_view"
elif key == "environmentVariables":
suggest = "environment_variables"
elif key == "imageRegistryCredential":
suggest = "image_registry_credential"
elif key == "reliableCollectionsRefs":
suggest = "reliable_collections_refs"
elif key == "volumeRefs":
suggest = "volume_refs"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ContainerCodePackagePropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ContainerCodePackagePropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ContainerCodePackagePropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
image: str,
instance_view: 'outputs.ContainerInstanceViewResponse',
name: str,
resources: 'outputs.ResourceRequirementsResponse',
commands: Optional[Sequence[str]] = None,
diagnostics: Optional['outputs.DiagnosticsRefResponse'] = None,
endpoints: Optional[Sequence['outputs.EndpointPropertiesResponse']] = None,
entrypoint: Optional[str] = None,
environment_variables: Optional[Sequence['outputs.EnvironmentVariableResponse']] = None,
image_registry_credential: Optional['outputs.ImageRegistryCredentialResponse'] = None,
labels: Optional[Sequence['outputs.ContainerLabelResponse']] = None,
reliable_collections_refs: Optional[Sequence['outputs.ReliableCollectionsRefResponse']] = None,
settings: Optional[Sequence['outputs.SettingResponse']] = None,
volume_refs: Optional[Sequence['outputs.VolumeReferenceResponse']] = None,
volumes: Optional[Sequence['outputs.ApplicationScopedVolumeResponse']] = None):
"""
Describes a container and its runtime properties.
:param str image: The Container image to use.
:param 'ContainerInstanceViewResponse' instance_view: Runtime information of a container instance.
:param str name: The name of the code package.
:param 'ResourceRequirementsResponse' resources: The resources required by this container.
:param Sequence[str] commands: Command array to execute within the container in exec form.
:param 'DiagnosticsRefResponse' diagnostics: Reference to sinks in DiagnosticsDescription.
:param Sequence['EndpointPropertiesResponse'] endpoints: The endpoints exposed by this container.
:param str entrypoint: Override for the default entry point in the container.
        :param Sequence['EnvironmentVariableResponse'] environment_variables: The environment variables to set in this container.
:param 'ImageRegistryCredentialResponse' image_registry_credential: Image registry credential.
:param Sequence['ContainerLabelResponse'] labels: The labels to set in this container.
:param Sequence['ReliableCollectionsRefResponse'] reliable_collections_refs: A list of ReliableCollection resources used by this particular code package. Please refer to ReliableCollectionsRef for more details.
:param Sequence['SettingResponse'] settings: The settings to set in this container. The setting file path can be fetched from environment variable "Fabric_SettingPath". The path for Windows container is "C:\\secrets". The path for Linux container is "/var/secrets".
:param Sequence['VolumeReferenceResponse'] volume_refs: Volumes to be attached to the container. The lifetime of these volumes is independent of the application's lifetime.
:param Sequence['ApplicationScopedVolumeResponse'] volumes: Volumes to be attached to the container. The lifetime of these volumes is scoped to the application's lifetime.
"""
pulumi.set(__self__, "image", image)
pulumi.set(__self__, "instance_view", instance_view)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "resources", resources)
if commands is not None:
pulumi.set(__self__, "commands", commands)
if diagnostics is not None:
pulumi.set(__self__, "diagnostics", diagnostics)
if endpoints is not None:
pulumi.set(__self__, "endpoints", endpoints)
if entrypoint is not None:
pulumi.set(__self__, "entrypoint", entrypoint)
if environment_variables is not None:
pulumi.set(__self__, "environment_variables", environment_variables)
if image_registry_credential is not None:
pulumi.set(__self__, "image_registry_credential", image_registry_credential)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if reliable_collections_refs is not None:
pulumi.set(__self__, "reliable_collections_refs", reliable_collections_refs)
if settings is not None:
pulumi.set(__self__, "settings", settings)
if volume_refs is not None:
pulumi.set(__self__, "volume_refs", volume_refs)
if volumes is not None:
pulumi.set(__self__, "volumes", volumes)
@property
@pulumi.getter
def image(self) -> str:
"""
The Container image to use.
"""
return pulumi.get(self, "image")
@property
@pulumi.getter(name="instanceView")
def instance_view(self) -> 'outputs.ContainerInstanceViewResponse':
"""
Runtime information of a container instance.
"""
return pulumi.get(self, "instance_view")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the code package.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def resources(self) -> 'outputs.ResourceRequirementsResponse':
"""
The resources required by this container.
"""
return pulumi.get(self, "resources")
@property
@pulumi.getter
def commands(self) -> Optional[Sequence[str]]:
"""
Command array to execute within the container in exec form.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def diagnostics(self) -> Optional['outputs.DiagnosticsRefResponse']:
"""
Reference to sinks in DiagnosticsDescription.
"""
return pulumi.get(self, "diagnostics")
@property
@pulumi.getter
def endpoints(self) -> Optional[Sequence['outputs.EndpointPropertiesResponse']]:
"""
The endpoints exposed by this container.
"""
return pulumi.get(self, "endpoints")
@property
@pulumi.getter
def entrypoint(self) -> Optional[str]:
"""
Override for the default entry point in the container.
"""
return pulumi.get(self, "entrypoint")
@property
@pulumi.getter(name="environmentVariables")
def environment_variables(self) -> Optional[Sequence['outputs.EnvironmentVariableResponse']]:
"""
        The environment variables to set in this container.
"""
return pulumi.get(self, "environment_variables")
@property
@pulumi.getter(name="imageRegistryCredential")
def image_registry_credential(self) -> Optional['outputs.ImageRegistryCredentialResponse']:
"""
Image registry credential.
"""
return pulumi.get(self, "image_registry_credential")
@property
@pulumi.getter
def labels(self) -> Optional[Sequence['outputs.ContainerLabelResponse']]:
"""
The labels to set in this container.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="reliableCollectionsRefs")
def reliable_collections_refs(self) -> Optional[Sequence['outputs.ReliableCollectionsRefResponse']]:
"""
A list of ReliableCollection resources used by this particular code package. Please refer to ReliableCollectionsRef for more details.
"""
return pulumi.get(self, "reliable_collections_refs")
@property
@pulumi.getter
def settings(self) -> Optional[Sequence['outputs.SettingResponse']]:
"""
The settings to set in this container. The setting file path can be fetched from environment variable "Fabric_SettingPath". The path for Windows container is "C:\\secrets". The path for Linux container is "/var/secrets".
"""
return pulumi.get(self, "settings")
@property
@pulumi.getter(name="volumeRefs")
def volume_refs(self) -> Optional[Sequence['outputs.VolumeReferenceResponse']]:
"""
Volumes to be attached to the container. The lifetime of these volumes is independent of the application's lifetime.
"""
return pulumi.get(self, "volume_refs")
@property
@pulumi.getter
def volumes(self) -> Optional[Sequence['outputs.ApplicationScopedVolumeResponse']]:
"""
Volumes to be attached to the container. The lifetime of these volumes is scoped to the application's lifetime.
"""
return pulumi.get(self, "volumes")
@pulumi.output_type
class ContainerEventResponse(dict):
"""
A container event.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "firstTimestamp":
suggest = "first_timestamp"
elif key == "lastTimestamp":
suggest = "last_timestamp"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ContainerEventResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ContainerEventResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ContainerEventResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
count: Optional[int] = None,
first_timestamp: Optional[str] = None,
last_timestamp: Optional[str] = None,
message: Optional[str] = None,
name: Optional[str] = None,
type: Optional[str] = None):
"""
A container event.
:param int count: The count of the event.
:param str first_timestamp: Date/time of the first event.
:param str last_timestamp: Date/time of the last event.
        :param str message: The event message.
:param str name: The name of the container event.
:param str type: The event type.
"""
if count is not None:
pulumi.set(__self__, "count", count)
if first_timestamp is not None:
pulumi.set(__self__, "first_timestamp", first_timestamp)
if last_timestamp is not None:
pulumi.set(__self__, "last_timestamp", last_timestamp)
if message is not None:
pulumi.set(__self__, "message", message)
if name is not None:
pulumi.set(__self__, "name", name)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def count(self) -> Optional[int]:
"""
The count of the event.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter(name="firstTimestamp")
def first_timestamp(self) -> Optional[str]:
"""
Date/time of the first event.
"""
return pulumi.get(self, "first_timestamp")
@property
@pulumi.getter(name="lastTimestamp")
def last_timestamp(self) -> Optional[str]:
"""
Date/time of the last event.
"""
return pulumi.get(self, "last_timestamp")
@property
@pulumi.getter
def message(self) -> Optional[str]:
"""
        The event message.
"""
return pulumi.get(self, "message")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the container event.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The event type.
"""
return pulumi.get(self, "type")
@pulumi.output_type
class ContainerInstanceViewResponse(dict):
"""
Runtime information of a container instance.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "currentState":
suggest = "current_state"
elif key == "previousState":
suggest = "previous_state"
elif key == "restartCount":
suggest = "restart_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ContainerInstanceViewResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ContainerInstanceViewResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ContainerInstanceViewResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
current_state: Optional['outputs.ContainerStateResponse'] = None,
events: Optional[Sequence['outputs.ContainerEventResponse']] = None,
previous_state: Optional['outputs.ContainerStateResponse'] = None,
restart_count: Optional[int] = None):
"""
Runtime information of a container instance.
:param 'ContainerStateResponse' current_state: Current container instance state.
:param Sequence['ContainerEventResponse'] events: The events of this container instance.
:param 'ContainerStateResponse' previous_state: Previous container instance state.
:param int restart_count: The number of times the container has been restarted.
"""
if current_state is not None:
pulumi.set(__self__, "current_state", current_state)
if events is not None:
pulumi.set(__self__, "events", events)
if previous_state is not None:
pulumi.set(__self__, "previous_state", previous_state)
if restart_count is not None:
pulumi.set(__self__, "restart_count", restart_count)
@property
@pulumi.getter(name="currentState")
def current_state(self) -> Optional['outputs.ContainerStateResponse']:
"""
Current container instance state.
"""
return pulumi.get(self, "current_state")
@property
@pulumi.getter
def events(self) -> Optional[Sequence['outputs.ContainerEventResponse']]:
"""
The events of this container instance.
"""
return pulumi.get(self, "events")
@property
@pulumi.getter(name="previousState")
def previous_state(self) -> Optional['outputs.ContainerStateResponse']:
"""
Previous container instance state.
"""
return pulumi.get(self, "previous_state")
@property
@pulumi.getter(name="restartCount")
def restart_count(self) -> Optional[int]:
"""
The number of times the container has been restarted.
"""
return pulumi.get(self, "restart_count")
@pulumi.output_type
class ContainerLabelResponse(dict):
"""
Describes a container label.
"""
def __init__(__self__, *,
name: str,
value: str):
"""
Describes a container label.
:param str name: The name of the container label.
:param str value: The value of the container label.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the container label.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def value(self) -> str:
"""
The value of the container label.
"""
return pulumi.get(self, "value")
@pulumi.output_type
class ContainerStateResponse(dict):
"""
The container state.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "detailStatus":
suggest = "detail_status"
elif key == "exitCode":
suggest = "exit_code"
elif key == "finishTime":
suggest = "finish_time"
elif key == "startTime":
suggest = "start_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ContainerStateResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ContainerStateResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ContainerStateResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
detail_status: Optional[str] = None,
exit_code: Optional[str] = None,
finish_time: Optional[str] = None,
start_time: Optional[str] = None,
state: Optional[str] = None):
"""
The container state.
:param str detail_status: Human-readable status of this state.
:param str exit_code: The container exit code.
:param str finish_time: Date/time when the container state finished.
:param str start_time: Date/time when the container state started.
        :param str state: The state of this container.
"""
if detail_status is not None:
pulumi.set(__self__, "detail_status", detail_status)
if exit_code is not None:
pulumi.set(__self__, "exit_code", exit_code)
if finish_time is not None:
pulumi.set(__self__, "finish_time", finish_time)
if start_time is not None:
pulumi.set(__self__, "start_time", start_time)
if state is not None:
pulumi.set(__self__, "state", state)
@property
@pulumi.getter(name="detailStatus")
def detail_status(self) -> Optional[str]:
"""
Human-readable status of this state.
"""
return pulumi.get(self, "detail_status")
@property
@pulumi.getter(name="exitCode")
def exit_code(self) -> Optional[str]:
"""
The container exit code.
"""
return pulumi.get(self, "exit_code")
@property
@pulumi.getter(name="finishTime")
def finish_time(self) -> Optional[str]:
"""
Date/time when the container state finished.
"""
return pulumi.get(self, "finish_time")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> Optional[str]:
"""
Date/time when the container state started.
"""
return pulumi.get(self, "start_time")
@property
@pulumi.getter
def state(self) -> Optional[str]:
"""
        The state of this container.
"""
return pulumi.get(self, "state")
@pulumi.output_type
class DiagnosticsDescriptionResponse(dict):
"""
Describes the diagnostics options available
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "defaultSinkRefs":
suggest = "default_sink_refs"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DiagnosticsDescriptionResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DiagnosticsDescriptionResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DiagnosticsDescriptionResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
default_sink_refs: Optional[Sequence[str]] = None,
enabled: Optional[bool] = None,
sinks: Optional[Sequence['outputs.AzureInternalMonitoringPipelineSinkDescriptionResponse']] = None):
"""
Describes the diagnostics options available
:param Sequence[str] default_sink_refs: The sinks to be used if diagnostics is enabled. Sink choices can be overridden at the service and code package level.
:param bool enabled: Status of whether or not sinks are enabled.
:param Sequence['AzureInternalMonitoringPipelineSinkDescriptionResponse'] sinks: List of supported sinks that can be referenced.
"""
if default_sink_refs is not None:
pulumi.set(__self__, "default_sink_refs", default_sink_refs)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if sinks is not None:
pulumi.set(__self__, "sinks", sinks)
@property
@pulumi.getter(name="defaultSinkRefs")
def default_sink_refs(self) -> Optional[Sequence[str]]:
"""
The sinks to be used if diagnostics is enabled. Sink choices can be overridden at the service and code package level.
"""
return pulumi.get(self, "default_sink_refs")
@property
@pulumi.getter
def enabled(self) -> Optional[bool]:
"""
Status of whether or not sinks are enabled.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def sinks(self) -> Optional[Sequence['outputs.AzureInternalMonitoringPipelineSinkDescriptionResponse']]:
"""
List of supported sinks that can be referenced.
"""
return pulumi.get(self, "sinks")
@pulumi.output_type
class DiagnosticsRefResponse(dict):
"""
Reference to sinks in DiagnosticsDescription.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sinkRefs":
suggest = "sink_refs"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DiagnosticsRefResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DiagnosticsRefResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DiagnosticsRefResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enabled: Optional[bool] = None,
sink_refs: Optional[Sequence[str]] = None):
"""
Reference to sinks in DiagnosticsDescription.
:param bool enabled: Status of whether or not sinks are enabled.
:param Sequence[str] sink_refs: List of sinks to be used if enabled. References the list of sinks in DiagnosticsDescription.
"""
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if sink_refs is not None:
pulumi.set(__self__, "sink_refs", sink_refs)
@property
@pulumi.getter
def enabled(self) -> Optional[bool]:
"""
Status of whether or not sinks are enabled.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="sinkRefs")
def sink_refs(self) -> Optional[Sequence[str]]:
"""
List of sinks to be used if enabled. References the list of sinks in DiagnosticsDescription.
"""
return pulumi.get(self, "sink_refs")
@pulumi.output_type
class EndpointPropertiesResponse(dict):
"""
Describes a container endpoint.
"""
def __init__(__self__, *,
name: str,
port: Optional[int] = None):
"""
Describes a container endpoint.
:param str name: The name of the endpoint.
:param int port: Port used by the container.
"""
pulumi.set(__self__, "name", name)
if port is not None:
pulumi.set(__self__, "port", port)
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the endpoint.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def port(self) -> Optional[int]:
"""
Port used by the container.
"""
return pulumi.get(self, "port")
@pulumi.output_type
class EndpointRefResponse(dict):
"""
Describes a reference to a service endpoint.
"""
def __init__(__self__, *,
name: Optional[str] = None):
"""
Describes a reference to a service endpoint.
:param str name: Name of the endpoint.
"""
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the endpoint.
"""
return pulumi.get(self, "name")
@pulumi.output_type
class EnvironmentVariableResponse(dict):
"""
Describes an environment variable for the container.
"""
def __init__(__self__, *,
name: Optional[str] = None,
value: Optional[str] = None):
"""
Describes an environment variable for the container.
:param str name: The name of the environment variable.
:param str value: The value of the environment variable.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the environment variable.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def value(self) -> Optional[str]:
"""
The value of the environment variable.
"""
return pulumi.get(self, "value")
@pulumi.output_type
class GatewayDestinationResponse(dict):
"""
Describes destination endpoint for routing traffic.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "applicationName":
suggest = "application_name"
elif key == "endpointName":
suggest = "endpoint_name"
elif key == "serviceName":
suggest = "service_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GatewayDestinationResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GatewayDestinationResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GatewayDestinationResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
application_name: str,
endpoint_name: str,
service_name: str):
"""
Describes destination endpoint for routing traffic.
        :param str application_name: Name of the Service Fabric Mesh application.
        :param str endpoint_name: Name of the endpoint in the service.
        :param str service_name: Name of the service that contains the endpoint.
"""
pulumi.set(__self__, "application_name", application_name)
pulumi.set(__self__, "endpoint_name", endpoint_name)
pulumi.set(__self__, "service_name", service_name)
@property
@pulumi.getter(name="applicationName")
def application_name(self) -> str:
"""
        Name of the Service Fabric Mesh application.
"""
return pulumi.get(self, "application_name")
@property
@pulumi.getter(name="endpointName")
def endpoint_name(self) -> str:
"""
        Name of the endpoint in the service.
"""
return pulumi.get(self, "endpoint_name")
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> str:
"""
        Name of the service that contains the endpoint.
"""
return pulumi.get(self, "service_name")
@pulumi.output_type
class HttpConfigResponse(dict):
"""
Describes the http configuration for external connectivity for this network.
"""
def __init__(__self__, *,
hosts: Sequence['outputs.HttpHostConfigResponse'],
name: str,
port: int):
"""
Describes the http configuration for external connectivity for this network.
        :param Sequence['HttpHostConfigResponse'] hosts: The hostname configurations used for routing.
        :param str name: Name of the http gateway config.
:param int port: Specifies the port at which the service endpoint below needs to be exposed.
"""
pulumi.set(__self__, "hosts", hosts)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "port", port)
@property
@pulumi.getter
def hosts(self) -> Sequence['outputs.HttpHostConfigResponse']:
"""
        The hostname configurations used for routing.
"""
return pulumi.get(self, "hosts")
@property
@pulumi.getter
def name(self) -> str:
"""
        Name of the http gateway config.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def port(self) -> int:
"""
Specifies the port at which the service endpoint below needs to be exposed.
"""
return pulumi.get(self, "port")
@pulumi.output_type
class HttpHostConfigResponse(dict):
"""
Describes the hostname properties for http routing.
"""
def __init__(__self__, *,
name: str,
routes: Sequence['outputs.HttpRouteConfigResponse']):
"""
Describes the hostname properties for http routing.
        :param str name: Name of the http hostname config.
:param Sequence['HttpRouteConfigResponse'] routes: Route information to use for routing. Routes are processed in the order they are specified. Specify routes that are more specific before routes that can handle general cases.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "routes", routes)
@property
@pulumi.getter
def name(self) -> str:
"""
        Name of the http hostname config.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def routes(self) -> Sequence['outputs.HttpRouteConfigResponse']:
"""
Route information to use for routing. Routes are processed in the order they are specified. Specify routes that are more specific before routes that can handle general cases.
"""
return pulumi.get(self, "routes")
@pulumi.output_type
class HttpRouteConfigResponse(dict):
"""
    Describes an http route: the rule used to match a request and the destination it is routed to.
"""
def __init__(__self__, *,
destination: 'outputs.GatewayDestinationResponse',
match: 'outputs.HttpRouteMatchRuleResponse',
name: str):
"""
        Describes an http route: the rule used to match a request and the destination it is routed to.
:param 'GatewayDestinationResponse' destination: Describes destination endpoint for routing traffic.
:param 'HttpRouteMatchRuleResponse' match: Describes a rule for http route matching.
        :param str name: Name of the http route.
"""
pulumi.set(__self__, "destination", destination)
pulumi.set(__self__, "match", match)
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def destination(self) -> 'outputs.GatewayDestinationResponse':
"""
Describes destination endpoint for routing traffic.
"""
return pulumi.get(self, "destination")
@property
@pulumi.getter
def match(self) -> 'outputs.HttpRouteMatchRuleResponse':
"""
Describes a rule for http route matching.
"""
return pulumi.get(self, "match")
@property
@pulumi.getter
def name(self) -> str:
"""
        Name of the http route.
"""
return pulumi.get(self, "name")
@pulumi.output_type
class HttpRouteMatchHeaderResponse(dict):
"""
Describes header information for http route matching.
"""
def __init__(__self__, *,
name: str,
type: Optional[str] = None,
value: Optional[str] = None):
"""
Describes header information for http route matching.
        :param str name: Name of the header to match in the request.
        :param str type: How to match the header value.
        :param str value: Value of the header to match in the request.
"""
pulumi.set(__self__, "name", name)
if type is not None:
pulumi.set(__self__, "type", type)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> str:
"""
        Name of the header to match in the request.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
        How to match the header value.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def value(self) -> Optional[str]:
"""
        Value of the header to match in the request.
"""
return pulumi.get(self, "value")
@pulumi.output_type
class HttpRouteMatchPathResponse(dict):
"""
Path to match for routing.
"""
def __init__(__self__, *,
type: str,
value: str,
rewrite: Optional[str] = None):
"""
Path to match for routing.
        :param str type: How to match the value in the Uri.
        :param str value: Uri path to match for the request.
        :param str rewrite: Replacement string for the matched part of the Uri.
"""
pulumi.set(__self__, "type", type)
pulumi.set(__self__, "value", value)
if rewrite is not None:
pulumi.set(__self__, "rewrite", rewrite)
@property
@pulumi.getter
def type(self) -> str:
"""
        How to match the value in the Uri.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def value(self) -> str:
"""
        Uri path to match for the request.
"""
return pulumi.get(self, "value")
@property
@pulumi.getter
def rewrite(self) -> Optional[str]:
"""
        Replacement string for the matched part of the Uri.
"""
return pulumi.get(self, "rewrite")
@pulumi.output_type
class HttpRouteMatchRuleResponse(dict):
"""
Describes a rule for http route matching.
"""
def __init__(__self__, *,
path: 'outputs.HttpRouteMatchPathResponse',
headers: Optional[Sequence['outputs.HttpRouteMatchHeaderResponse']] = None):
"""
Describes a rule for http route matching.
:param 'HttpRouteMatchPathResponse' path: Path to match for routing.
        :param Sequence['HttpRouteMatchHeaderResponse'] headers: Headers and their values to match in the request.
"""
pulumi.set(__self__, "path", path)
if headers is not None:
pulumi.set(__self__, "headers", headers)
@property
@pulumi.getter
def path(self) -> 'outputs.HttpRouteMatchPathResponse':
"""
Path to match for routing.
"""
return pulumi.get(self, "path")
@property
@pulumi.getter
def headers(self) -> Optional[Sequence['outputs.HttpRouteMatchHeaderResponse']]:
"""
        Headers and their values to match in the request.
"""
return pulumi.get(self, "headers")
@pulumi.output_type
class ImageRegistryCredentialResponse(dict):
"""
Image registry credential.
"""
def __init__(__self__, *,
server: str,
username: str,
password: Optional[str] = None):
"""
Image registry credential.
        :param str server: Docker image registry server, without a protocol such as `http` or `https`.
        :param str username: The username for the private registry.
        :param str password: The password for the private registry. The password is required for create or update operations; however, it is not returned in the get or list operations.
"""
pulumi.set(__self__, "server", server)
pulumi.set(__self__, "username", username)
if password is not None:
pulumi.set(__self__, "password", password)
@property
@pulumi.getter
def server(self) -> str:
"""
        Docker image registry server, without a protocol such as `http` or `https`.
"""
return pulumi.get(self, "server")
@property
@pulumi.getter
def username(self) -> str:
"""
The username for the private registry.
"""
return pulumi.get(self, "username")
@property
@pulumi.getter
def password(self) -> Optional[str]:
"""
        The password for the private registry. The password is required for create or update operations; however, it is not returned in the get or list operations.
"""
return pulumi.get(self, "password")
@pulumi.output_type
class NetworkRefResponse(dict):
"""
Describes a network reference in a service.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endpointRefs":
suggest = "endpoint_refs"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NetworkRefResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NetworkRefResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NetworkRefResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
endpoint_refs: Optional[Sequence['outputs.EndpointRefResponse']] = None,
name: Optional[str] = None):
"""
Describes a network reference in a service.
:param Sequence['EndpointRefResponse'] endpoint_refs: A list of endpoints that are exposed on this network.
        :param str name: Name of the network.
"""
if endpoint_refs is not None:
pulumi.set(__self__, "endpoint_refs", endpoint_refs)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="endpointRefs")
def endpoint_refs(self) -> Optional[Sequence['outputs.EndpointRefResponse']]:
"""
A list of endpoints that are exposed on this network.
"""
return pulumi.get(self, "endpoint_refs")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
        Name of the network.
"""
return pulumi.get(self, "name")
@pulumi.output_type
class NetworkResourcePropertiesResponse(dict):
"""
Describes properties of a network resource.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "provisioningState":
suggest = "provisioning_state"
elif key == "statusDetails":
suggest = "status_details"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NetworkResourcePropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NetworkResourcePropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NetworkResourcePropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
kind: str,
provisioning_state: str,
status: str,
status_details: str,
description: Optional[str] = None):
"""
Describes properties of a network resource.
:param str kind: The type of a Service Fabric container network.
Expected value is 'NetworkResourceProperties'.
:param str provisioning_state: State of the resource.
:param str status: Status of the network.
:param str status_details: Gives additional information about the current status of the network.
        :param str description: User-readable description of the network.
"""
pulumi.set(__self__, "kind", 'NetworkResourceProperties')
pulumi.set(__self__, "provisioning_state", provisioning_state)
pulumi.set(__self__, "status", status)
pulumi.set(__self__, "status_details", status_details)
if description is not None:
pulumi.set(__self__, "description", description)
@property
@pulumi.getter
def kind(self) -> str:
"""
The type of a Service Fabric container network.
Expected value is 'NetworkResourceProperties'.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
State of the resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def status(self) -> str:
"""
Status of the network.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="statusDetails")
def status_details(self) -> str:
"""
Gives additional information about the current status of the network.
"""
return pulumi.get(self, "status_details")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
        User-readable description of the network.
"""
return pulumi.get(self, "description")
@pulumi.output_type
class ReliableCollectionsRefResponse(dict):
"""
Specifying this parameter adds support for reliable collections
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "doNotPersistState":
suggest = "do_not_persist_state"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ReliableCollectionsRefResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ReliableCollectionsRefResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ReliableCollectionsRefResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
name: str,
do_not_persist_state: Optional[bool] = None):
"""
Specifying this parameter adds support for reliable collections
:param str name: Name of ReliableCollection resource. Right now it's not used and you can use any string.
        :param bool do_not_persist_state: False (the default) if ReliableCollections state is persisted to disk as usual. True if you do not want to persist state, in which case replication is still enabled and you can use ReliableCollections as a distributed cache.
"""
pulumi.set(__self__, "name", name)
if do_not_persist_state is not None:
pulumi.set(__self__, "do_not_persist_state", do_not_persist_state)
@property
@pulumi.getter
def name(self) -> str:
"""
Name of ReliableCollection resource. Right now it's not used and you can use any string.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="doNotPersistState")
def do_not_persist_state(self) -> Optional[bool]:
"""
        False (the default) if ReliableCollections state is persisted to disk as usual. True if you do not want to persist state, in which case replication is still enabled and you can use ReliableCollections as a distributed cache.
"""
return pulumi.get(self, "do_not_persist_state")
@pulumi.output_type
class ResourceLimitsResponse(dict):
"""
    This type describes the resource limits for a given container. It describes the maximum amount of resources a container is allowed to use before being restarted.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "memoryInGB":
suggest = "memory_in_gb"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ResourceLimitsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ResourceLimitsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ResourceLimitsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cpu: Optional[float] = None,
memory_in_gb: Optional[float] = None):
"""
        This type describes the resource limits for a given container. It describes the maximum amount of resources a container is allowed to use before being restarted.
:param float cpu: CPU limits in cores. At present, only full cores are supported.
:param float memory_in_gb: The memory limit in GB.
"""
if cpu is not None:
pulumi.set(__self__, "cpu", cpu)
if memory_in_gb is not None:
pulumi.set(__self__, "memory_in_gb", memory_in_gb)
@property
@pulumi.getter
def cpu(self) -> Optional[float]:
"""
CPU limits in cores. At present, only full cores are supported.
"""
return pulumi.get(self, "cpu")
@property
@pulumi.getter(name="memoryInGB")
def memory_in_gb(self) -> Optional[float]:
"""
The memory limit in GB.
"""
return pulumi.get(self, "memory_in_gb")
@pulumi.output_type
class ResourceRequestsResponse(dict):
"""
    This type describes the requested resources for a given container. It describes the minimum amount of resources required for the container. A container can consume more than the requested resources up to the specified limits before being restarted. Currently, the requested resources are treated as limits.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "memoryInGB":
suggest = "memory_in_gb"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ResourceRequestsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ResourceRequestsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ResourceRequestsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cpu: float,
memory_in_gb: float):
"""
        This type describes the requested resources for a given container. It describes the minimum amount of resources required for the container. A container can consume more than the requested resources up to the specified limits before being restarted. Currently, the requested resources are treated as limits.
:param float cpu: Requested number of CPU cores. At present, only full cores are supported.
:param float memory_in_gb: The memory request in GB for this container.
"""
pulumi.set(__self__, "cpu", cpu)
pulumi.set(__self__, "memory_in_gb", memory_in_gb)
@property
@pulumi.getter
def cpu(self) -> float:
"""
Requested number of CPU cores. At present, only full cores are supported.
"""
return pulumi.get(self, "cpu")
@property
@pulumi.getter(name="memoryInGB")
def memory_in_gb(self) -> float:
"""
The memory request in GB for this container.
"""
return pulumi.get(self, "memory_in_gb")
@pulumi.output_type
class ResourceRequirementsResponse(dict):
"""
This type describes the resource requirements for a container or a service.
"""
def __init__(__self__, *,
requests: 'outputs.ResourceRequestsResponse',
limits: Optional['outputs.ResourceLimitsResponse'] = None):
"""
This type describes the resource requirements for a container or a service.
:param 'ResourceRequestsResponse' requests: Describes the requested resources for a given container.
:param 'ResourceLimitsResponse' limits: Describes the maximum limits on the resources for a given container.
"""
pulumi.set(__self__, "requests", requests)
if limits is not None:
pulumi.set(__self__, "limits", limits)
@property
@pulumi.getter
def requests(self) -> 'outputs.ResourceRequestsResponse':
"""
Describes the requested resources for a given container.
"""
return pulumi.get(self, "requests")
@property
@pulumi.getter
def limits(self) -> Optional['outputs.ResourceLimitsResponse']:
"""
Describes the maximum limits on the resources for a given container.
"""
return pulumi.get(self, "limits")
@pulumi.output_type
class SecretResourcePropertiesResponse(dict):
"""
Describes the properties of a secret resource.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "provisioningState":
suggest = "provisioning_state"
elif key == "statusDetails":
suggest = "status_details"
elif key == "contentType":
suggest = "content_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SecretResourcePropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SecretResourcePropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SecretResourcePropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
kind: str,
provisioning_state: str,
status: str,
status_details: str,
content_type: Optional[str] = None,
description: Optional[str] = None):
"""
Describes the properties of a secret resource.
:param str kind: Describes the kind of secret.
Expected value is 'SecretResourceProperties'.
:param str provisioning_state: State of the resource.
:param str status: Status of the resource.
:param str status_details: Gives additional information about the current status of the secret.
:param str content_type: The type of the content stored in the secret value. The value of this property is opaque to Service Fabric. Once set, the value of this property cannot be changed.
        :param str description: User-readable description of the secret.
"""
pulumi.set(__self__, "kind", 'SecretResourceProperties')
pulumi.set(__self__, "provisioning_state", provisioning_state)
pulumi.set(__self__, "status", status)
pulumi.set(__self__, "status_details", status_details)
if content_type is not None:
pulumi.set(__self__, "content_type", content_type)
if description is not None:
pulumi.set(__self__, "description", description)
@property
@pulumi.getter
def kind(self) -> str:
"""
Describes the kind of secret.
Expected value is 'SecretResourceProperties'.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
State of the resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def status(self) -> str:
"""
Status of the resource.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="statusDetails")
def status_details(self) -> str:
"""
Gives additional information about the current status of the secret.
"""
return pulumi.get(self, "status_details")
@property
@pulumi.getter(name="contentType")
def content_type(self) -> Optional[str]:
"""
The type of the content stored in the secret value. The value of this property is opaque to Service Fabric. Once set, the value of this property cannot be changed.
"""
return pulumi.get(self, "content_type")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
User readable description of the secret.
"""
return pulumi.get(self, "description")
@pulumi.output_type
class ServiceResourceDescriptionResponse(dict):
"""
This type describes a service resource.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "codePackages":
suggest = "code_packages"
elif key == "healthState":
suggest = "health_state"
elif key == "osType":
suggest = "os_type"
elif key == "provisioningState":
suggest = "provisioning_state"
elif key == "statusDetails":
suggest = "status_details"
elif key == "unhealthyEvaluation":
suggest = "unhealthy_evaluation"
elif key == "autoScalingPolicies":
suggest = "auto_scaling_policies"
elif key == "networkRefs":
suggest = "network_refs"
elif key == "replicaCount":
suggest = "replica_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ServiceResourceDescriptionResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ServiceResourceDescriptionResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ServiceResourceDescriptionResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
code_packages: Sequence['outputs.ContainerCodePackagePropertiesResponse'],
health_state: str,
id: str,
os_type: str,
provisioning_state: str,
status: str,
status_details: str,
type: str,
unhealthy_evaluation: str,
auto_scaling_policies: Optional[Sequence['outputs.AutoScalingPolicyResponse']] = None,
description: Optional[str] = None,
diagnostics: Optional['outputs.DiagnosticsRefResponse'] = None,
name: Optional[str] = None,
network_refs: Optional[Sequence['outputs.NetworkRefResponse']] = None,
replica_count: Optional[int] = None):
"""
This type describes a service resource.
:param Sequence['ContainerCodePackagePropertiesResponse'] code_packages: Describes the set of code packages that forms the service. A code package describes the container and the properties for running it. All the code packages are started together on the same host and share the same context (network, process etc.).
:param str health_state: Describes the health state of an application resource.
:param str id: Fully qualified identifier for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
        :param str os_type: The operating system required by the code in the service.
:param str provisioning_state: State of the resource.
:param str status: Status of the service.
:param str status_details: Gives additional information about the current status of the service.
:param str type: The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
        :param str unhealthy_evaluation: When the service's health state is not 'Ok', this gives additional details from the Service Fabric Health Manager so the user knows why the service is marked unhealthy.
:param Sequence['AutoScalingPolicyResponse'] auto_scaling_policies: Auto scaling policies
:param str description: User readable description of the service.
:param 'DiagnosticsRefResponse' diagnostics: Reference to sinks in DiagnosticsDescription.
:param str name: The name of the resource
:param Sequence['NetworkRefResponse'] network_refs: The names of the private networks that this service needs to be part of.
:param int replica_count: The number of replicas of the service to create. Defaults to 1 if not specified.
"""
pulumi.set(__self__, "code_packages", code_packages)
pulumi.set(__self__, "health_state", health_state)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "os_type", os_type)
pulumi.set(__self__, "provisioning_state", provisioning_state)
pulumi.set(__self__, "status", status)
pulumi.set(__self__, "status_details", status_details)
pulumi.set(__self__, "type", type)
pulumi.set(__self__, "unhealthy_evaluation", unhealthy_evaluation)
if auto_scaling_policies is not None:
pulumi.set(__self__, "auto_scaling_policies", auto_scaling_policies)
if description is not None:
pulumi.set(__self__, "description", description)
if diagnostics is not None:
pulumi.set(__self__, "diagnostics", diagnostics)
if name is not None:
pulumi.set(__self__, "name", name)
if network_refs is not None:
pulumi.set(__self__, "network_refs", network_refs)
if replica_count is not None:
pulumi.set(__self__, "replica_count", replica_count)
@property
@pulumi.getter(name="codePackages")
def code_packages(self) -> Sequence['outputs.ContainerCodePackagePropertiesResponse']:
"""
Describes the set of code packages that forms the service. A code package describes the container and the properties for running it. All the code packages are started together on the same host and share the same context (network, process etc.).
"""
return pulumi.get(self, "code_packages")
@property
@pulumi.getter(name="healthState")
def health_state(self) -> str:
"""
Describes the health state of an application resource.
"""
return pulumi.get(self, "health_state")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified identifier for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="osType")
def os_type(self) -> str:
"""
        The operating system required by the code in the service.
"""
return pulumi.get(self, "os_type")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
State of the resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def status(self) -> str:
"""
Status of the service.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="statusDetails")
def status_details(self) -> str:
"""
Gives additional information about the current status of the service.
"""
return pulumi.get(self, "status_details")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="unhealthyEvaluation")
def unhealthy_evaluation(self) -> str:
"""
        When the service's health state is not 'Ok', this gives additional details from the Service Fabric Health Manager so the user knows why the service is marked unhealthy.
"""
return pulumi.get(self, "unhealthy_evaluation")
@property
@pulumi.getter(name="autoScalingPolicies")
def auto_scaling_policies(self) -> Optional[Sequence['outputs.AutoScalingPolicyResponse']]:
"""
Auto scaling policies
"""
return pulumi.get(self, "auto_scaling_policies")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
User readable description of the service.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def diagnostics(self) -> Optional['outputs.DiagnosticsRefResponse']:
"""
Reference to sinks in DiagnosticsDescription.
"""
return pulumi.get(self, "diagnostics")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkRefs")
def network_refs(self) -> Optional[Sequence['outputs.NetworkRefResponse']]:
"""
The names of the private networks that this service needs to be part of.
"""
return pulumi.get(self, "network_refs")
@property
@pulumi.getter(name="replicaCount")
def replica_count(self) -> Optional[int]:
"""
The number of replicas of the service to create. Defaults to 1 if not specified.
"""
return pulumi.get(self, "replica_count")
@pulumi.output_type
class SettingResponse(dict):
"""
Describes a setting for the container. The setting file path can be fetched from environment variable "Fabric_SettingPath". The path for Windows container is "C:\\secrets". The path for Linux container is "/var/secrets".
"""
def __init__(__self__, *,
name: Optional[str] = None,
value: Optional[str] = None):
"""
Describes a setting for the container. The setting file path can be fetched from environment variable "Fabric_SettingPath". The path for Windows container is "C:\\secrets". The path for Linux container is "/var/secrets".
:param str name: The name of the setting.
:param str value: The value of the setting.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the setting.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def value(self) -> Optional[str]:
"""
The value of the setting.
"""
return pulumi.get(self, "value")
@pulumi.output_type
class TcpConfigResponse(dict):
"""
Describes the tcp configuration for external connectivity for this network.
"""
def __init__(__self__, *,
destination: 'outputs.GatewayDestinationResponse',
name: str,
port: int):
"""
Describes the tcp configuration for external connectivity for this network.
:param 'GatewayDestinationResponse' destination: Describes destination endpoint for routing traffic.
:param str name: tcp gateway config name.
:param int port: Specifies the port at which the service endpoint below needs to be exposed.
"""
pulumi.set(__self__, "destination", destination)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "port", port)
@property
@pulumi.getter
def destination(self) -> 'outputs.GatewayDestinationResponse':
"""
Describes destination endpoint for routing traffic.
"""
return pulumi.get(self, "destination")
@property
@pulumi.getter
def name(self) -> str:
"""
tcp gateway config name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def port(self) -> int:
"""
Specifies the port at which the service endpoint below needs to be exposed.
"""
return pulumi.get(self, "port")
@pulumi.output_type
class VolumeProviderParametersAzureFileResponse(dict):
"""
This type describes a volume provided by an Azure Files file share.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "accountName":
suggest = "account_name"
elif key == "shareName":
suggest = "share_name"
elif key == "accountKey":
suggest = "account_key"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in VolumeProviderParametersAzureFileResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
VolumeProviderParametersAzureFileResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
VolumeProviderParametersAzureFileResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
account_name: str,
share_name: str,
account_key: Optional[str] = None):
"""
This type describes a volume provided by an Azure Files file share.
:param str account_name: Name of the Azure storage account for the File Share.
:param str share_name: Name of the Azure Files file share that provides storage for the volume.
:param str account_key: Access key of the Azure storage account for the File Share.
"""
pulumi.set(__self__, "account_name", account_name)
pulumi.set(__self__, "share_name", share_name)
if account_key is not None:
pulumi.set(__self__, "account_key", account_key)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> str:
"""
Name of the Azure storage account for the File Share.
"""
return pulumi.get(self, "account_name")
@property
@pulumi.getter(name="shareName")
def share_name(self) -> str:
"""
Name of the Azure Files file share that provides storage for the volume.
"""
return pulumi.get(self, "share_name")
@property
@pulumi.getter(name="accountKey")
def account_key(self) -> Optional[str]:
"""
Access key of the Azure storage account for the File Share.
"""
return pulumi.get(self, "account_key")
@pulumi.output_type
class VolumeReferenceResponse(dict):
"""
Describes a reference to a volume resource.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "destinationPath":
suggest = "destination_path"
elif key == "readOnly":
suggest = "read_only"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in VolumeReferenceResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
VolumeReferenceResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
VolumeReferenceResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
destination_path: str,
name: str,
read_only: Optional[bool] = None):
"""
Describes a reference to a volume resource.
:param str destination_path: The path within the container at which the volume should be mounted. Only valid path characters are allowed.
:param str name: Name of the volume being referenced.
:param bool read_only: The flag indicating whether the volume is read only. Default is 'false'.
"""
pulumi.set(__self__, "destination_path", destination_path)
pulumi.set(__self__, "name", name)
if read_only is not None:
pulumi.set(__self__, "read_only", read_only)
@property
@pulumi.getter(name="destinationPath")
def destination_path(self) -> str:
"""
The path within the container at which the volume should be mounted. Only valid path characters are allowed.
"""
return pulumi.get(self, "destination_path")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the volume being referenced.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="readOnly")
def read_only(self) -> Optional[bool]:
"""
The flag indicating whether the volume is read only. Default is 'false'.
"""
return pulumi.get(self, "read_only")
|
[
"[email protected]"
] | |
59cec4e635106d98c8f73c90d259cf476ea169bc
|
e5d059896640e25a57f29f5ec972c114f8ef5866
|
/src/scs_analysis/histo_chart.py
|
0911cb33aa5dba10c5b7846450c692df04dd9084
|
[
"MIT"
] |
permissive
|
tonybushido/scs_analysis
|
10add7b13cee29e1445ea18240bdb08e3bc908a4
|
1121be19c83b0d616772da42ea90623d6f6573c4
|
refs/heads/master
| 2021-01-03T03:11:31.474595 | 2020-02-11T14:27:32 | 2020-02-11T14:27:32 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,618 |
py
|
#!/usr/bin/env python3
"""
Created on 3 Aug 2016
@author: Bruno Beloff ([email protected])
source repo: scs_analysis
DESCRIPTION
The histo_chart utility is used to create Matplotlib histogram charts and comma-separated value (CSV) histogram files.
The utility analyses a given path to a leaf node of the input JSON data stream.
An optional "batch" ("-b") flag can be set, causing the plotting only to take place when all data points have been
received.
Depending on operating system, it may be necessary to edit the matplotlibrc file, which specifies the Matplotlib
back-end graphics system.
SYNOPSIS
histo_chart.py [-b] [-x MIN MAX] [-c BIN_COUNT] [-o FILENAME] [-e] [-v] PATH
EXAMPLES
socket_receiver.py | histo_chart.py val.CO2.cnc -x -10 10 -e -o CO2.csv
FILES
~/SCS/scs_analysis/src/scs_analysis/matplotlibrc
SEE ALSO
scs_analysis/multi_chart
scs_analysis/single_chart
"""
import sys
import warnings
from scs_analysis.chart.histo_chart import HistoChart
from scs_analysis.cmd.cmd_histo_chart import CmdHistoChart
from scs_core.data.json import JSONify
from scs_core.data.path_dict import PathDict
from scs_core.sync.line_reader import LineReader
# --------------------------------------------------------------------------------------------------------------------
if __name__ == '__main__':
warnings.filterwarnings("ignore", module="matplotlib")
# ----------------------------------------------------------------------------------------------------------------
# cmd...
cmd = CmdHistoChart()
if not cmd.is_valid():
cmd.print_help(sys.stderr)
exit(2)
if cmd.verbose:
print("histo_chart: %s" % cmd, file=sys.stderr)
chart = None
proc = None
try:
# ------------------------------------------------------------------------------------------------------------
# resources...
# reader...
reader = LineReader(sys.stdin.fileno())
if cmd.verbose:
print("histo_chart: %s" % reader, file=sys.stderr)
# chart...
chart = HistoChart(cmd.batch_mode, cmd.x[0], cmd.x[1], cmd.bin_count, cmd.path, cmd.outfile)
if cmd.verbose:
print("histo_chart: %s" % chart, file=sys.stderr)
sys.stderr.flush()
# ------------------------------------------------------------------------------------------------------------
# run...
proc = reader.start()
for line in reader.lines:
if chart.closed:
break
if line is None:
chart.pause()
continue
datum = PathDict.construct_from_jstr(line)
if datum is None:
continue
if cmd.echo:
print(JSONify.dumps(datum))
sys.stdout.flush()
chart.plot(datum)
# ----------------------------------------------------------------------------------------------------------------
# end...
except KeyboardInterrupt:
if cmd.verbose:
print("histo_chart: KeyboardInterrupt", file=sys.stderr)
# ----------------------------------------------------------------------------------------------------------------
# close...
finally:
if proc:
proc.terminate()
if chart is not None and not chart.closed:
if cmd.verbose:
print("histo_chart: holding", file=sys.stderr)
# noinspection PyBroadException
try:
chart.hold()
except Exception:
pass
|
[
"[email protected]"
] | |
3afbf51c838358e1e7c6a436752ee852c8429703
|
a298d0b4a3e9e12170651a6bf728093b4badfac7
|
/LeetCode/bstToGst.py
|
7de288787fbea1fb42cae497b5d058f68fcff994
|
[] |
no_license
|
gavinz0228/AlgoPractice
|
fc8ecd194ea2d26de59df45909838161c802b8cd
|
1cb183a326a0612a5cd941778500a8265e1d7255
|
refs/heads/master
| 2022-07-27T11:42:06.887668 | 2022-07-18T20:38:31 | 2022-07-18T20:38:31 | 172,929,652 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 629 |
py
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def bstToGst(self, root: TreeNode) -> TreeNode:
        # running total of node values >= the current node, filled in by a
        # reverse in-order (right, node, left) traversal
        self.s = 0
newRoot = self.aux(root)
return newRoot
def aux(self, root):
if not root:
return None
        # visit the right subtree first so every larger value is already in
        # self.s when the current node's greater-sum is recorded
        rn = self.aux(root.right)
        self.s += root.val
        newRoot = TreeNode(self.s)
        ln = self.aux(root.left)
newRoot.left = ln
newRoot.right = rn
return newRoot
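# --------------------------------------------------------------------
# Illustrative usage sketch (not part of the original submission; on
# LeetCode, TreeNode is predefined and only the Solution class is sent).
class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None

# BST [4, 1, 6]: greater-sum values are 1 -> 11, 4 -> 10, 6 -> 6
root = TreeNode(4)
root.left = TreeNode(1)
root.right = TreeNode(6)
gst = Solution().bstToGst(root)
assert (gst.left.val, gst.val, gst.right.val) == (11, 10, 6)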
|
[
"[email protected]"
] | |
e8924c21c2e48eaaef9dc5a00c052f3bf798e536
|
4d01b138ebc1b23f2758811893ce214eeb96736e
|
/104.py
|
30a114cb232f8c76739e228223eba127307c4bf6
|
[] |
no_license
|
db2398/setbeginner10
|
273c04ef12d3679e94a5478f7cd73e9dcc75bdb9
|
f33f596ad63d7d8cfdbfe205eb65c640d883c2eb
|
refs/heads/master
| 2020-06-17T04:32:42.980311 | 2019-07-08T11:42:05 | 2019-07-08T11:42:05 | 195,797,803 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 51 |
py
|
# read two space-separated integers and print i1 raised to the power i2
i1, i2 = map(int, input().split())
print(i1 ** i2)
|
[
"[email protected]"
] | |
2fdc9014f5baed53ee73acdfc52cbc28aebab88b
|
657aef335ad3cd75d8355aaf9bc2db6641f0ee0e
|
/10.modules/lol.py
|
7490a9c21a9a0865eb870529e3f75cd5df69f8e0
|
[] |
no_license
|
petershan1119/Python-practice
|
73179baaa662ecf1a6861d440049f71ff5685d21
|
2234f301f2324514ac23304d181c21c0a125d0dc
|
refs/heads/master
| 2020-04-13T00:41:01.306943 | 2017-09-21T05:45:30 | 2017-09-21T05:45:30 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 741 |
py
|
#from functions.game import play_game
#from functions.shop import buy_item
#import functions
from functions import play_game, buy_item, abc
from friends.chat import send_message
def turn_on():
print('= Turn on game =')
while True:
        choice = input('What would you like to do?\n 1: Shop, 2: Play game, 3: Message a friend, 0: Quit\n Choice: ')
if choice == '0':
break
elif choice == '1':
buy_item()
elif choice == '2':
play_game()
elif choice == '3':
send_message()
else:
            print('Please choose one of the listed numbers')
print('--------')
print('= Turn off game =')
if __name__ == '__main__':
turn_on()
|
[
"[email protected]"
] | |
eeafb1939c0530403221403c3254a0aef2b343df
|
facb8b9155a569b09ba66aefc22564a5bf9cd319
|
/wp2/era5_scripts/02_preprocessing/lag82/426-tideGauge.py
|
48f8dd1ce1973a730042bae1b65a4df4e76305b7
|
[] |
no_license
|
moinabyssinia/modeling-global-storm-surges
|
13e69faa8f45a1244a964c5de4e2a5a6c95b2128
|
6e385b2a5f0867df8ceabd155e17ba876779c1bd
|
refs/heads/master
| 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,984 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 31 17:12:23 2020
****************************************************
Load predictors & predictands + predictor importance
****************************************************
@author: Michael Tadesse
"""
#import packages
import os
import pandas as pd
import datetime as dt #used for timedelta
from datetime import datetime
#define directories
dir_in = '/lustre/fs0/home/mtadesse/ereaFiveCombine'
dir_out = '/lustre/fs0/home/mtadesse/eraFiveLag'
def lag():
os.chdir(dir_in)
#get names
tg_list_name = os.listdir()
x = 426
y = 427
for t in range(x, y):
tg_name = tg_list_name[t]
print(tg_name, '\n')
# #check if the file exists
# os.chdir(dir_out)
# if (os.path.isfile(tg_name)):
# print('file already exists')
# continue
#cd to where the actual file is
os.chdir(dir_in)
pred = pd.read_csv(tg_name)
pred.sort_values(by = 'date', inplace=True)
pred.reset_index(inplace = True)
pred.drop('index', axis = 1, inplace = True)
#create a daily time series - date_range
#get only the ymd of the start and end times
start_time = pred['date'][0].split(' ')[0]
end_time = pred['date'].iloc[-1].split(' ')[0]
print(start_time, ' - ', end_time, '\n')
date_range = pd.date_range(start_time, end_time, freq = 'D')
        #define time-conversion lambda functions
time_str = lambda x: str(x)
time_converted_str = pd.DataFrame(map(time_str, date_range), columns = ['date'])
time_converted_stamp = pd.DataFrame(date_range, columns = ['timestamp'])
"""
        first prepare the six time-lag dataframes,
        then use the merge function to join the original
        predictor with each lagged dataframe on the shifted dates
"""
#prepare lagged time series for time only
        #note: lags are taken in 6-hour steps from 0 to 30 hours (lag_hrs below)
time_lagged = pd.DataFrame()
lag_hrs = [0, 6, 12, 18, 24, 30]
for lag in lag_hrs:
lag_name = 'lag'+str(lag)
lam_delta = lambda x: str(x - dt.timedelta(hours = lag))
lag_new = pd.DataFrame(map(lam_delta, time_converted_stamp['timestamp']), \
columns = [lag_name])
time_lagged = pd.concat([time_lagged, lag_new], axis = 1)
        #dataframe that contains all lagged time series (just time)
time_all = pd.concat([time_converted_str, time_lagged], axis = 1)
pred_lagged = pd.DataFrame()
for ii in range(1,time_all.shape[1]): #to loop through the lagged time series
print(time_all.columns[ii])
            #extract the corresponding lagged time series
lag_ts = pd.DataFrame(time_all.iloc[:,ii])
lag_ts.columns = ['date']
            #merge the selected lagged time with the predictor on "date"
pred_new = pd.merge(pred, lag_ts, on = ['date'], how = 'right')
pred_new.drop('Unnamed: 0', axis = 1, inplace = True)
#sometimes nan values go to the bottom of the dataframe
#sort df by date -> reset the index -> remove old index
pred_new.sort_values(by = 'date', inplace=True)
pred_new.reset_index(inplace=True)
pred_new.drop('index', axis = 1, inplace= True)
#concatenate lagged dataframe
if ii == 1:
pred_lagged = pred_new
else:
pred_lagged = pd.concat([pred_lagged, pred_new.iloc[:,1:]], axis = 1)
#cd to saving directory
os.chdir(dir_out)
pred_lagged.to_csv(tg_name)
os.chdir(dir_in)
#run script
lag()
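# --------------------------------------------------------------------
# Illustrative toy example (not part of the original pipeline): the
# lag-and-merge idea used in lag() above, on a three-row 6-hourly
# series. Column names are invented; pd and dt are the imports at the
# top of this script.
toy = pd.DataFrame({'date': ['2000-01-01 00:00:00',
                             '2000-01-01 06:00:00',
                             '2000-01-01 12:00:00'],
                    'msl': [1010.0, 1008.5, 1007.2]})
stamps = pd.to_datetime(toy['date'])
# shift the timestamps back 6 hours, then right-merge on 'date' so each
# row picks up the value observed 6 hours earlier (NaN where none exists)
lag6 = pd.DataFrame({'date': (stamps - dt.timedelta(hours=6)).astype(str)})
print(pd.merge(toy, lag6, on='date', how='right'))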
|
[
"[email protected]"
] | |
db3020dac85e7558a8c3c49c190f2af1eae54733
|
417d1065b90d3647c8cf83b89f0eb1b1a93f6b58
|
/eventsourcing/tests/datastore_tests/test_sqlalchemy.py
|
b87d159862d47372c6f27fcc632744598dd76545
|
[
"BSD-3-Clause"
] |
permissive
|
AlanFoster/eventsourcing-1
|
d725513c9f8e48b4fd40a8d449ae26acc4c6b6dc
|
964f9473043da81f80d2e9407ef7aefee02aae11
|
refs/heads/master
| 2020-08-06T12:57:27.096010 | 2019-10-01T20:21:52 | 2019-10-01T20:21:52 | 212,983,685 | 0 | 0 |
BSD-3-Clause
| 2019-10-05T10:50:13 | 2019-10-05T10:50:13 | null |
UTF-8
|
Python
| false | false | 2,949 |
py
|
from tempfile import NamedTemporaryFile
from uuid import uuid4
from sqlalchemy.exc import OperationalError, ProgrammingError
from eventsourcing.infrastructure.datastore import DatastoreTableError
from eventsourcing.infrastructure.sqlalchemy.datastore import DEFAULT_SQLALCHEMY_DB_URI, SQLAlchemyDatastore, \
SQLAlchemySettings
from eventsourcing.infrastructure.sqlalchemy.factory import SQLAlchemyInfrastructureFactory
from eventsourcing.infrastructure.sqlalchemy.records import Base, IntegerSequencedNoIDRecord, \
IntegerSequencedWithIDRecord, SnapshotRecord, TimestampSequencedNoIDRecord, TimestampSequencedWithIDRecord
from eventsourcing.tests.datastore_tests import base
class SQLAlchemyDatastoreTestCase(base.AbstractDatastoreTestCase):
"""
Base class for test cases that use an SQLAlchemy datastore.
"""
use_named_temporary_file = False
connection_strategy = 'plain'
infrastructure_factory_class = SQLAlchemyInfrastructureFactory
contiguous_record_ids = True
def construct_datastore(self):
if self.use_named_temporary_file:
self.temp_file = NamedTemporaryFile('a', delete=True)
uri = 'sqlite:///' + self.temp_file.name
else:
uri = DEFAULT_SQLALCHEMY_DB_URI
# kwargs = {}
# if not self.use_named_temporary_file:
# kwargs['connect_args'] = {'check_same_thread':False}
# kwargs['poolclass'] = StaticPool
return SQLAlchemyDatastore(
base=Base,
settings=SQLAlchemySettings(uri=uri),
tables=(
IntegerSequencedWithIDRecord,
IntegerSequencedNoIDRecord,
TimestampSequencedWithIDRecord,
TimestampSequencedNoIDRecord,
SnapshotRecord
),
connection_strategy=self.connection_strategy,
# **kwargs
)
class TestSQLAlchemyDatastore(SQLAlchemyDatastoreTestCase, base.DatastoreTestCase):
"""
Test case for SQLAlchemy datastore.
"""
def list_records(self):
try:
query = self.datastore.session.query(IntegerSequencedNoIDRecord)
return list(query)
except (OperationalError, ProgrammingError) as e:
# OperationalError from sqlite, ProgrammingError from psycopg2.
self.datastore.session.rollback()
raise DatastoreTableError(e)
finally:
self.datastore.session.close()
def create_record(self):
try:
record = IntegerSequencedNoIDRecord(
sequence_id=uuid4(),
position=0,
topic='topic',
state='{}'
)
self.datastore.session.add(record)
self.datastore.session.commit()
except (OperationalError, ProgrammingError) as e:
self.datastore.session.rollback()
raise DatastoreTableError(e)
return record
|
[
"[email protected]"
] | |
75bfc7a60ab3dc427fbbf87c0657ce0f9ce2fe2e
|
f7965f9bebae5da7f5c85fd8ee2e06c75fc6b11c
|
/examples/ai_edward32tnt.py
|
b72df68d8f452a890f9a60414784f7df4ef8a6f6
|
[] |
no_license
|
Abhishek-commits/snake-challenge
|
6a09fe613c4fe5d64587660226964dcafe9d985c
|
48bf6d26e91b09fc5420398456a558b13a9677a5
|
refs/heads/master
| 2022-12-23T07:48:55.812178 | 2020-10-01T07:20:35 | 2020-10-01T07:20:35 | 300,184,899 | 0 | 0 | null | 2020-10-01T07:19:17 | 2020-10-01T07:19:16 | null |
UTF-8
|
Python
| false | false | 11,239 |
py
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
"""
module: ai_edward32tnt
"""
from ailib import *
LEFT, UP, RIGHT, DOWN = range(4)
class AI(BaseAI):
def __init__(self):
self.name = 'edward32tnt ai %d' % random.randint(1, 1000)
types = ['python', 'ruby']
self.type = types[random.randint(0, 1)]
def setmap(self, map):
self.map = map
def get_nearest_bean(self, beans, head):
bean, distance = None, None
for b in beans:
d = abs(head[0] - b[0]) ** 2 + \
abs(head[1] - b[1]) ** 2
if not bean or d < distance:
bean = b
distance = d
return bean
def step(self, info):
"""
        calculate the next direction using path ratings
"""
self.info = info
result = self.cmd_turn()
return result
    ############ gather map information
    # treat cells around other snakes' heads as guessed danger zones
def set_guess_posion(self):
res = []
for snake in self.info['snakes']:
if self.head != snake['body'][0]:
for point in self.get_around(snake['body'][0]):
res.append(point)
return res
def set_others(self):
self.other_head = []
res = []
for snake in self.info['snakes']:
for body in snake['body']:
res.append(body)
if self.head != snake['body'][0]:
self.other_head.append(snake['body'][0])
return res
def set_walls(self):
res = []
for w in self.map['walls']:
res.append(w)
return res
def set_food(self):
res = []
if self.type == 'python':
food = 'eggs'
else:
food = 'gems'
for e in self.info[food]:
if [x for x in self.get_around(e, steps=2) if x in self.other_head]: continue
res.append(e)
return res
def set_posion(self):
res = []
if self.type != 'python':
posion = 'eggs'
else:
posion = 'gems'
for g in self.info[posion]:
res.append(g)
return res
###########
def cmd_turn(self):
"""
控制蛇方向
"""
direction = None
self.head = self.info['snakes'][self.seq]['body'][0]
others = self.set_others()
walls = self.set_walls()
food = self.set_food()
posion = self.set_posion()
guess_posion = self.set_guess_posion()
mapx, mapy = self.map['size']
startpoint = self.head
        # first try a completely safe route;
        # if none exists, fall back to a less safe one
next = self.find_safe_path(startpoint, food, others, walls, posion, guess_posion)
if next:
direction = self.find_next_direction_by_point(self.head, next)
else:
next = self.find_no_safe_path(startpoint, food, others, walls)
if next:
direction = self.find_next_direction_by_point(self.head, next)
        #print mapdata[-mapx:]
#print mapw
#print maph
#print startpoint
#print endpoint
        # if there is still no route, head toward the tail
if direction is None:
            # for now, move toward our own tail to avoid getting boxed in
            if not food:
                direction = random.randint(0, 3)
            else:
                direction = self.find_next_direction_by_point(self.head, self.info['snakes'][self.seq]['body'][-1])
return direction
    ################# utility methods (could be moved into a helper module)
def find_safe_path(self, startpoint, food, others, walls, posion, guess_posion):
return self._get_path(startpoint, food, others, walls, posion, guess_posion)
def find_no_safe_path(self, startpoint, food, others, walls):
return self._get_path(startpoint, food, others, walls)
def _get_path(self, startpoint, food, others, walls, posion=[], guess_posion=[]):
counts = 0
next = None
for e in food:
endpoint = e
mapdata = []
for y in range(self.map['size'][1]):
for x in range(self.map['size'][0]):
rc = [x, y]
if rc == self.head:
mapdata.append(5)
continue
if rc == endpoint:
mapdata.append(6)
continue
if rc in others or rc in walls or rc in posion or rc in guess_posion:
mapdata.append(-1)
continue
mapdata.append(1)
astar = AStar(SQ_MapHandler(mapdata, self.map['size'][0], self.map['size'][1]))
start = SQ_Location(startpoint[0], startpoint[1])
end = SQ_Location(endpoint[0], endpoint[1])
p = astar.findPath(start, end)
if not p:continue
if len(p.nodes) < counts or next == None:
counts = len(p.nodes)
next = [p.nodes[0].location.x , p.nodes[0].location.y]
return next
def find_next_direction_by_point(self, point, next):
if point[0] < next[0]: return RIGHT
if point[0] > next[0]: return LEFT
if point[1] > next[1]: return UP
if point[1] < next[1]: return DOWN
def find_next_point_by_direction(self, point, direction, step):
if direction == LEFT: return [point[0] - step, point[1]]
if direction == RIGHT: return [point[0] + step, point[1]]
if direction == UP: return [point[0], point[1] - step]
if direction == DOWN: return [point[0], point[1] + step]
def get_around(self, point, steps=1):
for step in range(steps):
for d in (LEFT, UP, RIGHT, DOWN):
yield self.find_next_point_by_direction(point, d, step+1)
############## ############
# Version 1.1
#
# Changes in 1.1:
# In order to optimize the list handling I implemented the location id (lid) attribute.
# This makes all of the list searches considerably faster.
class Path:
def __init__(self,nodes, totalCost):
self.nodes = nodes;
self.totalCost = totalCost;
def getNodes(self):
return self.nodes
def getTotalMoveCost(self):
return self.totalCost
class Node:
def __init__(self,location,mCost,lid,parent=None):
self.location = location # where is this node located
self.mCost = mCost # total move cost to reach this node
self.parent = parent # parent node
self.score = 0 # calculated score for this node
self.lid = lid # set the location id - unique for each location in the map
def __eq__(self, n):
if n.lid == self.lid:
return 1
else:
return 0
class AStar:
def __init__(self,maphandler):
self.mh = maphandler
def _getBestOpenNode(self):
bestNode = None
for n in self.on:
if not bestNode:
bestNode = n
else:
if n.score<=bestNode.score:
bestNode = n
return bestNode
def _tracePath(self,n):
nodes = [];
totalCost = n.mCost;
p = n.parent;
nodes.insert(0,n);
while 1:
if p.parent is None:
break
nodes.insert(0,p)
p=p.parent
return Path(nodes,totalCost)
def _handleNode(self,node,end):
i = self.o.index(node.lid)
self.on.pop(i)
self.o.pop(i)
self.c.append(node.lid)
nodes = self.mh.getAdjacentNodes(node,end)
for n in nodes:
if n.location.x % self.mh.w == end.x and n.location.y % self.mh.h == end.y:
# reached the destination
return n
elif n.lid in self.c:
# already in close, skip this
continue
elif n.lid in self.o:
# already in open, check if better score
i = self.o.index(n.lid)
on = self.on[i];
if n.mCost<on.mCost:
self.on.pop(i);
self.o.pop(i);
self.on.append(n);
self.o.append(n.lid);
else:
# new node, append to open list
self.on.append(n);
self.o.append(n.lid);
return None
def findPath(self,fromlocation, tolocation):
self.o = []
self.on = []
self.c = []
end = tolocation
fnode = self.mh.getNode(fromlocation)
self.on.append(fnode)
self.o.append(fnode.lid)
nextNode = fnode
while nextNode is not None:
finish = self._handleNode(nextNode,end)
if finish:
return self._tracePath(finish)
nextNode=self._getBestOpenNode()
return None
class SQ_Location:
"""A simple Square Map Location implementation"""
def __init__(self,x,y):
self.x = x
self.y = y
def __eq__(self, l):
"""MUST BE IMPLEMENTED"""
if l.x == self.x and l.y == self.y:
return 1
else:
return 0
class SQ_MapHandler:
"""A simple Square Map implementation"""
def __init__(self,mapdata,width,height):
self.m = mapdata
self.w = width
self.h = height
def getNode(self, location):
"""MUST BE IMPLEMENTED"""
x = location.x
y = location.y
if x<0 or x>=self.w or y<0 or y>=self.h:
#return None
x = x % self.w
y = y % self.h
d = self.m[(y*self.w)+x]
if d == -1:
return None
return Node(location,d,((y*self.w)+x));
def getAdjacentNodes(self, curnode, dest):
"""MUST BE IMPLEMENTED"""
result = []
cl = curnode.location
dl = dest
n = self._handleNode(cl.x+1,cl.y,curnode,dl.x,dl.y)
if n: result.append(n)
n = self._handleNode(cl.x-1,cl.y,curnode,dl.x,dl.y)
if n: result.append(n)
n = self._handleNode(cl.x,cl.y+1,curnode,dl.x,dl.y)
if n: result.append(n)
n = self._handleNode(cl.x,cl.y-1,curnode,dl.x,dl.y)
if n: result.append(n)
return result
def _handleNode(self,x,y,fromnode,destx,desty):
n = self.getNode(SQ_Location(x,y))
if n is not None:
dx = min(abs(x - destx), self.w - abs(x-destx))
dy = min(abs(y - desty), self.h - abs(y-desty))
emCost = dx+dy
n.mCost += fromnode.mCost
n.score = n.mCost+emCost
n.parent=fromnode
return n
return None
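# --------------------------------------------------------------------
# Illustrative sketch (not part of the original AI): exercising the A*
# classes above on a tiny 3x3 wrap-around map. -1 marks a blocked cell,
# every other cell costs 1. Not invoked by the game itself.
def _demo_astar():
    demo_data = [1, -1, 1,
                 1,  1, 1,
                 1,  1, 1]
    astar = AStar(SQ_MapHandler(demo_data, 3, 3))
    path = astar.findPath(SQ_Location(0, 0), SQ_Location(2, 0))
    if path:
        # locations may come back unwrapped (e.g. x == -1), so reduce
        # them modulo the map size before printing
        print([(n.location.x % 3, n.location.y % 3) for n in path.nodes])
        # -> [(2, 0)]: the search wraps left off the map edge in one step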
if __name__=="__main__":
cmd_run(AI)
|
[
"[email protected]"
] | |
73bd1399a667fde2ebc55ea845c58e86949ac808
|
e4eabccc6d971289cf13653d1b6f290e39b870ab
|
/1227-number-of-equivalent-domino-pairs/number-of-equivalent-domino-pairs.py
|
50269f16fcfe4f0d8b0cc7b72998c9203a01c939
|
[] |
no_license
|
HEroKuma/leetcode
|
128b38a9f559dc9e3f21c86a47ede67ad72f7675
|
b3045aaedbe98eddc7e4e518a03a9337a63be716
|
refs/heads/master
| 2023-01-03T12:12:31.018717 | 2020-11-01T16:56:47 | 2020-11-01T16:56:47 | 260,488,865 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 710 |
py
|
# Given a list of dominoes, dominoes[i] = [a, b] is equivalent to dominoes[j] = [c, d] if and only if either (a==c and b==d), or (a==d and b==c) - that is, one domino can be rotated to be equal to another domino.
#
# Return the number of pairs (i, j) for which 0 <= i < j < dominoes.length, and dominoes[i] is equivalent to dominoes[j].
#
#
# Example 1:
# Input: dominoes = [[1,2],[2,1],[3,4],[5,6]]
# Output: 1
#
#
# Constraints:
#
#
# 1 <= dominoes.length <= 40000
# 1 <= dominoes[i][j] <= 9
#
class Solution:
def numEquivDominoPairs(self, dominoes: List[List[int]]) -> int:
        # smaller pip in the tens place gives an order-independent key (pips <= 9);
        # each group of v equivalent dominoes contributes v*(v-1)//2 unordered pairs
        return sum((v - 1) * v // 2 for v in collections.Counter(10 * min(i) + max(i) for i in dominoes).values())
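# Illustrative check (not part of the submission): Example 1 from the
# problem statement, run with the same counting logic outside the
# LeetCode harness (which normally supplies collections and List).
import collections

dominoes = [[1, 2], [2, 1], [3, 4], [5, 6]]
counts = collections.Counter(10 * min(d) + max(d) for d in dominoes)
assert sum((v - 1) * v // 2 for v in counts.values()) == 1  # [1,2] ~ [2,1]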
|
[
"[email protected]"
] | |
f88f886baeef5abcf18ab0d0cbffcb906bd5e6e1
|
d458b72b4d0e5c51446bb8b9f8a6276015dfb594
|
/supervised_learning/0x0E-time_series/main.py
|
138a3132081e49e51e938fc6a075feb8c63d734a
|
[] |
no_license
|
mecomontes/Machine-Learning-projects
|
d6588cfaa7d020d3fae0fb74f6550c9e84500578
|
50e1828b58bb58eecfd3a142501b37fe701f4e49
|
refs/heads/main
| 2023-07-14T12:30:19.792332 | 2021-08-29T15:33:16 | 2021-08-29T15:33:16 | 376,129,791 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 534 |
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun May 4 5:40:12 2021
@author: Robinson Montes
"""
import pandas as pd
import datetime as dt
preprocess = __import__('preprocess_data').preprocessing
if __name__ == "__main__":
file_path = '../data/coinbaseUSD_1-min_data_2014-12-01_to_2019-01-09.csv'
train, validation, test = preprocess(file_path)
print('Train values: ')
print(train.head())
print('Validation values:')
print(validation.head())
print('Test values')
print(test.head())
|
[
"[email protected]"
] | |
745e8be5553cabb487ef2d2a32d9fd05f5ba9c87
|
6e00e1ad30e19635c943b370a1aaf9b7eab4beb8
|
/backend/chat_user_profile/migrations/0001_initial.py
|
f41e67e03a512f175dd8bb3a41235a0793f1b06d
|
[] |
no_license
|
crowdbotics-apps/chat-28286
|
c06bafd9ba1bd8b821fd6b76f7580cf88caae44d
|
3aa33ad493ab36e00f136654aa96e6ddc4b57135
|
refs/heads/master
| 2023-06-22T01:56:22.263016 | 2021-06-29T06:53:41 | 2021-06-29T06:53:41 | 381,143,777 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,659 |
py
|
# Generated by Django 2.2.20 on 2021-06-28 19:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mobile_number', models.CharField(max_length=20)),
('pin', models.CharField(max_length=100)),
('photo', models.URLField()),
('status', models.CharField(max_length=50)),
('birthdate', models.DateField()),
('gender', models.CharField(max_length=1)),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('last_updated', models.DateTimeField(auto_now=True)),
('last_login', models.DateTimeField()),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile_user', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='VerificationCode',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(max_length=255)),
('is_verified', models.BooleanField()),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('timestamp_verified', models.DateTimeField()),
('sent_to', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='verificationcode_sent_to', to='chat_user_profile.Profile')),
],
),
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_blocked', models.BooleanField()),
('is_favorite', models.BooleanField()),
('timestamp_created', models.DateTimeField(auto_now_add=True)),
('added_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_added_by', to=settings.AUTH_USER_MODEL)),
('added_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_added_profile', to='chat_user_profile.Profile')),
],
),
]
|
[
"[email protected]"
] | |
1fe6c3653aa6425f672737d1c45d2a5983869d29
|
d5edda31631c8e71e2991817f713ba30484245c6
|
/Tools/BuildSlaveSupport/ews-build/steps_unittest.py
|
3448fa2c1efcbcd28df18de094f97d74c064af12
|
[] |
no_license
|
M4cs/webkit
|
be0a5e8a6ade907343cb29e44422224749255411
|
a4feb0de129f8f6df3db666b0dfce89dd1ff7e40
|
refs/heads/master
| 2023-05-28T13:04:09.512865 | 2019-06-24T19:57:46 | 2019-06-24T19:57:46 | 193,574,901 | 1 | 0 | null | 2019-06-24T20:26:45 | 2019-06-24T20:26:45 | null |
UTF-8
|
Python
| false | false | 66,458 |
py
|
# Copyright (C) 2018-2019 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import operator
import os
import shutil
import tempfile
from buildbot.process import remotetransfer
from buildbot.process.results import Results, SUCCESS, FAILURE, WARNINGS, SKIPPED, EXCEPTION, RETRY
from buildbot.test.fake.remotecommand import Expect, ExpectRemoteRef, ExpectShell
from buildbot.test.util.steps import BuildStepMixin
from mock import call
from twisted.internet import error, reactor
from twisted.python import failure, log
from twisted.trial import unittest
from steps import (AnalyzeAPITestsResults, AnalyzeCompileWebKitResults, ApplyPatch, ArchiveBuiltProduct, ArchiveTestResults,
CheckOutSource, CheckOutSpecificRevision, CheckPatchRelevance, CheckStyle, CleanBuild, CleanWorkingDirectory,
CompileJSCOnly, CompileJSCOnlyToT, CompileWebKit, CompileWebKitToT, ConfigureBuild,
DownloadBuiltProduct, ExtractBuiltProduct, ExtractTestResults, KillOldProcesses,
PrintConfiguration, ReRunAPITests, ReRunJavaScriptCoreTests, RunAPITests, RunAPITestsWithoutPatch,
RunBindingsTests, RunJavaScriptCoreTests, RunJavaScriptCoreTestsToT, RunWebKit1Tests, RunWebKitPerlTests,
RunWebKitPyTests, RunWebKitTests, TestWithFailureCount, Trigger, TransferToS3, UnApplyPatchIfRequired,
UploadBuiltProduct, UploadTestResults, ValidatePatch)
# Workaround for https://github.com/buildbot/buildbot/issues/4669
from buildbot.test.fake.fakebuild import FakeBuild
FakeBuild.addStepsAfterCurrentStep = lambda FakeBuild, step_factories: None
def mock_step(step, logs='', results=SUCCESS, stopped=False, properties=None):
step.logs = logs
step.results = results
step.stopped = stopped
return step
class ExpectMasterShellCommand(object):
def __init__(self, command, workdir=None, env=None, usePTY=0):
self.args = command
self.usePTY = usePTY
self.rc = None
self.path = None
self.logs = []
if env is not None:
self.env = env
else:
self.env = os.environ
if workdir:
self.path = os.path.join(os.getcwd(), workdir)
@classmethod
def log(self, name, value):
return ('log', name, value)
def __add__(self, other):
if isinstance(other, int):
self.rc = other
elif isinstance(other, tuple) and other[0] == 'log':
self.logs.append((other[1], other[2]))
return self
def __repr__(self):
return 'ExpectMasterShellCommand({0})'.format(repr(self.args))
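# Illustrative note (not part of the original tests): ExpectMasterShellCommand
# overloads "+" as a builder, mirroring buildbot's ExpectShell, so an
# expectation reads as command + logs + return code, e.g.:
#
#     ExpectMasterShellCommand(command=['ls'])
#     + ExpectMasterShellCommand.log('stdout', 'ok')   # appended to .logs
#     + 0                                              # sets .rc to 0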
class BuildStepMixinAdditions(BuildStepMixin):
def setUpBuildStep(self):
self.patch(reactor, 'spawnProcess', lambda *args, **kwargs: self._checkSpawnProcess(*args, **kwargs))
self._expected_local_commands = []
self._temp_directory = tempfile.mkdtemp()
os.chdir(self._temp_directory)
self._expected_uploaded_files = []
super(BuildStepMixinAdditions, self).setUpBuildStep()
def tearDownBuildStep(self):
shutil.rmtree(self._temp_directory)
super(BuildStepMixinAdditions, self).tearDownBuildStep()
def fakeBuildFinished(self, text, results):
self.build.text = text
self.build.results = results
def setupStep(self, step, *args, **kwargs):
self.previous_steps = kwargs.get('previous_steps') or []
if self.previous_steps:
del kwargs['previous_steps']
super(BuildStepMixinAdditions, self).setupStep(step, *args, **kwargs)
self.build.terminate = False
self.build.stopped = False
self.build.executedSteps = self.executedSteps
self.build.buildFinished = self.fakeBuildFinished
self._expected_added_urls = []
self._expected_sources = None
@property
def executedSteps(self):
return filter(lambda step: not step.stopped, self.previous_steps)
def setProperty(self, name, value, source='Unknown'):
self.properties.setProperty(name, value, source)
def expectAddedURLs(self, added_urls):
self._expected_added_urls = added_urls
def expectUploadedFile(self, path):
self._expected_uploaded_files.append(path)
def expectLocalCommands(self, *expected_commands):
self._expected_local_commands.extend(expected_commands)
def expectRemoteCommands(self, *expected_commands):
self.expectCommands(*expected_commands)
def expectSources(self, expected_sources):
self._expected_sources = expected_sources
def _checkSpawnProcess(self, processProtocol, executable, args, env, path, usePTY, **kwargs):
got = (executable, args, env, path, usePTY)
if not self._expected_local_commands:
self.fail('got local command {0} when no further commands were expected'.format(got))
local_command = self._expected_local_commands.pop(0)
try:
self.assertEqual(got, (local_command.args[0], local_command.args, local_command.env, local_command.path, local_command.usePTY))
except AssertionError:
log.err()
raise
for name, value in local_command.logs:
if name == 'stdout':
processProtocol.outReceived(value)
elif name == 'stderr':
processProtocol.errReceived(value)
if local_command.rc != 0:
value = error.ProcessTerminated(exitCode=local_command.rc)
else:
value = error.ProcessDone(None)
processProtocol.processEnded(failure.Failure(value))
def _added_files(self):
results = []
for dirpath, dirnames, filenames in os.walk(self._temp_directory):
relative_root_path = os.path.relpath(dirpath, start=self._temp_directory)
if relative_root_path == '.':
relative_root_path = ''
for name in filenames:
results.append(os.path.join(relative_root_path, name))
return results
def runStep(self):
def check(result):
self.assertEqual(self._expected_local_commands, [], 'assert all expected local commands were run')
self.expectAddedURLs(self._expected_added_urls)
self.assertEqual(self._added_files(), self._expected_uploaded_files)
if self._expected_sources is not None:
# Convert to dictionaries because assertEqual() only knows how to diff Python built-in types.
actual_sources = sorted([source.asDict() for source in self.build.sources], key=operator.itemgetter('codebase'))
expected_sources = sorted([source.asDict() for source in self._expected_sources], key=operator.itemgetter('codebase'))
self.assertEqual(actual_sources, expected_sources)
deferred_result = super(BuildStepMixinAdditions, self).runStep()
deferred_result.addCallback(check)
return deferred_result
def uploadFileWithContentsOfString(string, timestamp=None):
def behavior(command):
writer = command.args['writer']
writer.remote_write(string + '\n')
writer.remote_close()
if timestamp:
writer.remote_utime(timestamp)
return behavior
class TestCheckStyle(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success_internal(self):
self.setupStep(CheckStyle())
self.setProperty('try-codebase', 'internal')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/check-webkit-style'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='check-webkit-style')
return self.runStep()
def test_failure_unknown_try_codebase(self):
self.setupStep(CheckStyle())
self.setProperty('try-codebase', 'foo')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/check-webkit-style'],
)
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='check-webkit-style (failure)')
return self.runStep()
def test_failures_with_style_issues(self):
self.setupStep(CheckStyle())
self.setProperty('try-codebase', 'internal')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/check-webkit-style'],
)
+ ExpectShell.log('stdio', stdout='''ERROR: Source/WebCore/layout/FloatingContext.cpp:36: Code inside a namespace should not be indented. [whitespace/indent] [4]
ERROR: Source/WebCore/layout/FormattingContext.h:94: Weird number of spaces at line-start. Are you using a 4-space indent? [whitespace/indent] [3]
ERROR: Source/WebCore/layout/LayoutContext.cpp:52: Place brace on its own line for function definitions. [whitespace/braces] [4]
ERROR: Source/WebCore/layout/LayoutContext.cpp:55: Extra space before last semicolon. If this should be an empty statement, use { } instead. [whitespace/semicolon] [5]
ERROR: Source/WebCore/layout/LayoutContext.cpp:60: Tab found; better to use spaces [whitespace/tab] [1]
ERROR: Source/WebCore/layout/Verification.cpp:88: Missing space before ( in while( [whitespace/parens] [5]
Total errors found: 8 in 48 files''')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='8 style errors (failure)')
return self.runStep()
def test_failures_no_style_issues(self):
self.setupStep(CheckStyle())
self.setProperty('try-codebase', 'internal')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/check-webkit-style'],
)
+ ExpectShell.log('stdio', stdout='Total errors found: 0 in 6 files')
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='check-webkit-style')
return self.runStep()
def test_failures_no_changes(self):
self.setupStep(CheckStyle())
self.setProperty('try-codebase', 'internal')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/check-webkit-style'],
)
+ ExpectShell.log('stdio', stdout='Total errors found: 0 in 0 files')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='check-webkit-style (failure)')
return self.runStep()
class TestRunBindingsTests(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
self.jsonFileName = 'bindings_test_results.json'
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(RunBindingsTests())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
timeout=300,
command=['Tools/Scripts/run-bindings-tests', '--json-output={0}'.format(self.jsonFileName)],
logfiles={'json': self.jsonFileName},
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Passed bindings tests')
return self.runStep()
def test_failure(self):
self.setupStep(RunBindingsTests())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
timeout=300,
command=['Tools/Scripts/run-bindings-tests', '--json-output={0}'.format(self.jsonFileName)],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='FAIL: (JS) JSTestInterface.cpp')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='bindings-tests (failure)')
return self.runStep()
class TestRunWebKitPerlTests(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(RunWebKitPerlTests())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/test-webkitperl'],
timeout=120,
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='webkitperl-tests')
return self.runStep()
def test_failure(self):
self.setupStep(RunWebKitPerlTests())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/test-webkitperl'],
timeout=120,
)
+ ExpectShell.log('stdio', stdout='''Failed tests: 1-3, 5-7, 9, 11-13
Files=40, Tests=630, 4 wallclock secs ( 0.16 usr 0.09 sys + 2.78 cusr 0.64 csys = 3.67 CPU)
Result: FAIL
Failed 1/40 test programs. 10/630 subtests failed.''')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='webkitperl-tests (failure)')
return self.runStep()
class TestWebKitPyTests(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
self.jsonFileName = 'webkitpy_test_results.json'
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(RunWebKitPyTests())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/test-webkitpy', '--json-output={0}'.format(self.jsonFileName)],
logfiles={'json': self.jsonFileName},
timeout=120,
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Passed webkitpy tests')
return self.runStep()
def test_failure(self):
self.setupStep(RunWebKitPyTests())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/test-webkitpy', '--json-output={0}'.format(self.jsonFileName)],
logfiles={'json': self.jsonFileName},
timeout=120,
)
+ ExpectShell.log('stdio', stdout='''Ran 1744 tests in 5.913s
FAILED (failures=1, errors=0)''')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='webkitpy-tests (failure)')
return self.runStep()
class TestKillOldProcesses(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(KillOldProcesses())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/kill-old-processes', 'buildbot'],
timeout=60,
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Killed old processes')
return self.runStep()
def test_failure(self):
self.setupStep(KillOldProcesses())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/kill-old-processes', 'buildbot'],
timeout=60,
)
+ ExpectShell.log('stdio', stdout='Unexpected error.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Killed old processes (failure)')
return self.runStep()
class TestCleanBuild(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(CleanBuild())
self.setProperty('fullPlatform', 'ios-11')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/clean-build', '--platform=ios-11', '--release'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Deleted WebKitBuild directory')
return self.runStep()
def test_failure(self):
self.setupStep(CleanBuild())
self.setProperty('fullPlatform', 'ios-simulator-11')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/clean-build', '--platform=ios-simulator-11', '--debug'],
)
+ ExpectShell.log('stdio', stdout='Unexpected error.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Deleted WebKitBuild directory (failure)')
return self.runStep()
class TestCompileWebKit(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(CompileWebKit())
self.setProperty('fullPlatform', 'ios-simulator-11')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/build-webkit', '--release'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Compiled WebKit')
return self.runStep()
def test_failure(self):
self.setupStep(CompileWebKit())
self.setProperty('fullPlatform', 'mac-sierra')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/build-webkit', '--debug'],
)
+ ExpectShell.log('stdio', stdout='1 error generated.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Compiled WebKit (failure)')
return self.runStep()
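
# CompileWebKitToT rebuilds WebKit at tip-of-tree (without the patch); it only
# runs when the patchFailedToBuild property is set and is hidden otherwise.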
class TestCompileWebKitToT(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(CompileWebKitToT())
self.setProperty('fullPlatform', 'ios-simulator-11')
self.setProperty('configuration', 'release')
self.setProperty('patchFailedToBuild', True)
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/build-webkit', '--release'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Compiled WebKit')
return self.runStep()
def test_failure(self):
self.setupStep(CompileWebKitToT())
self.setProperty('fullPlatform', 'mac-sierra')
self.setProperty('configuration', 'debug')
self.setProperty('patchFailedToBuild', True)
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/build-webkit', '--debug'],
)
+ ExpectShell.log('stdio', stdout='1 error generated.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Compiled WebKit (failure)')
return self.runStep()
def test_skip(self):
self.setupStep(CompileWebKitToT())
self.setProperty('fullPlatform', 'ios-simulator-11')
self.setProperty('configuration', 'release')
self.expectHidden(True)
self.expectOutcome(result=SKIPPED, state_string='Compiled WebKit (skipped)')
return self.runStep()
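
# AnalyzeCompileWebKitResults compares the CompileWebKit and CompileWebKitToT
# results to distinguish a patch that fails to build from a broken trunk.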
class TestAnalyzeCompileWebKitResults(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_patch_with_build_failure(self):
previous_steps = [
mock_step(CompileWebKit(), results=FAILURE),
mock_step(CompileWebKitToT(), results=SUCCESS),
]
self.setupStep(AnalyzeCompileWebKitResults(), previous_steps=previous_steps)
self.expectOutcome(result=FAILURE, state_string='Patch does not build (failure)')
return self.runStep()
def test_patch_with_ToT_failure(self):
previous_steps = [
mock_step(CompileWebKit(), results=FAILURE),
mock_step(CompileWebKitToT(), results=FAILURE),
]
self.setupStep(AnalyzeCompileWebKitResults(), previous_steps=previous_steps)
self.expectOutcome(result=FAILURE, state_string='Unable to build WebKit without patch, retrying build (failure)')
return self.runStep()
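
# CompileJSCOnly builds JavaScriptCore alone via Tools/Scripts/build-jsc; the
# ToT variant below mirrors CompileWebKitToT's skip behavior.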
class TestCompileJSCOnly(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(CompileJSCOnly())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/build-jsc', '--release'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Compiled JSC')
return self.runStep()
def test_failure(self):
self.setupStep(CompileJSCOnly())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/build-jsc', '--debug'],
)
+ ExpectShell.log('stdio', stdout='1 error generated.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Compiled JSC (failure)')
return self.runStep()
class TestCompileJSCOnlyToT(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(CompileJSCOnlyToT())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'release')
self.setProperty('patchFailedToBuild', 'True')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/build-jsc', '--release'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Compiled JSC')
return self.runStep()
def test_failure(self):
self.setupStep(CompileJSCOnlyToT())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'debug')
self.setProperty('patchFailedToBuild', 'True')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/build-jsc', '--debug'],
)
+ ExpectShell.log('stdio', stdout='1 error generated.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Compiled JSC (failure)')
return self.runStep()
def test_skip(self):
self.setupStep(CompileJSCOnlyToT())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'debug')
self.expectHidden(True)
self.expectOutcome(result=SKIPPED, state_string='Compiled JSC (skipped)')
return self.runStep()
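
# RunJavaScriptCoreTests runs Tools/Scripts/run-javascriptcore-tests with JSON
# output; ReRunJavaScriptCoreTests and RunJavaScriptCoreTestsToT retry failures
# and re-test without the patch, keyed off the patchFailedJSCTests property.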
class TestRunJavaScriptCoreTests(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
self.jsonFileName = 'jsc_results.json'
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(RunJavaScriptCoreTests())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/run-javascriptcore-tests', '--no-build', '--no-fail-fast', '--json-output={0}'.format(self.jsonFileName), '--release'],
logfiles={'json': self.jsonFileName},
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='jscore-tests')
return self.runStep()
def test_failure(self):
self.setupStep(RunJavaScriptCoreTests())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/run-javascriptcore-tests', '--no-build', '--no-fail-fast', '--json-output={0}'.format(self.jsonFileName), '--debug'],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='9 failures found.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='jscore-tests (failure)')
return self.runStep()
class TestReRunJavaScriptCoreTests(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
self.jsonFileName = 'jsc_results.json'
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(ReRunJavaScriptCoreTests())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'release')
self.setProperty('patchFailedJSCTests', 'True')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/run-javascriptcore-tests', '--no-build', '--no-fail-fast', '--json-output={0}'.format(self.jsonFileName), '--release'],
logfiles={'json': self.jsonFileName},
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='jscore-tests')
return self.runStep()
def test_failure(self):
self.setupStep(ReRunJavaScriptCoreTests())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'debug')
self.setProperty('patchFailedJSCTests', 'True')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/run-javascriptcore-tests', '--no-build', '--no-fail-fast', '--json-output={0}'.format(self.jsonFileName), '--debug'],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='9 failures found.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='jscore-tests (failure)')
return self.runStep()
def test_skip(self):
self.setupStep(ReRunJavaScriptCoreTests())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'debug')
self.expectHidden(True)
self.expectOutcome(result=SKIPPED, state_string='jscore-tests (skipped)')
return self.runStep()
class TestRunJavaScriptCoreTestsToT(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
self.jsonFileName = 'jsc_results.json'
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(RunJavaScriptCoreTestsToT())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'release')
self.setProperty('patchFailedJSCTests', 'True')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/run-javascriptcore-tests', '--no-fail-fast', '--json-output={0}'.format(self.jsonFileName), '--release'],
logfiles={'json': self.jsonFileName},
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='jscore-tests')
return self.runStep()
def test_failure(self):
self.setupStep(RunJavaScriptCoreTestsToT())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'debug')
self.setProperty('patchFailedJSCTests', 'True')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['perl', 'Tools/Scripts/run-javascriptcore-tests', '--no-fail-fast', '--json-output={0}'.format(self.jsonFileName), '--debug'],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='9 failures found.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='jscore-tests (failure)')
return self.runStep()
def test_skip(self):
self.setupStep(RunJavaScriptCoreTestsToT())
self.setProperty('fullPlatform', 'jsc-only')
self.setProperty('configuration', 'debug')
self.expectHidden(True)
self.expectOutcome(result=SKIPPED, state_string='jscore-tests (skipped)')
return self.runStep()
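
# RunWebKitTests runs the layout tests through Tools/Scripts/run-webkit-tests;
# RunWebKit1Tests adds --dump-render-tree to exercise WebKit1.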
class TestRunWebKitTests(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(RunWebKitTests())
self.setProperty('fullPlatform', 'ios-simulator')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-webkit-tests', '--no-build', '--no-new-test-results', '--no-show-results', '--exit-after-n-failures', '30', '--skip-failing-tests', '--release', '--results-directory', 'layout-test-results', '--debug-rwt-logging'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Passed layout tests')
return self.runStep()
def test_failure(self):
self.setupStep(RunWebKitTests())
self.setProperty('fullPlatform', 'ios-simulator')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-webkit-tests', '--no-build', '--no-new-test-results', '--no-show-results', '--exit-after-n-failures', '30', '--skip-failing-tests', '--release', '--results-directory', 'layout-test-results', '--debug-rwt-logging'],
)
+ ExpectShell.log('stdio', stdout='9 failures found.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='layout-tests (failure)')
return self.runStep()
class TestRunWebKit1Tests(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(RunWebKit1Tests())
self.setProperty('fullPlatform', 'ios-11')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-webkit-tests', '--no-build', '--no-new-test-results', '--no-show-results', '--exit-after-n-failures', '30', '--skip-failing-tests', '--debug', '--dump-render-tree', '--results-directory', 'layout-test-results', '--debug-rwt-logging'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Passed layout tests')
return self.runStep()
def test_failure(self):
self.setupStep(RunWebKit1Tests())
self.setProperty('fullPlatform', 'ios-11')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-webkit-tests', '--no-build', '--no-new-test-results', '--no-show-results', '--exit-after-n-failures', '30', '--skip-failing-tests', '--release', '--dump-render-tree', '--results-directory', 'layout-test-results', '--debug-rwt-logging'],
)
+ ExpectShell.log('stdio', stdout='9 failures found.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='layout-tests (failure)')
return self.runStep()
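
# CheckOutSpecificRevision checks out the revision named by the ews_revision
# property and is hidden/skipped when that property is unset.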
class TestCheckOutSpecificRevision(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(CheckOutSpecificRevision())
self.setProperty('ews_revision', '1a3425cb92dbcbca12a10aa9514f1b77c76dc26')
self.expectHidden(False)
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
timeout=1200,
command=['git', 'checkout', '1a3425cb92dbcbca12a10aa9514f1b77c76dc26'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Checked out required revision')
return self.runStep()
def test_failure(self):
self.setupStep(CheckOutSpecificRevision())
self.setProperty('ews_revision', '1a3425cb92dbcbca12a10aa9514f1b77c76dc26')
self.expectHidden(False)
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
timeout=1200,
command=['git', 'checkout', '1a3425cb92dbcbca12a10aa9514f1b77c76dc26'],
)
+ ExpectShell.log('stdio', stdout='Unexpected failure')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Checked out required revision (failure)')
return self.runStep()
def test_skip(self):
self.setupStep(CheckOutSpecificRevision())
self.expectHidden(True)
self.expectOutcome(result=SKIPPED, state_string='Checked out required revision (skipped)')
return self.runStep()
class TestCleanWorkingDirectory(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(CleanWorkingDirectory())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/clean-webkit'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Cleaned working directory')
return self.runStep()
def test_failure(self):
self.setupStep(CleanWorkingDirectory())
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/clean-webkit'],
)
+ ExpectShell.log('stdio', stdout='Unexpected failure.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Cleaned working directory (failure)')
return self.runStep()
class TestUnApplyPatchIfRequired(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(UnApplyPatchIfRequired())
self.setProperty('patchFailedToBuild', True)
self.expectHidden(False)
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/clean-webkit'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Unapplied patch')
return self.runStep()
def test_failure(self):
self.setupStep(UnApplyPatchIfRequired())
self.setProperty('patchFailedToBuild', True)
self.expectHidden(False)
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['Tools/Scripts/clean-webkit'],
)
+ ExpectShell.log('stdio', stdout='Unexpected failure.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Unapplied patch (failure)')
return self.runStep()
def test_skip(self):
self.setupStep(UnApplyPatchIfRequired())
self.expectHidden(True)
self.expectOutcome(result=SKIPPED, state_string='Unapplied patch (skipped)')
return self.runStep()
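
# The following steps archive the built product, upload it to the build
# master, and mirror it to S3 so tester queues can fetch it.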
class TestArchiveBuiltProduct(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(ArchiveBuiltProduct())
self.setProperty('fullPlatform', 'ios-simulator')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/built-product-archive', '--platform=ios-simulator', '--release', 'archive'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Archived built product')
return self.runStep()
def test_failure(self):
self.setupStep(ArchiveBuiltProduct())
self.setProperty('fullPlatform', 'mac-sierra')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/built-product-archive', '--platform=mac-sierra', '--debug', 'archive'],
)
+ ExpectShell.log('stdio', stdout='Unexpected failure.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Archived built product (failure)')
return self.runStep()
class TestUploadBuiltProduct(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(UploadBuiltProduct())
self.setProperty('fullPlatform', 'mac-sierra')
self.setProperty('configuration', 'release')
self.setProperty('architecture', 'x86_64')
self.setProperty('patch_id', '1234')
self.expectHidden(False)
self.expectRemoteCommands(
Expect('uploadFile', dict(
workersrc='WebKitBuild/release.zip', workdir='wkdir',
blocksize=1024 * 256, maxsize=None, keepstamp=False,
writer=ExpectRemoteRef(remotetransfer.FileWriter),
))
+ Expect.behavior(uploadFileWithContentsOfString('Dummy zip file content.'))
+ 0,
)
self.expectUploadedFile('public_html/archives/mac-sierra-x86_64-release/1234.zip')
self.expectOutcome(result=SUCCESS, state_string='Uploaded built product')
return self.runStep()
def test_failure(self):
self.setupStep(UploadBuiltProduct())
self.setProperty('fullPlatform', 'mac-sierra')
self.setProperty('configuration', 'release')
self.setProperty('architecture', 'x86_64')
self.setProperty('patch_id', '1234')
self.expectHidden(False)
self.expectRemoteCommands(
Expect('uploadFile', dict(
workersrc='WebKitBuild/release.zip', workdir='wkdir',
blocksize=1024 * 256, maxsize=None, keepstamp=False,
writer=ExpectRemoteRef(remotetransfer.FileWriter),
))
+ Expect.behavior(uploadFileWithContentsOfString('Dummy zip file content.'))
+ 1,
)
self.expectUploadedFile('public_html/archives/mac-sierra-x86_64-release/1234.zip')
self.expectOutcome(result=FAILURE, state_string='Failed to upload built product')
return self.runStep()
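
# DownloadBuiltProduct fetches the archived build for a patch from the
# ews-archives.webkit.org S3 bucket.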
class TestDownloadBuiltProduct(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(DownloadBuiltProduct())
self.setProperty('fullPlatform', 'ios-simulator-12')
self.setProperty('configuration', 'release')
self.setProperty('architecture', 'x86_64')
self.setProperty('patch_id', '1234')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/download-built-product', '--release', 'https://s3-us-west-2.amazonaws.com/ews-archives.webkit.org/ios-simulator-12-x86_64-release/1234.zip'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Downloaded built product')
return self.runStep()
def test_failure(self):
self.setupStep(DownloadBuiltProduct())
self.setProperty('fullPlatform', 'mac-sierra')
self.setProperty('configuration', 'debug')
self.setProperty('architecture', 'x86_64')
self.setProperty('patch_id', '123456')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/download-built-product', '--debug', 'https://s3-us-west-2.amazonaws.com/ews-archives.webkit.org/mac-sierra-x86_64-debug/123456.zip'],
)
+ ExpectShell.log('stdio', stdout='Unexpected failure.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Failed to download built product from S3')
return self.runStep()
class TestExtractBuiltProduct(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(ExtractBuiltProduct())
self.setProperty('fullPlatform', 'ios-simulator')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/built-product-archive', '--platform=ios-simulator', '--release', 'extract'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Extracted built product')
return self.runStep()
def test_failure(self):
self.setupStep(ExtractBuiltProduct())
self.setProperty('fullPlatform', 'mac-sierra')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/built-product-archive', '--platform=mac-sierra', '--debug', 'extract'],
)
+ ExpectShell.log('stdio', stdout='Unexpected failure.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Extracted built product (failure)')
return self.runStep()
class TestTransferToS3(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(TransferToS3())
self.setProperty('fullPlatform', 'mac-highsierra')
self.setProperty('configuration', 'release')
self.setProperty('architecture', 'x86_64')
self.setProperty('patch_id', '1234')
self.expectLocalCommands(
ExpectMasterShellCommand(command=['python',
'../Shared/transfer-archive-to-s3',
'--patch_id', '1234',
'--identifier', 'mac-highsierra-x86_64-release',
'--archive', 'public_html/archives/mac-highsierra-x86_64-release/1234.zip',
])
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Transferred archive to S3')
return self.runStep()
def test_failure(self):
self.setupStep(TransferToS3())
self.setProperty('fullPlatform', 'ios-simulator-12')
self.setProperty('configuration', 'debug')
self.setProperty('architecture', 'x86_64')
self.setProperty('patch_id', '1234')
self.expectLocalCommands(
ExpectMasterShellCommand(command=['python',
'../Shared/transfer-archive-to-s3',
'--patch_id', '1234',
'--identifier', 'ios-simulator-12-x86_64-debug',
'--archive', 'public_html/archives/ios-simulator-12-x86_64-debug/1234.zip',
])
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Failed to transfer archive to S3')
return self.runStep()
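
# RunAPITests parses run-api-tests output, counting failed or timed-out API
# tests and reporting the count in the step's state string.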
class TestRunAPITests(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
self.jsonFileName = 'api_test_results.json'
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success_mac(self):
self.setupStep(RunAPITests())
self.setProperty('fullPlatform', 'mac-mojave')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-api-tests', '--no-build', '--release', '--verbose', '--json-output={0}'.format(self.jsonFileName)],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='''...
worker/0 TestWTF.WTF_Variant.OperatorAmpersand Passed
worker/0 TestWTF.WTF_Variant.Ref Passed
worker/0 TestWTF.WTF_Variant.RefPtr Passed
worker/0 TestWTF.WTF_Variant.RetainPtr Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingMakeVisitor Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingSwitchOn Passed
Ran 1888 tests of 1888 with 1888 successful
------------------------------
All tests successfully passed!
''')
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='run-api-tests')
return self.runStep()
def test_success_ios_simulator(self):
self.setupStep(RunAPITests())
self.setProperty('fullPlatform', 'ios-simulator-11')
self.setProperty('platform', 'ios')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-api-tests', '--no-build', '--debug', '--verbose', '--json-output={0}'.format(self.jsonFileName), '--ios-simulator'],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='''...
worker/0 TestWTF.WTF_Variant.OperatorAmpersand Passed
worker/0 TestWTF.WTF_Variant.Ref Passed
worker/0 TestWTF.WTF_Variant.RefPtr Passed
worker/0 TestWTF.WTF_Variant.RetainPtr Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingMakeVisitor Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingSwitchOn Passed
Ran 1888 tests of 1888 with 1888 successful
------------------------------
All tests successfully passed!
''')
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='run-api-tests')
return self.runStep()
def test_one_failure(self):
self.setupStep(RunAPITests())
self.setProperty('fullPlatform', 'mac-mojave')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-api-tests', '--no-build', '--debug', '--verbose', '--json-output={0}'.format(self.jsonFileName)],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='''
worker/0 TestWTF.WTF_Variant.OperatorAmpersand Passed
worker/0 TestWTF.WTF_Variant.Ref Passed
worker/0 TestWTF.WTF_Variant.RefPtr Passed
worker/0 TestWTF.WTF_Variant.RetainPtr Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingMakeVisitor Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingSwitchOn Passed
worker/0 exiting
Ran 1888 tests of 1888 with 1887 successful
------------------------------
Test suite failed
Crashed
TestWTF.WTF.StringConcatenate_Unsigned
**FAIL** WTF.StringConcatenate_Unsigned
Tools\\TestWebKitAPI\\Tests\\WTF\\StringConcatenate.cpp:84
Value of: makeString('hello ', static_cast<unsigned short>(42) , ' world')
Actual: hello 42 world
Expected: 'hello * world'
Which is: 74B00C9C
Testing completed, Exit status: 3
''')
+ 1,
)
self.expectOutcome(result=FAILURE, state_string='1 api test failed or timed out (failure)')
return self.runStep()
def test_multiple_failures_and_timeouts(self):
self.setupStep(RunAPITests())
self.setProperty('fullPlatform', 'mac-mojave')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-api-tests', '--no-build', '--debug', '--verbose', '--json-output={0}'.format(self.jsonFileName)],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='''...
worker/0 TestWTF.WTF_Variant.OperatorAmpersand Passed
worker/0 TestWTF.WTF_Variant.Ref Passed
worker/0 TestWTF.WTF_Variant.RefPtr Passed
worker/0 TestWTF.WTF_Variant.RetainPtr Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingMakeVisitor Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingSwitchOn Passed
worker/0 exiting
Ran 1888 tests of 1888 with 1884 successful
------------------------------
Test suite failed
Failed
TestWTF.WTF.StringConcatenate_Unsigned
**FAIL** WTF.StringConcatenate_Unsigned
Tools\\TestWebKitAPI\\Tests\\WTF\\StringConcatenate.cpp:84
Value of: makeString('hello ', static_cast<unsigned short>(42) , ' world')
Actual: hello 42 world
Expected: 'hello * world'
Which is: 74B00C9C
TestWTF.WTF_Expected.Unexpected
**FAIL** WTF_Expected.Unexpected
Tools\\TestWebKitAPI\\Tests\\WTF\\Expected.cpp:96
Value of: s1
Actual: oops
Expected: s0
Which is: oops
Timeout
TestWTF.WTF_PoisonedUniquePtrForTriviallyDestructibleArrays.Assignment
TestWTF.WTF_Lock.ContendedShortSection
Testing completed, Exit status: 3
''')
+ 4,
)
self.expectOutcome(result=FAILURE, state_string='4 api tests failed or timed out (failure)')
return self.runStep()
def test_unexpected_failure(self):
self.setupStep(RunAPITests())
self.setProperty('fullPlatform', 'mac-mojave')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-api-tests', '--no-build', '--debug', '--verbose', '--json-output={0}'.format(self.jsonFileName)],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='Unexpected failure. Failed to run api tests.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='run-api-tests (failure)')
return self.runStep()
def test_no_failures_or_timeouts_with_disabled(self):
self.setupStep(RunAPITests())
self.setProperty('fullPlatform', 'mac-mojave')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/Scripts/run-api-tests', '--no-build', '--debug', '--verbose', '--json-output={0}'.format(self.jsonFileName)],
logfiles={'json': self.jsonFileName},
)
+ ExpectShell.log('stdio', stdout='''...
worker/0 TestWTF.WTF_Variant.OperatorAmpersand Passed
worker/0 TestWTF.WTF_Variant.Ref Passed
worker/0 TestWTF.WTF_Variant.RefPtr Passed
worker/0 TestWTF.WTF_Variant.RetainPtr Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingMakeVisitor Passed
worker/0 TestWTF.WTF_Variant.VisitorUsingSwitchOn Passed
worker/0 exiting
Ran 1881 tests of 1888 with 1881 successful
------------------------------
All tests successfully passed!
''')
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='run-api-tests')
return self.runStep()
class TestArchiveTestResults(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(ArchiveTestResults())
self.setProperty('fullPlatform', 'ios-simulator')
self.setProperty('platform', 'ios-simulator')
self.setProperty('configuration', 'release')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/test-result-archive', '--platform=ios-simulator', '--release', 'archive'],
)
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Archived test results')
return self.runStep()
def test_failure(self):
self.setupStep(ArchiveTestResults())
self.setProperty('fullPlatform', 'mac-mojave')
self.setProperty('platform', 'mac')
self.setProperty('configuration', 'debug')
self.expectRemoteCommands(
ExpectShell(workdir='wkdir',
command=['python', 'Tools/BuildSlaveSupport/test-result-archive', '--platform=mac', '--debug', 'archive'],
)
+ ExpectShell.log('stdio', stdout='Unexpected failure.')
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='Archived test results (failure)')
return self.runStep()
class TestUploadTestResults(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(UploadTestResults())
self.setProperty('configuration', 'release')
self.setProperty('architecture', 'x86_64')
self.setProperty('patch_id', '1234')
self.setProperty('buildername', 'macOS-Sierra-Release-WK2-Tests-EWS')
self.setProperty('buildnumber', '12')
self.expectHidden(False)
self.expectRemoteCommands(
Expect('uploadFile', dict(
workersrc='layout-test-results.zip', workdir='wkdir',
blocksize=1024 * 256, maxsize=None, keepstamp=False,
writer=ExpectRemoteRef(remotetransfer.FileWriter),
))
+ Expect.behavior(uploadFileWithContentsOfString('Dummy zip file content.'))
+ 0,
)
self.expectUploadedFile('public_html/results/macOS-Sierra-Release-WK2-Tests-EWS/r1234-12.zip')
self.expectOutcome(result=SUCCESS, state_string='Uploaded test results')
return self.runStep()
class TestExtractTestResults(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(ExtractTestResults())
self.setProperty('configuration', 'release')
self.setProperty('patch_id', '1234')
self.setProperty('buildername', 'macOS-Sierra-Release-WK2-Tests-EWS')
self.setProperty('buildnumber', '12')
self.expectLocalCommands(
ExpectMasterShellCommand(command=['unzip',
'public_html/results/macOS-Sierra-Release-WK2-Tests-EWS/r1234-12.zip',
'-d',
'public_html/results/macOS-Sierra-Release-WK2-Tests-EWS/r1234-12',
])
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='Extracted test results')
self.expectAddedURLs([call('view layout test results', '/results/test/r2468_ab1a28b4feee0d42973c7c05335b35bca927e974 (1)/results.html')])
return self.runStep()
def test_failure(self):
self.setupStep(ExtractTestResults())
self.setProperty('configuration', 'debug')
self.setProperty('patch_id', '1234')
self.setProperty('buildername', 'macOS-Sierra-Release-WK2-Tests-EWS')
self.setProperty('buildnumber', '12')
self.expectLocalCommands(
ExpectMasterShellCommand(command=['unzip',
'public_html/results/macOS-Sierra-Release-WK2-Tests-EWS/r1234-12.zip',
'-d',
'public_html/results/macOS-Sierra-Release-WK2-Tests-EWS/r1234-12',
])
+ 2,
)
self.expectOutcome(result=FAILURE, state_string='failed (2) (failure)')
self.expectAddedURLs([call('view layout test results', '/results/test/r2468_ab1a28b4feee0d42973c7c05335b35bca927e974 (1)/results.html')])
return self.runStep()
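
# PrintConfiguration logs the worker's hostname, disk usage, date, OS release,
# and Xcode SDK versions, summarizing the OS and Xcode versions on success.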
class TestPrintConfiguration(BuildStepMixinAdditions, unittest.TestCase):
def setUp(self):
self.longMessage = True
return self.setUpBuildStep()
def tearDown(self):
return self.tearDownBuildStep()
def test_success(self):
self.setupStep(PrintConfiguration())
self.setProperty('buildername', 'macOS-Sierra-Release-WK2-Tests-EWS')
self.setProperty('platform', 'mac')
self.expectRemoteCommands(
ExpectShell(command=['hostname'], workdir='wkdir', timeout=60, logEnviron=False) + 0
+ ExpectShell.log('stdio', stdout='ews150.apple.com'),
ExpectShell(command=['df', '-hl'], workdir='wkdir', timeout=60, logEnviron=False) + 0
+ ExpectShell.log('stdio', stdout='''Filesystem Size Used Avail Capacity iused ifree %iused Mounted on
/dev/disk1s1 119Gi 95Gi 23Gi 81% 937959 9223372036853837848 0% /
/dev/disk1s4 119Gi 20Ki 23Gi 1% 0 9223372036854775807 0% /private/var/vm
/dev/disk0s3 119Gi 22Gi 97Gi 19% 337595 4294629684 0% /Volumes/Data'''),
ExpectShell(command=['date'], workdir='wkdir', timeout=60, logEnviron=False) + 0
+ ExpectShell.log('stdio', stdout='Tue Apr 9 15:30:52 PDT 2019'),
ExpectShell(command=['sw_vers'], workdir='wkdir', timeout=60, logEnviron=False) + 0
+ ExpectShell.log('stdio', stdout='''ProductName: Mac OS X
ProductVersion: 10.13.4
BuildVersion: 17E199'''),
ExpectShell(command=['xcodebuild', '-sdk', '-version'], workdir='wkdir', timeout=60, logEnviron=False)
+ ExpectShell.log('stdio', stdout='''MacOSX10.13.sdk - macOS 10.13 (macosx10.13)
SDKVersion: 10.13
Path: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.13.sdk
PlatformVersion: 1.1
PlatformPath: /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform
ProductBuildVersion: 17E189
ProductCopyright: 1983-2018 Apple Inc.
ProductName: Mac OS X
ProductUserVisibleVersion: 10.13.4
ProductVersion: 10.13.4
Xcode 9.4.1
Build version 9F2000''')
+ 0,
)
self.expectOutcome(result=SUCCESS, state_string='OS: High Sierra (10.13.4), Xcode: 9.4.1')
return self.runStep()
def test_failure(self):
self.setupStep(PrintConfiguration())
self.setProperty('platform', 'mac')
self.expectRemoteCommands(
ExpectShell(command=['hostname'], workdir='wkdir', timeout=60, logEnviron=False) + 0,
ExpectShell(command=['df', '-hl'], workdir='wkdir', timeout=60, logEnviron=False) + 0,
ExpectShell(command=['date'], workdir='wkdir', timeout=60, logEnviron=False) + 0,
ExpectShell(command=['sw_vers'], workdir='wkdir', timeout=60, logEnviron=False) + 1
+ ExpectShell.log('stdio', stdout='''Upon execvpe sw_vers ['sw_vers'] in environment id 7696545650400
:Traceback (most recent call last):
File "/usr/lib/python2.7/site-packages/twisted/internet/process.py", line 445, in _fork
environment)
File "/usr/lib/python2.7/site-packages/twisted/internet/process.py", line 523, in _execChild
os.execvpe(executable, args, environment)
File "/usr/lib/python2.7/os.py", line 355, in execvpe
_execvpe(file, args, env)
File "/usr/lib/python2.7/os.py", line 382, in _execvpe
func(fullname, *argrest)
OSError: [Errno 2] No such file or directory'''),
ExpectShell(command=['xcodebuild', '-sdk', '-version'], workdir='wkdir', timeout=60, logEnviron=False)
+ ExpectShell.log('stdio', stdout='''Upon execvpe xcodebuild ['xcodebuild', '-sdk', '-version'] in environment id 7696545612416
:Traceback (most recent call last):
File "/usr/lib/python2.7/site-packages/twisted/internet/process.py", line 445, in _fork
environment)
File "/usr/lib/python2.7/site-packages/twisted/internet/process.py", line 523, in _execChild
os.execvpe(executable, args, environment)
File "/usr/lib/python2.7/os.py", line 355, in execvpe
_execvpe(file, args, env)
File "/usr/lib/python2.7/os.py", line 382, in _execvpe
func(fullname, *argrest)
OSError: [Errno 2] No such file or directory''')
+ 1,
)
self.expectOutcome(result=FAILURE, state_string='Failed to print configuration')
return self.runStep()
if __name__ == '__main__':
unittest.main()
authors: ["[email protected]@268f45cc-cd09-0410-ab3c-d52691b4dbfc"]
author_id: [email protected]@268f45cc-cd09-0410-ab3c-d52691b4dbfc

blob_id: abb119c8b27fc4e320f4d2c5e27a2aabd4ad94d0
directory_id: 23ef5d0633595817341b73c706db4a4c46e12354
path: /it/structures/python2/default_naming-default/upper_snake.py
content_id: 4db7ebbc4fd69b28715f397a5b3bde174990a84c
detected_licenses: ["MIT", "Apache-2.0"]
license_type: permissive
repo_name: c0ding/reproto
snapshot_id: 707eb25c8d28e6d052da6d428ca00bcd5617bb91
revision_id: 92f0a4b258095bc2f8a394d0bd44209e3a599c4f
branch_name: refs/heads/master
visit_date: 2022-11-18T08:13:23.789214 | revision_date: 2020-07-18T10:28:12 | committer_date: 2020-07-18T10:28:29
github_id: null | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 571 | extension: py
content:
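# Generated Python 2 data class from the reproto default_naming fixtures; note
# the `unicode` check in decode(), which requires a Python 2 runtime.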
class Value:
def __init__(self, _foo_bar):
self._foo_bar = _foo_bar
@property
def foo_bar(self):
return self._foo_bar
@staticmethod
def decode(data):
f_foo_bar = data["FOO_BAR"]
if not isinstance(f_foo_bar, unicode):
raise Exception("not a string")
return Value(f_foo_bar)
def encode(self):
data = dict()
if self._foo_bar is None:
raise Exception("FOO_BAR: is a required field")
data["FOO_BAR"] = self._foo_bar
return data
def __repr__(self):
return "<Value foo_bar:{!r}>".format(self._foo_bar)
authors: ["[email protected]"]
author_id:

blob_id: 55d9f917d9b308fe78ca6904fd50850bdeeef740
directory_id: a206e5d97bf4da00722ee8841fe2e0c2884d1c92
path: /feature.py
content_id: 36107eba78f7ffc5f82ecfc9665070df891c70c6
detected_licenses: []
license_type: no_license
repo_name: mikbuch/eeg_02_interface
snapshot_id: 969bb925758a330dbd287ad71e787bd0faa5ddf8
revision_id: 7efd6649c2176fba50c3113570697dc7f95ef9a4
branch_name: refs/heads/master
visit_date: 2016-09-06T14:29:22.842641 | revision_date: 2015-04-23T23:21:10 | committer_date: 2015-04-23T23:21:10
github_id: 34334702 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 581 | extension: py
content:
#!/usr/bin/env python
from statlib import stats
# zero crossing and negative sum
def zero_negative(package):
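    """Return (stdev, sum of negative samples, zero-crossing count) for one package."""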
negative = 0
zero_crossed = 0
for sample in range(len(package)):
if package[sample] > 0:
if sample != len(package) - 1:
if package[sample + 1] < 0:
zero_crossed += 1
else:
negative += package[sample]
if sample != len(package) - 1:
if package[sample + 1] > 0:
zero_crossed += 1
return stats.stdev(package), negative, zero_crossed
authors: ["[email protected]"]
author_id:

blob_id: 3f6ca5ca2b2d64456a2492ced90b6bbbb0e9dfe1
directory_id: 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae
path: /python/python_20254.py
content_id: 01dbac24564cec0d82afc53fd107f040f7941a65
detected_licenses: []
license_type: no_license
repo_name: AK-1121/code_extraction
snapshot_id: cc812b6832b112e3ffcc2bb7eb4237fd85c88c01
revision_id: 5297a4a3aab3bb37efa24a89636935da04a1f8b6
branch_name: refs/heads/master
visit_date: 2020-05-23T08:04:11.789141 | revision_date: 2015-10-22T19:19:40 | committer_date: 2015-10-22T19:19:40
github_id: null | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 64 | extension: py
content:
# How is order of items in matplotlib legend determined?
self.*
authors: ["[email protected]"]
author_id:

blob_id: 06637c07f994b92112d2115b4c12d9bd35b01444
directory_id: 9703641c14b7c19f2fcf937150204ab85b4151a2
path: /code pratice/设计指定长度随机密码.py
content_id: 6631266c946ef35f46c0ea5935dca28c3dfc7679
detected_licenses: []
license_type: no_license
repo_name: walkmiao/Little_Case
snapshot_id: 8effbea554c930e0eb32d4335ecbd5541a9c1251
revision_id: ab445659e19c85ecfd9b99f8d615c33f900662f8
branch_name: refs/heads/master
visit_date: 2021-06-11T05:30:39.415720 | revision_date: 2019-05-14T10:37:29 | committer_date: 2019-05-14T10:37:29
github_id: 128582484 | star_events_count: 1 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false
length_bytes: 741 | extension: py
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File : 设计指定长度随机密码.py
# @Author: lch
# @Date : 2018/10/10
# @Desc : Generate a random verification code of a given length.
"""
Problem 2: design a function that generates a verification code of the
specified length (a random string made up of digits and upper- and
lowercase English letters).
"""
import random


def generate_code(length=6):
    lower = [chr(i) for i in range(ord('a'), ord('z') + 1)]
    upper = [chr(i) for i in range(ord('A'), ord('Z') + 1)]
    digits = [str(i) for i in range(10)]
    # Reserve at least one character from each of the three categories.
    # sample() draws without replacement, so each count is capped by its pool.
    len_lower = random.randint(1, length - 2)
    len_upper = random.randint(1, length - len_lower - 1)
    len_digits = length - len_lower - len_upper
    chars = (random.sample(lower, len_lower)
             + random.sample(upper, len_upper)
             + random.sample(digits, len_digits))
    random.shuffle(chars)  # avoid a predictable lower/upper/digit ordering
    return ''.join(chars)


print(generate_code())
authors: ["[email protected]"]