content (stringlengths 7 to 928k) | avg_line_length (float64 3.5 to 33.8k) | max_line_length (int64 6 to 139k) | alphanum_fraction (float64 0.08 to 0.96) | licenses (sequence) | repository_name (stringlengths 7 to 104) | path (stringlengths 4 to 230) | size (int64 7 to 928k) | lang (stringclasses 1 value)
---|---|---|---|---|---|---|---|---|
"""Test UniFi config flow."""
from unittest.mock import patch
import aiounifi
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.unifi.const import (
CONF_ALLOW_BANDWIDTH_SENSORS,
CONF_ALLOW_UPTIME_SENSORS,
CONF_BLOCK_CLIENT,
CONF_CONTROLLER,
CONF_DETECTION_TIME,
CONF_DPI_RESTRICTIONS,
CONF_IGNORE_WIRED_BUG,
CONF_POE_CLIENTS,
CONF_SITE_ID,
CONF_SSID_FILTER,
CONF_TRACK_CLIENTS,
CONF_TRACK_DEVICES,
CONF_TRACK_WIRED_CLIENTS,
DOMAIN as UNIFI_DOMAIN,
)
from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VERIFY_SSL,
CONTENT_TYPE_JSON,
)
from .test_controller import setup_unifi_integration
from tests.common import MockConfigEntry
CLIENTS = [{"mac": "00:00:00:00:00:01"}]
DEVICES = [
{
"board_rev": 21,
"device_id": "mock-id",
"ip": "10.0.1.1",
"last_seen": 0,
"mac": "00:00:00:00:01:01",
"model": "U7PG2",
"name": "access_point",
"state": 1,
"type": "uap",
"version": "4.0.80.10875",
"wlan_overrides": [
{
"name": "SSID 3",
"radio": "na",
"radio_name": "wifi1",
"wlan_id": "012345678910111213141516",
},
{
"name": "",
"radio": "na",
"radio_name": "wifi1",
"wlan_id": "012345678910111213141516",
},
{
"radio": "na",
"radio_name": "wifi1",
"wlan_id": "012345678910111213141516",
},
],
}
]
WLANS = [
{"name": "SSID 1"},
{"name": "SSID 2", "name_combine_enabled": False, "name_combine_suffix": "_IOT"},
]
DPI_GROUPS = [
{
"_id": "5ba29dd8e3c58f026e9d7c4a",
"name": "Default",
"site_id": "5ba29dd4e3c58f026e9d7c38",
},
]
async def test_flow_works(hass, aioclient_mock, mock_discovery):
"""Test config flow."""
mock_discovery.return_value = "1"
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
assert result["data_schema"]({CONF_USERNAME: "", CONF_PASSWORD: ""}) == {
CONF_HOST: "unifi",
CONF_USERNAME: "",
CONF_PASSWORD: "",
CONF_PORT: 443,
CONF_VERIFY_SSL: False,
}
aioclient_mock.get("https://1.2.3.4:1234", status=302)
aioclient_mock.post(
"https://1.2.3.4:1234/api/login",
json={"data": "login successful", "meta": {"rc": "ok"}},
headers={"content-type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"https://1.2.3.4:1234/api/self/sites",
json={
"data": [{"desc": "Site name", "name": "site_id", "role": "admin"}],
"meta": {"rc": "ok"},
},
headers={"content-type": CONTENT_TYPE_JSON},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "Site name"
assert result["data"] == {
CONF_CONTROLLER: {
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_SITE_ID: "site_id",
CONF_VERIFY_SSL: True,
}
}
async def test_flow_works_multiple_sites(hass, aioclient_mock):
"""Test config flow works when finding multiple sites."""
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
aioclient_mock.post(
"https://1.2.3.4:1234/api/login",
json={"data": "login successful", "meta": {"rc": "ok"}},
headers={"content-type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"https://1.2.3.4:1234/api/self/sites",
json={
"data": [
{"name": "default", "role": "admin", "desc": "site name"},
{"name": "site2", "role": "admin", "desc": "site2 name"},
],
"meta": {"rc": "ok"},
},
headers={"content-type": CONTENT_TYPE_JSON},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "site"
assert result["data_schema"]({"site": "default"})
assert result["data_schema"]({"site": "site2"})
async def test_flow_raise_already_configured(hass, aioclient_mock):
"""Test config flow aborts since a connected config entry already exists."""
await setup_unifi_integration(hass)
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
aioclient_mock.post(
"https://1.2.3.4:1234/api/login",
json={"data": "login successful", "meta": {"rc": "ok"}},
headers={"content-type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"https://1.2.3.4:1234/api/self/sites",
json={
"data": [{"desc": "Site name", "name": "site_id", "role": "admin"}],
"meta": {"rc": "ok"},
},
headers={"content-type": CONTENT_TYPE_JSON},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
async def test_flow_aborts_configuration_updated(hass, aioclient_mock):
"""Test config flow aborts since a connected config entry already exists."""
entry = MockConfigEntry(
domain=UNIFI_DOMAIN, data={"controller": {"host": "1.2.3.4", "site": "office"}}
)
entry.add_to_hass(hass)
entry = MockConfigEntry(
domain=UNIFI_DOMAIN, data={"controller": {"host": "1.2.3.4", "site": "site_id"}}
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
aioclient_mock.post(
"https://1.2.3.4:1234/api/login",
json={"data": "login successful", "meta": {"rc": "ok"}},
headers={"content-type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"https://1.2.3.4:1234/api/self/sites",
json={
"data": [{"desc": "Site name", "name": "site_id", "role": "admin"}],
"meta": {"rc": "ok"},
},
headers={"content-type": CONTENT_TYPE_JSON},
)
with patch("homeassistant.components.unifi.async_setup_entry"):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "configuration_updated"
async def test_flow_fails_user_credentials_faulty(hass, aioclient_mock):
"""Test config flow."""
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.Unauthorized):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "faulty_credentials"}
async def test_flow_fails_controller_unavailable(hass, aioclient_mock):
"""Test config flow."""
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
aioclient_mock.get("https://1.2.3.4:1234", status=302)
with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "username",
CONF_PASSWORD: "password",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "service_unavailable"}
async def test_reauth_flow_update_configuration(hass, aioclient_mock):
"""Verify reauth flow can update controller configuration."""
controller = await setup_unifi_integration(hass)
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN,
context={"source": SOURCE_REAUTH},
data=controller.config_entry,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == SOURCE_USER
aioclient_mock.get("https://1.2.3.4:1234", status=302)
aioclient_mock.post(
"https://1.2.3.4:1234/api/login",
json={"data": "login successful", "meta": {"rc": "ok"}},
headers={"content-type": CONTENT_TYPE_JSON},
)
aioclient_mock.get(
"https://1.2.3.4:1234/api/self/sites",
json={
"data": [{"desc": "Site name", "name": "site_id", "role": "admin"}],
"meta": {"rc": "ok"},
},
headers={"content-type": CONTENT_TYPE_JSON},
)
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
user_input={
CONF_HOST: "1.2.3.4",
CONF_USERNAME: "new_name",
CONF_PASSWORD: "new_pass",
CONF_PORT: 1234,
CONF_VERIFY_SSL: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "reauth_successful"
assert controller.host == "1.2.3.4"
assert controller.config_entry.data[CONF_CONTROLLER][CONF_USERNAME] == "new_name"
assert controller.config_entry.data[CONF_CONTROLLER][CONF_PASSWORD] == "new_pass"
async def test_advanced_option_flow(hass):
"""Test advanced config flow options."""
controller = await setup_unifi_integration(
hass,
clients_response=CLIENTS,
devices_response=DEVICES,
wlans_response=WLANS,
dpigroup_response=DPI_GROUPS,
dpiapp_response=[],
)
result = await hass.config_entries.options.async_init(
controller.config_entry.entry_id, context={"show_advanced_options": True}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "device_tracker"
assert set(
result["data_schema"].schema[CONF_SSID_FILTER].options.keys()
).intersection(("SSID 1", "SSID 2", "SSID 2_IOT", "SSID 3"))
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_TRACK_CLIENTS: False,
CONF_TRACK_WIRED_CLIENTS: False,
CONF_TRACK_DEVICES: False,
CONF_SSID_FILTER: ["SSID 1", "SSID 2_IOT", "SSID 3"],
CONF_DETECTION_TIME: 100,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "client_control"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_BLOCK_CLIENT: [CLIENTS[0]["mac"]],
CONF_POE_CLIENTS: False,
CONF_DPI_RESTRICTIONS: False,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "statistics_sensors"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_ALLOW_BANDWIDTH_SENSORS: True,
CONF_ALLOW_UPTIME_SENSORS: True,
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"] == {
CONF_TRACK_CLIENTS: False,
CONF_TRACK_WIRED_CLIENTS: False,
CONF_TRACK_DEVICES: False,
CONF_SSID_FILTER: ["SSID 1", "SSID 2_IOT", "SSID 3"],
CONF_DETECTION_TIME: 100,
CONF_IGNORE_WIRED_BUG: False,
CONF_POE_CLIENTS: False,
CONF_DPI_RESTRICTIONS: False,
CONF_BLOCK_CLIENT: [CLIENTS[0]["mac"]],
CONF_ALLOW_BANDWIDTH_SENSORS: True,
CONF_ALLOW_UPTIME_SENSORS: True,
}
async def test_simple_option_flow(hass):
"""Test simple config flow options."""
controller = await setup_unifi_integration(
hass,
clients_response=CLIENTS,
wlans_response=WLANS,
dpigroup_response=DPI_GROUPS,
dpiapp_response=[],
)
result = await hass.config_entries.options.async_init(
controller.config_entry.entry_id, context={"show_advanced_options": False}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "simple_options"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={
CONF_TRACK_CLIENTS: False,
CONF_TRACK_DEVICES: False,
CONF_BLOCK_CLIENT: [CLIENTS[0]["mac"]],
},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"] == {
CONF_TRACK_CLIENTS: False,
CONF_TRACK_DEVICES: False,
CONF_BLOCK_CLIENT: [CLIENTS[0]["mac"]],
}
async def test_form_ssdp(hass):
"""Test we get the form with ssdp source."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
"friendlyName": "UniFi Dream Machine",
"modelDescription": "UniFi Dream Machine Pro",
"ssdp_location": "http://192.168.208.1:41417/rootDesc.xml",
"serialNumber": "e0:63:da:20:14:a9",
},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
context = next(
flow["context"]
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert context["title_placeholders"] == {
"host": "192.168.208.1",
"site": "default",
}
async def test_form_ssdp_aborts_if_host_already_exists(hass):
"""Test we abort if the host is already configured."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(
domain=UNIFI_DOMAIN,
data={"controller": {"host": "192.168.208.1", "site": "site_id"}},
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
"friendlyName": "UniFi Dream Machine",
"modelDescription": "UniFi Dream Machine Pro",
"ssdp_location": "http://192.168.208.1:41417/rootDesc.xml",
"serialNumber": "e0:63:da:20:14:a9",
},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_form_ssdp_aborts_if_serial_already_exists(hass):
"""Test we abort if the serial is already configured."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(
domain=UNIFI_DOMAIN,
data={"controller": {"host": "1.2.3.4", "site": "site_id"}},
unique_id="e0:63:da:20:14:a9",
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
"friendlyName": "UniFi Dream Machine",
"modelDescription": "UniFi Dream Machine Pro",
"ssdp_location": "http://192.168.208.1:41417/rootDesc.xml",
"serialNumber": "e0:63:da:20:14:a9",
},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
async def test_form_ssdp_gets_form_with_ignored_entry(hass):
"""Test we can still setup if there is an ignored entry."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(
domain=UNIFI_DOMAIN,
data={"not_controller_key": None},
source=config_entries.SOURCE_IGNORE,
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
UNIFI_DOMAIN,
context={"source": config_entries.SOURCE_SSDP},
data={
"friendlyName": "UniFi Dream Machine New",
"modelDescription": "UniFi Dream Machine Pro",
"ssdp_location": "http://1.2.3.4:41417/rootDesc.xml",
"serialNumber": "e0:63:da:20:14:a9",
},
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert result["errors"] == {}
context = next(
flow["context"]
for flow in hass.config_entries.flow.async_progress()
if flow["flow_id"] == result["flow_id"]
)
assert context["title_placeholders"] == {
"host": "1.2.3.4",
"site": "default",
}
| 31.936455 | 88 | 0.60221 | ["Apache-2.0"] | Nixon506E/home-assistant | tests/components/unifi/test_config_flow.py | 19,098 | Python |
import math
import random
import time
import argparse
from sklearn import preprocessing
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score, precision_score, recall_score, roc_curve, auc
import pandas as pd
import numpy as np
import torch.nn as nn
import torch
from LR import LR
from DBN import DBN
parser = argparse.ArgumentParser()
parser.add_argument('-project', type=str,
default='qt')
parser.add_argument('-data', type=str,
default='k')
parser.add_argument('-algorithm', type=str,
default='lr')
parser.add_argument('-drop', type=str,
default='')
parser.add_argument('-only', nargs='+',
default=[])
def evaluation_metrics(y_true, y_pred):
fpr, tpr, thresholds = roc_curve(y_true=y_true, y_score=y_pred, pos_label=1)
auc_ = auc(fpr, tpr)
y_pred = [1 if p >= 0.5 else 0 for p in y_pred]
acc = accuracy_score(y_true=y_true, y_pred=y_pred)
prc = precision_score(y_true=y_true, y_pred=y_pred)
rc = recall_score(y_true=y_true, y_pred=y_pred)
    # F1 is left at 0 here; the commented harmonic-mean formula would divide by zero when prc + rc == 0.
    # f1 = 2 * prc * rc / (prc + rc)
    f1 = 0
return acc, prc, rc, f1, auc_
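# Illustrative doctest-style check (toy values, not part of the original pipeline). With
# the perfectly separable predictions below, accuracy/precision/recall/AUC are all 1.0,
# while F1 is reported as 0 because the function leaves it unset:
# >>> evaluation_metrics(y_true=[0, 1, 1, 0], y_pred=[0.1, 0.9, 0.8, 0.2])
# (1.0, 1.0, 1.0, 0, 1.0)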
def replace_value_dataframe(df):
df = df.replace({True: 1, False: 0})
df = df.fillna(df.mean())
if args.drop:
df = df.drop(columns=[args.drop])
elif args.only:
df = df[['Unnamed: 0','_id','date','bug','__'] + args.only]
return df.values
def get_features(data):
# return the features of yasu data
return data[:, 5:]
def get_ids(data):
# return the labels of yasu data
return data[:, 1:2].flatten().tolist()
def get_label(data):
data = data[:, 3:4].flatten().tolist()
data = [1 if int(d) > 0 else 0 for d in data]
return data
def load_df_yasu_data(path_data):
data = pd.read_csv(path_data)
data = replace_value_dataframe(df=data)
ids, labels, features = get_ids(data=data), get_label(data=data), get_features(data=data)
indexes = list()
cnt_noexits = 0
for i in range(0, len(ids)):
try:
indexes.append(i)
except FileNotFoundError:
            print('File commit id does not exist', ids[i], cnt_noexits)
cnt_noexits += 1
ids = [ids[i] for i in indexes]
labels = [labels[i] for i in indexes]
features = features[indexes]
return (ids, np.array(labels), features)
def load_yasu_data(args):
train_path_data = 'data/{}/{}_train.csv'.format(args.project, args.data)
test_path_data = 'data/{}/{}_test.csv'.format(args.project, args.data)
train, test = load_df_yasu_data(train_path_data), load_df_yasu_data(test_path_data)
return train, test
def train_and_evl(data, label, args):
size = int(label.shape[0]*0.2)
auc_ = []
for i in range(5):
idx = size * i
X_e = data[idx:idx+size]
y_e = label[idx:idx+size]
X_t = np.vstack((data[:idx], data[idx+size:]))
y_t = np.hstack((label[:idx], label[idx+size:]))
model = LogisticRegression(max_iter=7000).fit(X_t, y_t)
y_pred = model.predict_proba(X_e)[:, 1]
fpr, tpr, thresholds = roc_curve(y_true=y_e, y_score=y_pred, pos_label=1)
auc_.append(auc(fpr, tpr))
return np.mean(auc_)
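# The loop above is a hand-rolled five-fold split over contiguous 20% chunks. A hedged,
# roughly equivalent sketch with scikit-learn utilities (not a drop-in replacement, since
# cross_val_score may stratify and order folds differently):
#
#   from sklearn.model_selection import cross_val_score
#   aucs = cross_val_score(LogisticRegression(max_iter=7000), data, label,
#                          cv=5, scoring='roc_auc')
#   mean_auc = aucs.mean()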
def mini_batches_update(X, Y, mini_batch_size=64, seed=0):
m = X.shape[0] # number of training examples
mini_batches = list()
np.random.seed(seed)
# Step 1: No shuffle (X, Y)
shuffled_X, shuffled_Y = X, Y
Y = Y.tolist()
Y_pos = [i for i in range(len(Y)) if Y[i] == 1]
Y_neg = [i for i in range(len(Y)) if Y[i] == 0]
# Step 2: Randomly pick mini_batch_size / 2 from each of positive and negative labels
num_complete_minibatches = int(math.floor(m / float(mini_batch_size))) + 1
for k in range(0, num_complete_minibatches):
indexes = sorted(
random.sample(Y_pos, int(mini_batch_size / 2)) + random.sample(Y_neg, int(mini_batch_size / 2)))
mini_batch_X, mini_batch_Y = shuffled_X[indexes], shuffled_Y[indexes]
mini_batch = (mini_batch_X, mini_batch_Y)
mini_batches.append(mini_batch)
return mini_batches
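# Sanity sketch for the balanced sampler above (assumed toy arrays): each batch draws
# mini_batch_size / 2 indices from the positive pool and the same number from the
# negative pool, so every mini_batch_Y below contains exactly two 1s and two 0s:
#
#   X_toy = np.arange(20).reshape(10, 2)
#   Y_toy = np.array([1, 1, 1, 0, 0, 0, 0, 0, 0, 0])
#   toy_batches = mini_batches_update(X_toy, Y_toy, mini_batch_size=4)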
def mini_batches(X, Y, mini_batch_size=64, seed=0):
m = X.shape[0] # number of training examples
mini_batches = list()
np.random.seed(seed)
# Step 1: No shuffle (X, Y)
shuffled_X, shuffled_Y = X, Y
# Step 2: Partition (X, Y). Minus the end case.
# number of mini batches of size mini_batch_size in your partitioning
num_complete_minibatches = int(math.floor(m / float(mini_batch_size)))
for k in range(0, num_complete_minibatches):
mini_batch_X = shuffled_X[k * mini_batch_size: k * mini_batch_size + mini_batch_size, :]
if len(Y.shape) == 1:
mini_batch_Y = shuffled_Y[k * mini_batch_size: k * mini_batch_size + mini_batch_size]
else:
mini_batch_Y = shuffled_Y[k * mini_batch_size: k * mini_batch_size + mini_batch_size, :]
mini_batch = (mini_batch_X, mini_batch_Y)
mini_batches.append(mini_batch)
# Handling the end case (last mini-batch < mini_batch_size)
if m % mini_batch_size != 0:
mini_batch_X = shuffled_X[num_complete_minibatches * mini_batch_size: m, :]
if len(Y.shape) == 1:
mini_batch_Y = shuffled_Y[num_complete_minibatches * mini_batch_size: m]
else:
mini_batch_Y = shuffled_Y[num_complete_minibatches * mini_batch_size: m, :]
mini_batch = (mini_batch_X, mini_batch_Y)
mini_batches.append(mini_batch)
return mini_batches
def DBN_JIT(train_features, train_labels, test_features, test_labels, hidden_units=[20, 12, 12], num_epochs_LR=200):
# training DBN model
#################################################################################################
starttime = time.time()
dbn_model = DBN(visible_units=train_features.shape[1],
hidden_units=hidden_units,
use_gpu=False)
dbn_model.train_static(train_features, train_labels, num_epochs=10)
# Finishing the training DBN model
# print('---------------------Finishing the training DBN model---------------------')
# using DBN model to construct features
DBN_train_features, _ = dbn_model.forward(train_features)
DBN_test_features, _ = dbn_model.forward(test_features)
DBN_train_features = DBN_train_features.numpy()
DBN_test_features = DBN_test_features.numpy()
train_features = np.hstack((train_features, DBN_train_features))
test_features = np.hstack((test_features, DBN_test_features))
if len(train_labels.shape) == 1:
num_classes = 1
else:
num_classes = train_labels.shape[1]
# lr_model = LR(input_size=hidden_units, num_classes=num_classes)
lr_model = LR(input_size=train_features.shape[1], num_classes=num_classes)
optimizer = torch.optim.Adam(lr_model.parameters(), lr=0.00001)
steps = 0
batches_test = mini_batches(X=test_features, Y=test_labels)
for epoch in range(1, num_epochs_LR + 1):
# building batches for training model
batches_train = mini_batches_update(X=train_features, Y=train_labels)
for batch in batches_train:
x_batch, y_batch = batch
x_batch, y_batch = torch.tensor(x_batch).float(), torch.tensor(y_batch).float()
optimizer.zero_grad()
predict = lr_model.forward(x_batch)
loss = nn.BCELoss()
loss = loss(predict, y_batch)
loss.backward()
optimizer.step()
# steps += 1
# if steps % 100 == 0:
# print('\rEpoch: {} step: {} - loss: {:.6f}'.format(epoch, steps, loss.item()))
endtime = time.time()
dtime = endtime - starttime
print("Train Time: %.8s s" % dtime) #显示到微秒
starttime = time.time()
y_pred, lables = lr_model.predict(data=batches_test)
endtime = time.time()
dtime = endtime - starttime
print("Eval Time: %.8s s" % dtime) #显示到微秒
return y_pred
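# Design note (hedged): the DBN acts purely as a feature extractor here; its activations
# are concatenated onto the raw features with np.hstack, so the logistic-regression head
# is trained on original_dim + DBN_output_dim columns (with the default hidden_units the
# DBN output width is assumed to be the last hidden layer's size, i.e. 12).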
def baseline_algorithm(train, test, algorithm, only=False):
_, y_train, X_train = train
_, y_test, X_test = test
X_train, X_test = preprocessing.scale(X_train), preprocessing.scale(X_test)
acc, prc, rc, f1, auc_ = 0, 0, 0, 0, 0
if algorithm == 'lr':
starttime = time.time()
model = LogisticRegression(max_iter=7000).fit(X_train, y_train)
endtime = time.time()
dtime = endtime - starttime
print("Train Time: %.8s s" % dtime) #显示到微秒
starttime = time.time()
y_pred = model.predict_proba(X_test)[:, 1]
endtime = time.time()
dtime = endtime - starttime
print("Eval Time: %.8s s" % dtime) #显示到微秒
acc, prc, rc, f1, auc_ = evaluation_metrics(y_true=y_test, y_pred=y_pred)
if only and not "cross" in args.data:
auc_ = train_and_evl(X_train, y_train, args)
print('Accuracy: %f -- Precision: %f -- Recall: %f -- F1: %f -- AUC: %f' % (acc, prc, rc, f1, auc_))
elif algorithm =='dbn':
y_pred = DBN_JIT(X_train, y_train, X_test, y_test)
acc, prc, rc, f1, auc_ = evaluation_metrics(y_true=y_test, y_pred=y_pred)
acc, prc, rc, f1 = 0, 0, 0, 0
print('Accuracy: %f -- Precision: %f -- Recall: %f -- F1: %f -- AUC: %f' % (acc, prc, rc, f1, auc_))
else:
print('You need to give the correct algorithm name')
return
return y_test, y_pred
def save_result(labels, predicts, path):
results = []
    for label, predict in zip(labels, predicts):
        results.append('{}\t{}\n'.format(label, predict))
with open(path, 'w', encoding='utf-8') as f:
f.writelines(results)
if __name__ == '__main__':
args = parser.parse_args()
save_path = 'result/{}/{}_{}_{}.result'.format(args.project, args.project, args.algorithm, args.data.replace("/","_"))
only = True if args.only else False
if args.algorithm == 'la':
args.algorithm = 'lr'
args.only = ['la']
if "all" in args.only:
args.only.remove("all")
train, test = load_yasu_data(args)
labels, predicts = baseline_algorithm(train=train, test=test, algorithm=args.algorithm, only=only)
if not only:
        save_result(labels, predicts, save_path)
| 35.402062 | 122 | 0.631722 | ["MIT"] | ZZR0/ISSTA21-JIT-DP | JIT_Baseline/baseline.py | 10,342 | Python |
"""
Support the sensor of a BloomSky weather station.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/sensor.bloomsky/
"""
import logging
from homeassistant.const import TEMP_FAHRENHEIT
from homeassistant.helpers.entity import Entity
from homeassistant.loader import get_component
DEPENDENCIES = ["bloomsky"]
# These are the available sensors
SENSOR_TYPES = ["Temperature",
"Humidity",
"Pressure",
"Luminance",
"UVIndex"]
# Sensor units - these do not currently align with the API documentation
SENSOR_UNITS = {"Temperature": TEMP_FAHRENHEIT,
"Humidity": "%",
"Pressure": "inHg",
"Luminance": "cd/m²"}
# Which sensors to format numerically
FORMAT_NUMBERS = ["Temperature", "Pressure"]
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the available BloomSky weather sensors."""
logger = logging.getLogger(__name__)
bloomsky = get_component('bloomsky')
sensors = config.get('monitored_conditions', SENSOR_TYPES)
for device in bloomsky.BLOOMSKY.devices.values():
for variable in sensors:
if variable in SENSOR_TYPES:
add_devices([BloomSkySensor(bloomsky.BLOOMSKY,
device,
variable)])
else:
logger.error("Cannot find definition for device: %s", variable)
class BloomSkySensor(Entity):
"""Representation of a single sensor in a BloomSky device."""
def __init__(self, bs, device, sensor_name):
"""Initialize a bloomsky sensor."""
self._bloomsky = bs
self._device_id = device["DeviceID"]
self._sensor_name = sensor_name
self._name = "{} {}".format(device["DeviceName"], sensor_name)
self._unique_id = "bloomsky_sensor {}".format(self._name)
self.update()
@property
def name(self):
"""The name of the BloomSky device and this sensor."""
return self._name
@property
def unique_id(self):
"""Return the unique ID for this sensor."""
return self._unique_id
@property
def state(self):
"""The current state, eg. value, of this sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the sensor units."""
return SENSOR_UNITS.get(self._sensor_name, None)
def update(self):
"""Request an update from the BloomSky API."""
self._bloomsky.refresh_devices()
state = \
self._bloomsky.devices[self._device_id]["Data"][self._sensor_name]
if self._sensor_name in FORMAT_NUMBERS:
self._state = "{0:.2f}".format(state)
else:
            self._state = state

| 31.663043 | 79 | 0.625472 | ["MIT"] | 1lann/home-assistant | homeassistant/components/sensor/bloomsky.py | 2,914 | Python |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.mark import parametrize
from ducktape.utils.util import wait_until
from ducktape.mark.resource import cluster
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.services.kafka import KafkaService
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int
from kafkatest.version import LATEST_0_9, LATEST_0_10, LATEST_0_11, DEV_BRANCH, KafkaVersion
class MessageFormatChangeTest(ProduceConsumeValidateTest):
def __init__(self, test_context):
super(MessageFormatChangeTest, self).__init__(test_context=test_context)
def setUp(self):
self.topic = "test_topic"
self.zk = ZookeeperService(self.test_context, num_nodes=1)
self.zk.start()
# Producer and consumer
self.producer_throughput = 10000
self.num_producers = 1
self.num_consumers = 1
self.messages_per_producer = 100
def produce_and_consume(self, producer_version, consumer_version, group):
self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka,
self.topic,
throughput=self.producer_throughput,
message_validator=is_int,
version=KafkaVersion(producer_version))
self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka,
self.topic, consumer_timeout_ms=30000,
message_validator=is_int, version=KafkaVersion(consumer_version))
self.consumer.group_id = group
self.run_produce_consume_validate(lambda: wait_until(
lambda: self.producer.each_produced_at_least(self.messages_per_producer) == True,
timeout_sec=120, backoff_sec=1,
err_msg="Producer did not produce all messages in reasonable amount of time"))
@cluster(num_nodes=12)
@parametrize(producer_version=str(DEV_BRANCH), consumer_version=str(DEV_BRANCH))
@parametrize(producer_version=str(LATEST_0_10), consumer_version=str(LATEST_0_10))
@parametrize(producer_version=str(LATEST_0_9), consumer_version=str(LATEST_0_9))
def test_compatibility(self, producer_version, consumer_version):
""" This tests performs the following checks:
The workload is a mix of 0.9.x, 0.10.x and 0.11.x producers and consumers
that produce to and consume from a DEV_BRANCH cluster
1. initially the topic is using message format 0.9.0
2. change the message format version for topic to 0.10.0 on the fly.
3. change the message format version for topic to 0.11.0 on the fly.
4. change the message format version for topic back to 0.10.0 on the fly (only if the client version is 0.11.0 or newer)
- The producers and consumers should not have any issue.
Note regarding step number 4. Downgrading the message format version is generally unsupported as it breaks
older clients. More concretely, if we downgrade a topic from 0.11.0 to 0.10.0 after it contains messages with
version 0.11.0, we will return the 0.11.0 messages without down conversion due to an optimisation in the
handling of fetch requests. This will break any consumer that doesn't support 0.11.0. So, in practice, step 4
is similar to step 2 and it didn't seem worth it to increase the cluster size to in order to add a step 5 that
would change the message format version for the topic back to 0.9.0.0.
"""
self.kafka = KafkaService(self.test_context, num_nodes=3, zk=self.zk, version=DEV_BRANCH, topics={self.topic: {
"partitions": 3,
"replication-factor": 3,
'configs': {"min.insync.replicas": 2}}})
self.kafka.start()
self.logger.info("First format change to 0.9.0")
self.kafka.alter_message_format(self.topic, str(LATEST_0_9))
self.produce_and_consume(producer_version, consumer_version, "group1")
self.logger.info("Second format change to 0.10.0")
self.kafka.alter_message_format(self.topic, str(LATEST_0_10))
self.produce_and_consume(producer_version, consumer_version, "group2")
self.logger.info("Third format change to 0.11.0")
self.kafka.alter_message_format(self.topic, str(LATEST_0_11))
self.produce_and_consume(producer_version, consumer_version, "group3")
if producer_version == str(DEV_BRANCH) and consumer_version == str(DEV_BRANCH):
self.logger.info("Fourth format change back to 0.10.0")
self.kafka.alter_message_format(self.topic, str(LATEST_0_10))
            self.produce_and_consume(producer_version, consumer_version, "group4")

| 55.504854 | 128 | 0.682526 | ["Apache-2.0"] | 1810824959/kafka | tests/kafkatest/tests/client/message_format_change_test.py | 5,717 | Python |
# -*- coding: utf-8 -*-
"""
[googleSearch.py]
Google Search Plugin
[Author]
Justin Walker
[About]
Returns the first three links from a google search.
[Commands]
>>> .google <<search term>>
returns search links
"""
try:
from googlesearch import search
except ImportError:
print("No module named 'google' found")
class Plugin:
def __init__(self):
pass
def __google(search_term):
# start is what link to start with, stop is how many links to get
# only_standard limits it to normal links instead of ads and extra
# links.
return search(search_term, start=1, stop=3, \
only_standard=True)
def run(self, incoming, methods, info):
try:
msgs = info['args'][1:][0].split()
if info['command'] == 'PRIVMSG' and msgs[0] == '.google':
# All further messages, if there are any are added to search term.
term = ''
if len(msgs) > 1:
for msg in msgs[1:]:
term += msg
for link in Plugin.__google(term):
methods['send'](info['address'], link)
else:
methods['send'](info['address'], "Input error. '.google search_term'.")
except Exception as e:
            print('woops plugin error: ', e)
| 28.7 | 92 | 0.524739 | ["MIT"] | IronPenguin4179/honeybot | honeybot/plugins/google.py | 1,435 | Python |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""End-to-end tests that check model correctness."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import unittest
import numpy as np
import tensorflow as tf
from tensorflow.compat import v1 as tfv1
# pylint: disable=g-bad-import-order
from tfltransfer import bases
from tfltransfer import optimizers
from tfltransfer import heads
from tfltransfer import tflite_transfer_converter
# pylint: enable=g-bad-import-order
IMAGE_SIZE = 224
BATCH_SIZE = 128
NUM_CLASSES = 5
VALIDATION_SPLIT = 0.2
LEARNING_RATE = 0.001
BOTTLENECK_SHAPE = (7, 7, 1280)
DATASET_URL = 'https://storage.googleapis.com/download.tensorflow.org/example_images/flower_photos.tgz'
class TransferModel(object):
"""Test consumer of models generated by the converter."""
def __init__(self, dataset_dir, base_model, head_model, optimizer):
"""Creates a wrapper for a set of models and a data set."""
self.dataset_dir = dataset_dir
datagen = tf.keras.preprocessing.image.ImageDataGenerator(
rescale=1. / 255, validation_split=VALIDATION_SPLIT)
self.train_img_generator = datagen.flow_from_directory(
self.dataset_dir,
target_size=(IMAGE_SIZE, IMAGE_SIZE),
batch_size=BATCH_SIZE,
subset='training')
self.val_img_generator = datagen.flow_from_directory(
self.dataset_dir,
target_size=(IMAGE_SIZE, IMAGE_SIZE),
batch_size=BATCH_SIZE,
subset='validation')
converter = tflite_transfer_converter.TFLiteTransferConverter(
NUM_CLASSES, base_model, head_model, optimizer, BATCH_SIZE)
models = converter._convert()
self.initialize_model = models['initialize']
self.bottleneck_model = models['bottleneck']
self.train_head_model = models['train_head']
self.inference_model = models['inference']
self.optimizer_model = models['optimizer']
self.variables = self._generate_initial_variables()
optim_state_shapes = self._optimizer_state_shapes()
self.optim_state = [
np.zeros(shape, dtype=np.float32) for shape in optim_state_shapes
]
def _generate_initial_variables(self):
"""Generates the initial model variables."""
interpreter = tf.lite.Interpreter(model_content=self.initialize_model)
zero_in = interpreter.get_input_details()[0]
variable_outs = interpreter.get_output_details()
interpreter.allocate_tensors()
interpreter.set_tensor(zero_in['index'], np.float32(0.))
interpreter.invoke()
return [interpreter.get_tensor(var['index']) for var in variable_outs]
def _optimizer_state_shapes(self):
"""Reads the shapes of the optimizer parameters (mutable state)."""
interpreter = tf.lite.Interpreter(model_content=self.optimizer_model)
num_variables = len(self.variables)
optim_state_inputs = interpreter.get_input_details()[num_variables * 2:]
return [input_['shape'] for input_ in optim_state_inputs]
def prepare_bottlenecks(self):
"""Passes all images through the base model and save the bottlenecks.
This method has to be called before any training or inference.
"""
self.train_bottlenecks, self.train_labels = (
self._collect_and_generate_bottlenecks(self.train_img_generator))
self.val_bottlenecks, self.val_labels = (
self._collect_and_generate_bottlenecks(self.val_img_generator))
def _collect_and_generate_bottlenecks(self, image_gen):
"""Consumes a generator and converts all images to bottlenecks.
Args:
image_gen: A Keras data generator for images to process
Returns:
Two NumPy arrays: (bottlenecks, labels).
"""
collected_bottlenecks = np.zeros(
(image_gen.samples,) + BOTTLENECK_SHAPE, dtype=np.float32)
collected_labels = np.zeros((image_gen.samples, NUM_CLASSES),
dtype=np.float32)
next_idx = 0
for bottlenecks, truth in self._generate_bottlenecks(
make_finite(image_gen)):
batch_size = bottlenecks.shape[0]
collected_bottlenecks[next_idx:next_idx + batch_size] = bottlenecks
collected_labels[next_idx:next_idx + batch_size] = truth
next_idx += batch_size
return collected_bottlenecks, collected_labels
def _generate_bottlenecks(self, image_gen):
"""Generator adapter that passes images through the bottleneck model.
Args:
image_gen: A generator that returns images to be processed. Images are
paired with ground truth labels.
Yields:
Bottlenecks from input images, paired with ground truth labels.
"""
interpreter = tf.lite.Interpreter(model_content=self.bottleneck_model)
[x_in] = interpreter.get_input_details()
[bottleneck_out] = interpreter.get_output_details()
for (x, y) in image_gen:
batch_size = x.shape[0]
interpreter.resize_tensor_input(x_in['index'],
(batch_size, IMAGE_SIZE, IMAGE_SIZE, 3))
interpreter.allocate_tensors()
interpreter.set_tensor(x_in['index'], x)
interpreter.invoke()
bottleneck = interpreter.get_tensor(bottleneck_out['index'])
yield bottleneck, y
def train_head(self, num_epochs):
"""Trains the head model for a given number of epochs.
SGD is used as an optimizer.
Args:
num_epochs: how many epochs should be trained
Returns:
A list of train_loss values after every epoch trained.
Raises:
RuntimeError: when prepare_bottlenecks() has not been called.
"""
if not hasattr(self, 'train_bottlenecks'):
raise RuntimeError('prepare_bottlenecks has not been called')
results = []
for _ in range(num_epochs):
loss = self._train_one_epoch(
self._generate_batches(self.train_bottlenecks, self.train_labels))
results.append(loss)
return results
def _generate_batches(self, x, y):
"""Creates a generator that iterates over the data in batches."""
num_total = x.shape[0]
for begin in range(0, num_total, BATCH_SIZE):
end = min(begin + BATCH_SIZE, num_total)
yield x[begin:end], y[begin:end]
def _train_one_epoch(self, train_gen):
"""Performs one training epoch."""
interpreter = tf.lite.Interpreter(model_content=self.train_head_model)
interpreter.allocate_tensors()
x_in, y_in = interpreter.get_input_details()[:2]
variable_ins = interpreter.get_input_details()[2:]
loss_out = interpreter.get_output_details()[0]
gradient_outs = interpreter.get_output_details()[1:]
epoch_loss = 0.
num_processed = 0
for bottlenecks, truth in train_gen:
batch_size = bottlenecks.shape[0]
if batch_size < BATCH_SIZE:
bottlenecks = pad_batch(bottlenecks, BATCH_SIZE)
truth = pad_batch(truth, BATCH_SIZE)
interpreter.set_tensor(x_in['index'], bottlenecks)
interpreter.set_tensor(y_in['index'], truth)
for variable_in, variable_value in zip(variable_ins, self.variables):
interpreter.set_tensor(variable_in['index'], variable_value)
interpreter.invoke()
loss = interpreter.get_tensor(loss_out['index'])
gradients = [
interpreter.get_tensor(gradient_out['index'])
for gradient_out in gradient_outs
]
self._apply_gradients(gradients)
epoch_loss += loss * batch_size
num_processed += batch_size
epoch_loss /= num_processed
return epoch_loss
def _apply_gradients(self, gradients):
"""Applies the optimizer to the model parameters."""
interpreter = tf.lite.Interpreter(model_content=self.optimizer_model)
interpreter.allocate_tensors()
num_variables = len(self.variables)
variable_ins = interpreter.get_input_details()[:num_variables]
gradient_ins = interpreter.get_input_details()[num_variables:num_variables *
2]
state_ins = interpreter.get_input_details()[num_variables * 2:]
variable_outs = interpreter.get_output_details()[:num_variables]
state_outs = interpreter.get_output_details()[num_variables:]
for variable, gradient, variable_in, gradient_in in zip(
self.variables, gradients, variable_ins, gradient_ins):
interpreter.set_tensor(variable_in['index'], variable)
interpreter.set_tensor(gradient_in['index'], gradient)
for optim_state_elem, state_in in zip(self.optim_state, state_ins):
interpreter.set_tensor(state_in['index'], optim_state_elem)
interpreter.invoke()
self.variables = [
interpreter.get_tensor(variable_out['index'])
for variable_out in variable_outs
]
self.optim_state = [
interpreter.get_tensor(state_out['index']) for state_out in state_outs
]
def measure_inference_accuracy(self):
"""Runs the inference model and measures accuracy on the validation set."""
interpreter = tf.lite.Interpreter(model_content=self.inference_model)
bottleneck_in = interpreter.get_input_details()[0]
variable_ins = interpreter.get_input_details()[1:]
[y_out] = interpreter.get_output_details()
inference_accuracy = 0.
num_processed = 0
for bottleneck, truth in self._generate_batches(self.val_bottlenecks,
self.val_labels):
batch_size = bottleneck.shape[0]
interpreter.resize_tensor_input(bottleneck_in['index'],
(batch_size,) + BOTTLENECK_SHAPE)
interpreter.allocate_tensors()
interpreter.set_tensor(bottleneck_in['index'], bottleneck)
for variable_in, variable_value in zip(variable_ins, self.variables):
interpreter.set_tensor(variable_in['index'], variable_value)
interpreter.invoke()
preds = interpreter.get_tensor(y_out['index'])
acc = (np.argmax(preds, axis=1) == np.argmax(truth,
axis=1)).sum() / batch_size
inference_accuracy += acc * batch_size
num_processed += batch_size
inference_accuracy /= num_processed
return inference_accuracy
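# Hedged usage sketch for the wrapper above (argument values are illustrative): the
# intended call order is prepare_bottlenecks() once, then train_head(), then
# measure_inference_accuracy(), mirroring assertModelAchievesAccuracy further below.
#
#   model = TransferModel(dataset_dir, bases.MobileNetV2Base(),
#                         heads.SoftmaxClassifierHead(BATCH_SIZE, BOTTLENECK_SHAPE,
#                                                     NUM_CLASSES),
#                         optimizers.SGD(LEARNING_RATE))
#   model.prepare_bottlenecks()
#   model.train_head(10)
#   accuracy = model.measure_inference_accuracy()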
def make_finite(data_gen):
"""An adapter for Keras data generators that makes them finite.
The default behavior in Keras is to keep looping infinitely through
the data.
Args:
data_gen: An infinite Keras data generator.
Yields:
Same values as the parameter generator.
"""
num_samples = data_gen.samples
num_processed = 0
for batch in data_gen:
batch_size = batch[0].shape[0]
if batch_size + num_processed > num_samples:
batch_size = num_samples - num_processed
should_stop = True
else:
should_stop = False
if batch_size == 0:
return
batch = tuple(x[:batch_size] for x in batch)
yield batch
num_processed += batch_size
if should_stop:
return
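# Hedged usage sketch: wrapping a Keras directory iterator so that a plain for-loop stops
# after exactly one pass over the data (the generator name mirrors those in TransferModel):
#
#   for images, labels in make_finite(datagen.flow_from_directory(dataset_dir)):
#       pass  # each sample is seen once; iteration ends after `samples` images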
# TODO(b/135138207) investigate if we can get rid of this.
def pad_batch(batch, batch_size):
"""Resize batch to a given size, tiling present samples over missing.
Example:
Suppose batch_size is 5, batch is [1, 2].
Then the return value is [1, 2, 1, 2, 1].
Args:
batch: An ndarray with first dimension size <= batch_size.
batch_size: Desired size for first dimension.
Returns:
An ndarray of the same shape, except first dimension has
the desired size.
"""
padded = np.zeros((batch_size,) + batch.shape[1:], dtype=batch.dtype)
next_idx = 0
while next_idx < batch_size:
fill_len = min(batch.shape[0], batch_size - next_idx)
padded[next_idx:next_idx + fill_len] = batch[:fill_len]
next_idx += fill_len
return padded
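# Doctest-style illustration of the tiling behaviour documented above:
# >>> pad_batch(np.array([1, 2]), 5)
# array([1, 2, 1, 2, 1])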
class ModelCorrectnessTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
super(ModelCorrectnessTest, cls).setUpClass()
zip_file = tf.keras.utils.get_file(
origin=DATASET_URL, fname='flower_photos.tgz', extract=True)
cls.dataset_dir = os.path.join(os.path.dirname(zip_file), 'flower_photos')
mobilenet_dir = tempfile.mkdtemp('tflite-transfer-test')
mobilenet_keras = tf.keras.applications.MobileNetV2(
input_shape=(IMAGE_SIZE, IMAGE_SIZE, 3),
include_top=False,
weights='imagenet')
tfv1.keras.experimental.export_saved_model(mobilenet_keras, mobilenet_dir)
cls.mobilenet_dir = mobilenet_dir
def setUp(self):
super(ModelCorrectnessTest, self).setUp()
self.mobilenet_dir = ModelCorrectnessTest.mobilenet_dir
self.dataset_dir = ModelCorrectnessTest.dataset_dir
def test_mobilenet_v2_saved_model_and_softmax_classifier(self):
base_model = bases.SavedModelBase(self.mobilenet_dir)
head_model = heads.SoftmaxClassifierHead(BATCH_SIZE, BOTTLENECK_SHAPE,
NUM_CLASSES)
optimizer = optimizers.SGD(LEARNING_RATE)
model = TransferModel(self.dataset_dir, base_model, head_model, optimizer)
self.assertModelAchievesAccuracy(model, 0.80)
def test_mobilenet_v2_saved_model_quantized_and_softmax_classifier(self):
base_model = bases.SavedModelBase(self.mobilenet_dir, quantize=True)
head_model = heads.SoftmaxClassifierHead(BATCH_SIZE, BOTTLENECK_SHAPE,
NUM_CLASSES)
optimizer = optimizers.SGD(LEARNING_RATE)
model = TransferModel(self.dataset_dir, base_model, head_model, optimizer)
self.assertModelAchievesAccuracy(model, 0.80)
def test_mobilenet_v2_base_and_softmax_classifier(self):
base_model = bases.MobileNetV2Base()
head_model = heads.SoftmaxClassifierHead(BATCH_SIZE, BOTTLENECK_SHAPE,
NUM_CLASSES)
optimizer = optimizers.SGD(LEARNING_RATE)
model = TransferModel(self.dataset_dir, base_model, head_model, optimizer)
self.assertModelAchievesAccuracy(model, 0.80)
def test_mobilenet_v2_base_and_softmax_classifier_l2(self):
base_model = bases.MobileNetV2Base()
head_model = heads.SoftmaxClassifierHead(
BATCH_SIZE, BOTTLENECK_SHAPE, NUM_CLASSES, l2_reg=0.1)
optimizer = optimizers.SGD(LEARNING_RATE)
model = TransferModel(self.dataset_dir, base_model, head_model, optimizer)
self.assertModelAchievesAccuracy(model, 0.80)
def test_mobilenet_v2_base_quantized_and_softmax_classifier(self):
base_model = bases.MobileNetV2Base(quantize=True)
head_model = heads.SoftmaxClassifierHead(BATCH_SIZE, BOTTLENECK_SHAPE,
NUM_CLASSES)
optimizer = optimizers.SGD(LEARNING_RATE)
model = TransferModel(self.dataset_dir, base_model, head_model, optimizer)
self.assertModelAchievesAccuracy(model, 0.80)
def test_mobilenet_v2_base_and_softmax_classifier_adam(self):
base_model = bases.MobileNetV2Base()
head_model = heads.SoftmaxClassifierHead(BATCH_SIZE, BOTTLENECK_SHAPE,
NUM_CLASSES)
optimizer = optimizers.Adam()
model = TransferModel(self.dataset_dir, base_model, head_model, optimizer)
self.assertModelAchievesAccuracy(model, 0.80)
def assertModelAchievesAccuracy(self, model, target_accuracy, num_epochs=30):
model.prepare_bottlenecks()
print('Bottlenecks prepared')
history = model.train_head(num_epochs)
print('Training completed, history = {}'.format(history))
accuracy = model.measure_inference_accuracy()
print('Final accuracy = {:.2f}'.format(accuracy))
self.assertGreater(accuracy, target_accuracy)
if __name__ == '__main__':
  unittest.main()

| 38.059524 | 103 | 0.714983 | ["Apache-2.0"] | 121Y/examples | lite/examples/model_personalization/converter/tfltransfer/model_correctness_test.py | 15,985 | Python |
def get_the_ith_largest(s1: list, s2: list, i: int):
m = len(s1)
n = len(s2)
if i > m + n:
raise IndexError('list index out of range')
i -= 1
l1 = 0
r1 = i if m - 1 >= i else m - 1
while l1 <= r1:
c1 = (l1 + r1) // 2
c1_f = i - c1 - 1
c1_b = i - c1
if c1_f >= 0 and (c1_f >= n or s2[c1_f] > s1[c1]):
l1 = c1 + 1
elif 0 <= c1_b < n and s2[c1_b] < s1[c1]:
r1 = c1 - 1
else:
return s1[c1]
return get_the_ith_largest(s2, s1, i + 1)
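# Illustrative note (added, hedged): with both inputs sorted in ascending order this
# binary search returns the i-th *smallest* element of the merged sequence (1-indexed);
# e.g. the call in __main__ below asks for the 8th element of the sorted merge and prints 5.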
if __name__ == '__main__':
s1_test = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
s2_test = [2, 3, 4, 6, 10, 20, 100]
    print(get_the_ith_largest(s2_test, s1_test, 8))

| 25.586207 | 65 | 0.467655 | ["MIT"] | keybrl/xdu-coursework | projects/g3h2-algorithm/practice1/4.py | 742 | Python |
# -*- coding: utf-8 -*-
#
# Copyright © 2009-2010 Pierre Raybaut
# Licensed under the terms of the MIT License
# (see spyderlib/__init__.py for details)
"""
spyderlib.widgets
=================
Widgets defined in this module may be used in any other Qt-based application
They are also used in Spyder through the Plugin interface
(see spyderlib.plugins)
"""
| 23.375 | 77 | 0.679144 | [
"MIT"
] | MarlaJahari/Marve | spyderlib/widgets/__init__.py | 375 | Python |
from math import ceil
from time import sleep
from config import *
from servo import *
from camera import *
from pid import *
# Init
servo1 = Servo(2)
servo2 = Servo(3)
servo_count = 0
#servo1.setAngle(90)
#sleep(1)
#servo2.setAngle(90)
#sleep(1)
camera = Camera()
PID = PID()
# Loop
while True:
data = camera.getBallPos()
#data = None
if not (data[0] and data[1]):
print("No ball found")
else:
# Calculate PID
move = PID.calculate(data[0], data[1])
# Print result
#print("x", round(move[0]), "y", round(move[1]), round(move[2]*600))
# Move servos
#move_x = ( (move[0] - conf.cam_x_min) / (conf.cam_x_max - conf.cam_x_min) ) * (conf.servo_max - conf.servo_min) + conf.servo_min
#move_y = ( (move[1] - conf.cam_y_min) / (conf.cam_y_max - conf.cam_y_min) ) * (conf.servo_max - conf.servo_min) + conf.servo_min
move_x = ((move[0])/2)+90
move_y = ((-move[1])/2)+90
print(move_x, move_y)
servo_count += 1
if servo_count > 3:
servo1.setAngle(move_x)
servo2.setAngle(move_y)
servo_count = 0
#servo.moveTo(2, move[1])
# Show frame
cv2.imshow("Frame", data[2])
key = cv2.waitKey(1) & 0xFF
# Stop if 'q' is pressed
if key == ord("q"):
servo1.stop()
servo2.stop()
        camera.stop()

| 17.72619 | 137 | 0.534587 | ["MIT"] | alvarlagerlof/ball-pid | run.py | 1,489 | Python |
# -*- coding: utf-8 -*-
# @Author: Jie
# @Date: 2017-06-15 14:11:08
# @Last Modified by: Jie Yang, Contact: [email protected]
# @Last Modified time: 2019-02-13 12:41:44
from __future__ import print_function
import time
import sys
import argparse
import random
import torch
import gc
import torch.nn as nn
import torch.optim as optim
import numpy as np
from utils.metric import get_ner_fmeasure
from model.seqlabel import SeqLabel
from model.sentclassifier import SentClassifier
from utils.data import Data
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
try:
import cPickle as pickle
except ImportError:
import pickle
DEFAULT_TRAINED_FILE = 'test_data/lstmtestglove50.9.model'
seed_num = 46
random.seed(seed_num)
torch.manual_seed(seed_num)
np.random.seed(seed_num)
def importance_matrix(sensitivities, data,
print_imp=True, show_table=True, tag_to_ablate=None):
'''
Builds a matrix of tag sensitivities
:param sensitivities: This is a matrix of [num_tags, num_neurons],
which is [10 x 50] in our experimental configuration.
    :return: a [num_neurons x num_tags] array of neuron indices, ranked from most to least important for each tag
'''
important_lists = []
important_nps = np.zeros(50, dtype=int)
sensitivities = sensitivities[1:] # omit padding tag
for i in range(len(sensitivities)):
important_list = []
important_np = np.zeros(50, dtype=int)
tag_sensitivity_row = sensitivities[i]
for j in range(len(tag_sensitivity_row)):
most_important = np.argmax(tag_sensitivity_row)
important_list.append(most_important)
important_np[j] = most_important
index = [most_important]
tag_sensitivity_row[most_important] = np.NINF
important_lists.append(important_list)
important_nps = np.vstack((important_nps, important_np))
    important_nps = np.delete(important_nps, 0, axis=0)  # drop the all-zero initializer row
np.save("imps.npy",important_nps) # save importance rows for other scripts to use
important_nps = np.transpose(important_nps)
if show_table:
sns.set()
# Smaller than normal fonts
sns.set(font_scale=0.5)
x_tick = [data.label_alphabet.get_instance(tag) for tag in sorted(data.tag_counts)]
del(x_tick[0])
ax = sns.heatmap(important_nps, annot=True, xticklabels=x_tick,
cmap=ListedColormap(['white']), cbar=False, yticklabels=False,
linecolor='gray', linewidths=0.4)
title = "Importance rankings of neurons per tag"
plt.title(title, fontsize=18)
ttl = ax.title
ttl.set_position([0.5, 1.05])
plt.show()
def trim_model_dir(model_dir):
model_dir = model_dir.replace('/','-')
return model_dir
ax.figure.savefig("ImportanceRankings-{}.png".format(trim_model_dir(data.model_dir)))
if print_imp:
imp_file = open("Importance-{}.txt".format(trim_model_dir(data.model_dir)), "w+")
print('Neuron importance ranking for each NER tag:')
for i, l in enumerate(important_lists):
tags = [data.label_alphabet.get_instance(tag) for tag in sorted(data.tag_counts)]
del(tags[0]) # remove PAD tag
print ("\t{}\t{}".format(tags[i], l))
imp_file.write("{}\t{}\n".format(tags[i], l))
imp_file.write("\n")
np.savetxt("Importance-{}.tsv".format(trim_model_dir(data.model_dir)),
important_nps, fmt='%2.0d', delimiter='\t')
return important_nps
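# A compact equivalent of the ranking loop above (a sketch; assumes `sensitivities` is a
# [num_tags, num_neurons] float array): repeatedly taking argmax and masking the winner
# with -inf is, ties aside, the same as argsorting each row by descending sensitivity:
#
#   rankings = np.argsort(-sensitivities, axis=1)  # one row of neuron indices per tag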
def heatmap_sensitivity(sensitivities,
modelname=DEFAULT_TRAINED_FILE,
testname="",
show_pad=False,
show_vals=True,
disable=False):
'''
Shows a heatmap for the sensitivity values, saves the heatmap to a PNG file,
and also saves the sensitivity matrix to an .npy file,
which we use for calculating correlations between models later.
:param sensitivities: This is a matrix of [num_tags, num_neurons],
which is [10 x 50] in our experimental configuration.
:param disable: disable is just to turn off for debugging
:return:
'''
# transpose to match chart in Figure 7. of paper
sensitivities = np.transpose(sensitivities)
# column 0 is the padding tag
start = 1
if show_pad:
start = 0
sensitivities = sensitivities[0:50, start:10]
sns.set()
# Smaller than normal fonts
sns.set(font_scale=0.5)
x_tick = [data.label_alphabet.get_instance(tag) for tag in sorted(data.tag_counts)]
if show_pad: x_tick[0] = 'PAD'
else: del(x_tick[0])
# change tags' order to use in downstream correlation diagrams
sensitivities_temp = np.zeros((50, 9))
x_tick_output = ['B-PER', 'I-PER', 'B-LOC', 'I-LOC', 'B-ORG', 'I-ORG', 'B-MISC', 'I-MISC', 'O']
for i in range(len(x_tick_output)):
sensitivities_temp[:, i] = sensitivities[:, x_tick.index(x_tick_output[i])]
np.save(modelname+'_sensitivities.npy', sensitivities_temp)
# put sensititivites in heat map
if not disable:
ax = sns.heatmap(sensitivities, xticklabels=x_tick, annot=show_vals, fmt=".2g")
title = "({}): ".format(testname) + modelname
plt.title(title, fontsize=18)
ttl = ax.title
ttl.set_position([0.5, 1.05])
plt.show()
ax.figure.savefig(modelname+"_heatmap.png")
def get_sensitivity_matrix(label, debug=True):
'''
Given a tag like 4: (B-PER), return the sensitivity matrix
    :param label: integer tag index into data.tag_contributions / data.tag_counts
    :return: the 50-dimensional sensitivity row for the given tag (its average contribution minus the average over all other non-padding tags)
'''
avg_for_label = data.tag_contributions[label]/data.tag_counts[label]
sum_other_counts = 0
# data.tag_contributions[0] is for the padding label and can be ignored
sum_other_contributions = np.zeros((10, 50))
for l in data.tag_counts:
if l != label and l != 0: # if l != label: (to consider the padding label which is 0)
sum_other_counts += data.tag_counts[l]
sum_other_contributions += data.tag_contributions[l]
avg_for_others = sum_other_contributions/sum_other_counts
s_ij = avg_for_label - avg_for_others
s_ij_label = s_ij[label]
return s_ij_label # was return s_ij
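# In words (a sketch of the quantity computed above, not a quotation from any paper):
# for tag t and neuron j,
#   S[t, j] = mean contribution of neuron j on tokens labelled t
#             - mean contribution of neuron j on all other non-padding tokens,
# and only the row for the requested tag is returned.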
def data_initialization(data):
data.initial_feature_alphabets()
data.build_alphabet(data.train_dir)
data.build_alphabet(data.dev_dir)
data.build_alphabet(data.test_dir)
data.fix_alphabet()
def predict_check(pred_variable, gold_variable, mask_variable, sentence_classification=False):
"""
input:
pred_variable (batch_size, sent_len): pred tag result, in numpy format
gold_variable (batch_size, sent_len): gold result variable
mask_variable (batch_size, sent_len): mask variable
"""
pred = pred_variable.cpu().data.numpy()
gold = gold_variable.cpu().data.numpy()
mask = mask_variable.cpu().data.numpy()
overlaped = (pred == gold)
if sentence_classification:
# print(overlaped)
# print(overlaped*pred)
right_token = np.sum(overlaped)
total_token = overlaped.shape[0] ## =batch_size
else:
right_token = np.sum(overlaped * mask)
total_token = mask.sum()
# print("right: %s, total: %s"%(right_token, total_token))
return right_token, total_token
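# Masked-accuracy sketch (assumed toy tensors, sentence_classification=False): with
# pred = [[1, 2, 0]], gold = [[1, 3, 0]] and mask = [[1, 1, 0]], right_token is 1 and
# total_token is 2, i.e. the padded position is excluded from both counts.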
def recover_label(pred_variable, gold_variable, mask_variable, label_alphabet, word_recover, sentence_classification=False):
"""
input:
pred_variable (batch_size, sent_len): pred tag result
gold_variable (batch_size, sent_len): gold result variable
mask_variable (batch_size, sent_len): mask variable
"""
pred_variable = pred_variable[word_recover]
# print("reordered labels: {}".format(pred_variable))
gold_variable = gold_variable[word_recover]
mask_variable = mask_variable[word_recover]
batch_size = gold_variable.size(0)
if sentence_classification:
pred_tag = pred_variable.cpu().data.numpy().tolist()
gold_tag = gold_variable.cpu().data.numpy().tolist()
pred_label = [label_alphabet.get_instance(pred) for pred in pred_tag]
gold_label = [label_alphabet.get_instance(gold) for gold in gold_tag]
else:
seq_len = gold_variable.size(1)
mask = mask_variable.cpu().data.numpy()
pred_tag = pred_variable.cpu().data.numpy()
gold_tag = gold_variable.cpu().data.numpy()
batch_size = mask.shape[0]
pred_label = []
gold_label = []
for idx in range(batch_size):
pred = [label_alphabet.get_instance(pred_tag[idx][idy]) for idy in range(seq_len) if mask[idx][idy] != 0]
gold = [label_alphabet.get_instance(gold_tag[idx][idy]) for idy in range(seq_len) if mask[idx][idy] != 0]
assert(len(pred)==len(gold))
pred_label.append(pred)
gold_label.append(gold)
return pred_label, gold_label
def recover_nbest_label(pred_variable, mask_variable, label_alphabet, word_recover):
"""
input:
pred_variable (batch_size, sent_len, nbest): pred tag result
mask_variable (batch_size, sent_len): mask variable
word_recover (batch_size)
output:
nbest_pred_label list: [batch_size, nbest, each_seq_len]
"""
# exit(0)
pred_variable = pred_variable[word_recover]
mask_variable = mask_variable[word_recover]
batch_size = pred_variable.size(0)
seq_len = pred_variable.size(1)
nbest = pred_variable.size(2)
mask = mask_variable.cpu().data.numpy()
pred_tag = pred_variable.cpu().data.numpy()
batch_size = mask.shape[0]
pred_label = []
for idx in range(batch_size):
pred = []
for idz in range(nbest):
each_pred = [label_alphabet.get_instance(pred_tag[idx][idy][idz]) for idy in range(seq_len) if mask[idx][idy] != 0]
pred.append(each_pred)
pred_label.append(pred)
return pred_label
def lr_decay(optimizer, epoch, decay_rate, init_lr):
lr = init_lr/(1+decay_rate*epoch)
print(" Learning rate is set as:", lr)
for param_group in optimizer.param_groups:
param_group['lr'] = lr
return optimizer
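# Worked example (added note): with init_lr = 0.015 and decay_rate = 0.05, epoch 0 keeps
# lr = 0.015, epoch 10 gives 0.015 / (1 + 0.5) = 0.01, and epoch 30 gives 0.006.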
def count_parameters(model):
return sum(p.numel() for p in model.parameters() if p.requires_grad)
def evaluate(data, model, name, nbest=None, print_tag_counts=False, tag_to_ablate=None):
'''
:param data:
:param model:
:param name:
:param nbest:
:param print_tag_counts:
:param tag_to_ablate: if this is set to a tag name, like 'B-ORG', then in the LSTM layer's forward() we ablate the
number of neurons specified by data.ablate_num
    :return: (speed, acc, p, r, f, nbest_pred_results, pred_scores) when nbest is set;
             otherwise (speed, acc, p, r, f, pred_results, pred_scores, sensitivity_combined)
'''
ablate_list_for_tag = None
if tag_to_ablate:
data.ablate_tag = tag_to_ablate
ablate_list_for_tag = data.ablate_list[tag_to_ablate]
print("\nEVALUATE file: {}, set={}, \n\t ablate_num={} tag: {} \nablate_list_for_tag={}".format(
data.model_dir, name, data.current_ablate_ind, tag_to_ablate, ablate_list_for_tag))
if name == "train":
instances = data.train_Ids
elif name == "dev":
instances = data.dev_Ids
elif name == 'test':
instances = data.test_Ids
elif name == 'raw':
instances = data.raw_Ids
else:
print("Error: wrong evaluate name,", name)
exit(1)
right_token = 0
whole_token = 0
nbest_pred_results = []
pred_scores = []
pred_results = []
gold_results = []
    ## set model in eval mode
model.eval()
''' Get count of model parameters '''
# print("COUNT PARAMETERS: {}".format(count_parameters(model)))
batch_size = data.HP_batch_size
start_time = time.time()
train_num = len(instances)
total_batch = train_num//batch_size+1
for batch_id in range(total_batch):
start = batch_id*batch_size
end = (batch_id+1)*batch_size
if end > train_num:
end = train_num
instance = instances[start:end]
if not instance:
continue
batch_word, batch_features, batch_wordlen, batch_wordrecover, batch_char, batch_charlen, batch_charrecover, batch_label, mask = batchify_with_label(instance, data.HP_gpu, False, data.sentence_classification)
if nbest and not data.sentence_classification:
scores, nbest_tag_seq = model.decode_nbest(batch_word,batch_features, batch_wordlen, batch_char, batch_charlen, batch_charrecover, mask, nbest)
nbest_pred_result = recover_nbest_label(nbest_tag_seq, mask, data.label_alphabet, batch_wordrecover)
nbest_pred_results += nbest_pred_result
pred_scores += scores[batch_wordrecover].cpu().data.numpy().tolist()
            ## select the best sequence to evaluate
tag_seq = nbest_tag_seq[:,:,0]
else:
tag_seq = model(batch_word, batch_features, batch_wordlen, batch_char, batch_charlen, batch_charrecover, mask)
pred_label, gold_label = recover_label(tag_seq, batch_label, mask, data.label_alphabet, batch_wordrecover, data.sentence_classification)
pred_results += pred_label
gold_results += gold_label
decode_time = time.time() - start_time
speed = len(instances)/decode_time
acc, p, r, f = get_ner_fmeasure(gold_results, pred_results, data.tagScheme, data=data)
if nbest and not data.sentence_classification:
return speed, acc, p, r, f, nbest_pred_results, pred_scores
''' Get per-tag sensitivity '''
## print("TOTAL BATCH ITERATIONS: {}".format(data.iteration))
sensitivity_matrices = [] # This will hold a row for each tag's sensitivity
for tag in sorted(data.tag_counts):
if print_tag_counts:
if tag == 0:
print("Padding {}: {} instances.".format('0', data.tag_counts[tag]))
else:
print("Tag {}: {} instances.".format(data.label_alphabet.get_instance(tag), data.tag_counts[tag]))
sensitivity_tag = get_sensitivity_matrix(tag)
sensitivity_matrices.append(sensitivity_tag)
sensitivity_combined = np.squeeze(np.stack([sensitivity_matrices]))
# TODO: the following line would stack multiple models' sensitivity,
# but we don't need it unless running many different models for stats
# data.sensitivity_matrices_combined.append(sensitivity_combined)
return speed, acc, p, r, f, pred_results, pred_scores, sensitivity_combined
def batchify_with_label(input_batch_list, gpu, if_train=True, sentence_classification=False):
if sentence_classification:
return batchify_sentence_classification_with_label(input_batch_list, gpu, if_train)
else:
return batchify_sequence_labeling_with_label(input_batch_list, gpu, if_train)
def batchify_sequence_labeling_with_label(input_batch_list, gpu, if_train=True):
"""
input: list of words, chars and labels, various length. [[words, features, chars, labels],[words, features, chars,labels],...]
words: word ids for one sentence. (batch_size, sent_len)
features: features ids for one sentence. (batch_size, sent_len, feature_num)
            chars: char ids for one sentence, various lengths. (batch_size, sent_len, each_word_length)
labels: label ids for one sentence. (batch_size, sent_len)
output:
zero padding for word and char, with their batch length
word_seq_tensor: (batch_size, max_sent_len) Variable
feature_seq_tensors: [(batch_size, max_sent_len),...] list of Variable
word_seq_lengths: (batch_size,1) Tensor
char_seq_tensor: (batch_size*max_sent_len, max_word_len) Variable
char_seq_lengths: (batch_size*max_sent_len,1) Tensor
char_seq_recover: (batch_size*max_sent_len,1) recover char sequence order
label_seq_tensor: (batch_size, max_sent_len)
mask: (batch_size, max_sent_len)
"""
batch_size = len(input_batch_list)
words = [sent[0] for sent in input_batch_list]
features = [np.asarray(sent[1]) for sent in input_batch_list]
feature_num = len(features[0][0])
chars = [sent[2] for sent in input_batch_list]
labels = [sent[3] for sent in input_batch_list]
word_seq_lengths = torch.LongTensor(list(map(len, words)))
max_seq_len = word_seq_lengths.max().item()
word_seq_tensor = torch.zeros((batch_size, max_seq_len), requires_grad = if_train).long()
label_seq_tensor = torch.zeros((batch_size, max_seq_len), requires_grad = if_train).long()
feature_seq_tensors = []
for idx in range(feature_num):
feature_seq_tensors.append(torch.zeros((batch_size, max_seq_len),requires_grad = if_train).long())
''' 517 '''
# mask = torch.zeros((batch_size, max_seq_len), requires_grad = if_train).byte()
mask = torch.zeros((batch_size, max_seq_len), requires_grad=if_train).bool()
for idx, (seq, label, seqlen) in enumerate(zip(words, labels, word_seq_lengths)):
seqlen = seqlen.item()
word_seq_tensor[idx, :seqlen] = torch.LongTensor(seq)
label_seq_tensor[idx, :seqlen] = torch.LongTensor(label)
mask[idx, :seqlen] = torch.Tensor([1]*seqlen)
for idy in range(feature_num):
feature_seq_tensors[idy][idx,:seqlen] = torch.LongTensor(features[idx][:,idy])
word_seq_lengths, word_perm_idx = word_seq_lengths.sort(0, descending=True)
word_seq_tensor = word_seq_tensor[word_perm_idx]
for idx in range(feature_num):
feature_seq_tensors[idx] = feature_seq_tensors[idx][word_perm_idx]
label_seq_tensor = label_seq_tensor[word_perm_idx]
mask = mask[word_perm_idx]
### deal with char
# pad_chars (batch_size, max_seq_len)
pad_chars = [chars[idx] + [[0]] * (max_seq_len-len(chars[idx])) for idx in range(len(chars))]
length_list = [list(map(len, pad_char)) for pad_char in pad_chars]
max_word_len = max(map(max, length_list))
char_seq_tensor = torch.zeros((batch_size, max_seq_len, max_word_len), requires_grad = if_train).long()
char_seq_lengths = torch.LongTensor(length_list)
for idx, (seq, seqlen) in enumerate(zip(pad_chars, char_seq_lengths)):
for idy, (word, wordlen) in enumerate(zip(seq, seqlen)):
# print len(word), wordlen
char_seq_tensor[idx, idy, :wordlen] = torch.LongTensor(word)
char_seq_tensor = char_seq_tensor[word_perm_idx].view(batch_size*max_seq_len,-1)
char_seq_lengths = char_seq_lengths[word_perm_idx].view(batch_size*max_seq_len,)
char_seq_lengths, char_perm_idx = char_seq_lengths.sort(0, descending=True)
char_seq_tensor = char_seq_tensor[char_perm_idx]
_, char_seq_recover = char_perm_idx.sort(0, descending=False)
_, word_seq_recover = word_perm_idx.sort(0, descending=False)
if gpu:
word_seq_tensor = word_seq_tensor.cuda()
for idx in range(feature_num):
feature_seq_tensors[idx] = feature_seq_tensors[idx].cuda()
word_seq_lengths = word_seq_lengths.cuda()
word_seq_recover = word_seq_recover.cuda()
label_seq_tensor = label_seq_tensor.cuda()
char_seq_tensor = char_seq_tensor.cuda()
char_seq_recover = char_seq_recover.cuda()
mask = mask.cuda()
return word_seq_tensor,feature_seq_tensors, word_seq_lengths, word_seq_recover, char_seq_tensor, char_seq_lengths, char_seq_recover, label_seq_tensor, mask
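# Shape example (added note, hypothetical batch): two sentences of lengths 5 and 3 give
# max_seq_len = 5; after sorting by length the longer sentence becomes row 0, so
# word_seq_tensor has shape (2, 5), mask is [[1,1,1,1,1], [1,1,1,0,0]], and
# word_seq_recover holds the permutation that restores the original batch order.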
def batchify_sentence_classification_with_label(input_batch_list, gpu, if_train=True):
"""
input: list of words, chars and labels, various length. [[words, features, chars, labels],[words, features, chars,labels],...]
words: word ids for one sentence. (batch_size, sent_len)
features: features ids for one sentence. (batch_size, feature_num), each sentence has one set of feature
            chars: char ids for one sentence, various lengths. (batch_size, sent_len, each_word_length)
labels: label ids for one sentence. (batch_size,), each sentence has one set of feature
output:
zero padding for word and char, with their batch length
word_seq_tensor: (batch_size, max_sent_len) Variable
feature_seq_tensors: [(batch_size,), ... ] list of Variable
word_seq_lengths: (batch_size,1) Tensor
char_seq_tensor: (batch_size*max_sent_len, max_word_len) Variable
char_seq_lengths: (batch_size*max_sent_len,1) Tensor
char_seq_recover: (batch_size*max_sent_len,1) recover char sequence order
label_seq_tensor: (batch_size, )
mask: (batch_size, max_sent_len)
"""
batch_size = len(input_batch_list)
words = [sent[0] for sent in input_batch_list]
features = [np.asarray(sent[1]) for sent in input_batch_list]
feature_num = len(features[0])
chars = [sent[2] for sent in input_batch_list]
labels = [sent[3] for sent in input_batch_list]
word_seq_lengths = torch.LongTensor(list(map(len, words)))
max_seq_len = word_seq_lengths.max().item()
word_seq_tensor = torch.zeros((batch_size, max_seq_len), requires_grad = if_train).long()
label_seq_tensor = torch.zeros((batch_size, ), requires_grad = if_train).long()
feature_seq_tensors = []
for idx in range(feature_num):
feature_seq_tensors.append(torch.zeros((batch_size, max_seq_len),requires_grad = if_train).long())
''' 517 '''
# mask = torch.zeros((batch_size, max_seq_len), requires_grad = if_train).byte()
mask = torch.zeros((batch_size, max_seq_len), requires_grad = if_train).bool()
label_seq_tensor = torch.LongTensor(labels)
# exit(0)
for idx, (seq, seqlen) in enumerate(zip(words, word_seq_lengths)):
seqlen = seqlen.item()
word_seq_tensor[idx, :seqlen] = torch.LongTensor(seq)
mask[idx, :seqlen] = torch.Tensor([1]*seqlen)
for idy in range(feature_num):
feature_seq_tensors[idy][idx,:seqlen] = torch.LongTensor(features[idx][:,idy])
word_seq_lengths, word_perm_idx = word_seq_lengths.sort(0, descending=True)
word_seq_tensor = word_seq_tensor[word_perm_idx]
for idx in range(feature_num):
feature_seq_tensors[idx] = feature_seq_tensors[idx][word_perm_idx]
label_seq_tensor = label_seq_tensor[word_perm_idx]
mask = mask[word_perm_idx]
### deal with char
# pad_chars (batch_size, max_seq_len)
pad_chars = [chars[idx] + [[0]] * (max_seq_len-len(chars[idx])) for idx in range(len(chars))]
length_list = [list(map(len, pad_char)) for pad_char in pad_chars]
max_word_len = max(map(max, length_list))
char_seq_tensor = torch.zeros((batch_size, max_seq_len, max_word_len), requires_grad = if_train).long()
char_seq_lengths = torch.LongTensor(length_list)
for idx, (seq, seqlen) in enumerate(zip(pad_chars, char_seq_lengths)):
for idy, (word, wordlen) in enumerate(zip(seq, seqlen)):
# print len(word), wordlen
char_seq_tensor[idx, idy, :wordlen] = torch.LongTensor(word)
char_seq_tensor = char_seq_tensor[word_perm_idx].view(batch_size*max_seq_len,-1)
char_seq_lengths = char_seq_lengths[word_perm_idx].view(batch_size*max_seq_len,)
char_seq_lengths, char_perm_idx = char_seq_lengths.sort(0, descending=True)
char_seq_tensor = char_seq_tensor[char_perm_idx]
_, char_seq_recover = char_perm_idx.sort(0, descending=False)
_, word_seq_recover = word_perm_idx.sort(0, descending=False)
if gpu:
word_seq_tensor = word_seq_tensor.cuda()
for idx in range(feature_num):
feature_seq_tensors[idx] = feature_seq_tensors[idx].cuda()
word_seq_lengths = word_seq_lengths.cuda()
word_seq_recover = word_seq_recover.cuda()
label_seq_tensor = label_seq_tensor.cuda()
char_seq_tensor = char_seq_tensor.cuda()
char_seq_recover = char_seq_recover.cuda()
mask = mask.cuda()
return word_seq_tensor,feature_seq_tensors, word_seq_lengths, word_seq_recover, char_seq_tensor, char_seq_lengths, char_seq_recover, label_seq_tensor, mask
def load_model_to_test(data, train=False, dev=True, test=False, tag=None):
'''
Set any ONE of train, dev, test to true, in order to evaluate on that set.
:param data:
:param train:
    :param dev: defaults to True; the original experiment evaluated on the dev set
:param test:
:return:
'''
print("Load pretrained model...")
if data.sentence_classification:
model = SentClassifier(data)
else:
model = SeqLabel(data)
model.load_state_dict(torch.load(data.pretrained_model_path))
'''----------------TESTING----------------'''
if (train):
speed, acc, p, r, f, _,_, train_sensitivities = evaluate(data, model, "train")
heatmap_sensitivity(train_sensitivities, data.pretrained_model_path, testname="train")
if data.seg:
current_score = f
print("Speed: %.2fst/s; acc: %.4f, p: %.4f, r: %.4f, f: %.4f"%(speed, acc, p, r, f))
else:
current_score = acc
print("Speed: %.2fst/s; acc: %.4f"%(speed, acc))
if (dev):
# for tag in data.ablate_list:
speed, acc, p, r, f, _,_, sensitivities = evaluate(
data, model, "dev", tag_to_ablate=tag)
if data.seg:
current_score = f
print("Speed: %.2fst/s; acc: %.4f, p: %.4f, r: %.4f, f: %.4f" % (speed, acc, p, r, f))
else:
current_score = acc
print("Speed: %.2fst/s; acc: %.4f" % (speed, acc))
if (data.ablate_num == 0):
heatmap_sensitivity(sensitivities, data.pretrained_model_path, testname="dev")
importance_matrix(sensitivities, data)
if (test):
        speed, acc, p, r, f, _, _, _ = evaluate(data, model, "test")  # evaluate returns 8 values when nbest is None
if data.seg:
print("Speed: %.2fst/s; acc: %.4f, p: %.4f, r: %.4f, f: %.4f"%(speed, acc, p, r, f))
else:
print("Speed: %.2fst/s; acc: %.4f"%(speed, acc))
return
def train(data):
print("Training model...")
data.show_data_summary()
save_data_name = data.model_dir +".dset"
data.save(save_data_name)
if data.sentence_classification:
model = SentClassifier(data)
else:
model = SeqLabel(data)
if data.optimizer.lower() == "sgd":
optimizer = optim.SGD(model.parameters(), lr=data.HP_lr, momentum=data.HP_momentum,weight_decay=data.HP_l2)
elif data.optimizer.lower() == "adagrad":
optimizer = optim.Adagrad(model.parameters(), lr=data.HP_lr, weight_decay=data.HP_l2)
elif data.optimizer.lower() == "adadelta":
optimizer = optim.Adadelta(model.parameters(), lr=data.HP_lr, weight_decay=data.HP_l2)
elif data.optimizer.lower() == "rmsprop":
optimizer = optim.RMSprop(model.parameters(), lr=data.HP_lr, weight_decay=data.HP_l2)
elif data.optimizer.lower() == "adam":
optimizer = optim.Adam(model.parameters(), lr=data.HP_lr, weight_decay=data.HP_l2)
else:
print("Optimizer illegal: %s"%(data.optimizer))
exit(1)
best_dev = -10
# data.HP_iteration = 1
## start training
for idx in range(data.HP_iteration):
epoch_start = time.time()
temp_start = epoch_start
print("Epoch: %s/%s" %(idx,data.HP_iteration))
if data.optimizer == "SGD":
optimizer = lr_decay(optimizer, idx, data.HP_lr_decay, data.HP_lr)
instance_count = 0
sample_id = 0
sample_loss = 0
total_loss = 0
right_token = 0
whole_token = 0
random.shuffle(data.train_Ids)
print("Shuffle: first input word list:", data.train_Ids[0][0])
        ## set model in train mode
model.train()
model.zero_grad()
batch_size = data.HP_batch_size
batch_id = 0
train_num = len(data.train_Ids)
total_batch = train_num//batch_size+1
for batch_id in range(total_batch):
start = batch_id*batch_size
end = (batch_id+1)*batch_size
if end >train_num:
end = train_num
instance = data.train_Ids[start:end]
if not instance:
continue
batch_word, batch_features, batch_wordlen, batch_wordrecover, batch_char, batch_charlen, batch_charrecover, batch_label, mask = batchify_with_label(instance, data.HP_gpu, True, data.sentence_classification)
instance_count += 1
loss, tag_seq = model.calculate_loss(batch_word, batch_features, batch_wordlen, batch_char, batch_charlen, batch_charrecover, batch_label, mask)
right, whole = predict_check(tag_seq, batch_label, mask, data.sentence_classification)
right_token += right
whole_token += whole
# print("loss:",loss.item())
sample_loss += loss.item()
total_loss += loss.item()
if end%500 == 0:
temp_time = time.time()
temp_cost = temp_time - temp_start
temp_start = temp_time
print(" Instance: %s; Time: %.2fs; loss: %.4f; acc: %s/%s=%.4f"%(end, temp_cost, sample_loss, right_token, whole_token,(right_token+0.)/whole_token))
if sample_loss > 1e8 or str(sample_loss) == "nan":
print("ERROR: LOSS EXPLOSION (>1e8) ! PLEASE SET PROPER PARAMETERS AND STRUCTURE! EXIT....")
exit(1)
sys.stdout.flush()
sample_loss = 0
loss.backward()
optimizer.step()
model.zero_grad()
temp_time = time.time()
temp_cost = temp_time - temp_start
print(" Instance: %s; Time: %.2fs; loss: %.4f; acc: %s/%s=%.4f"%(end, temp_cost, sample_loss, right_token, whole_token,(right_token+0.)/whole_token))
epoch_finish = time.time()
epoch_cost = epoch_finish - epoch_start
print("Epoch: %s training finished. Time: %.2fs, speed: %.2fst/s, total loss: %s"%(idx, epoch_cost, train_num/epoch_cost, total_loss))
print("totalloss:", total_loss)
if total_loss > 1e8 or str(total_loss) == "nan":
print("ERROR: LOSS EXPLOSION (>1e8) ! PLEASE SET PROPER PARAMETERS AND STRUCTURE! EXIT....")
exit(1)
# continue
speed, acc, p, r, f, _,_ , sensitivities = evaluate(data, model, "dev")
dev_finish = time.time()
dev_cost = dev_finish - epoch_finish
if data.seg:
current_score = f
print("Dev: time: %.2fs, speed: %.2fst/s; acc: %.4f, p: %.4f, r: %.4f, f: %.4f"%(dev_cost, speed, acc, p, r, f))
else:
current_score = acc
print("Dev: time: %.2fs speed: %.2fst/s; acc: %.4f"%(dev_cost, speed, acc))
if current_score > best_dev:
if data.seg:
print("Exceed previous best f score:", best_dev)
else:
print("Exceed previous best acc score:", best_dev)
model_name = data.model_dir +'.'+ str(idx) + ".model"
print("Save current best model in file:", model_name)
torch.save(model.state_dict(), model_name)
best_dev = current_score
# ## decode test
speed, acc, p, r, f, _,_ , sensitivities = evaluate(data, model, "test")
test_finish = time.time()
test_cost = test_finish - dev_finish
if data.seg:
print("Test: time: %.2fs, speed: %.2fst/s; acc: %.4f, p: %.4f, r: %.4f, f: %.4f"%(test_cost, speed, acc, p, r, f))
else:
print("Test: time: %.2fs, speed: %.2fst/s; acc: %.4f"%(test_cost, speed, acc))
gc.collect()
def load_model_decode(data, name):
print("Load Model from file: {}, name={}".format(data.model_dir, name) )
if data.sentence_classification:
model = SentClassifier(data)
else:
model = SeqLabel(data)
# model = SeqModel(data)
## load model need consider if the model trained in GPU and load in CPU, or vice versa
# if not gpu:
# model.load_state_dict(torch.load(model_dir))
# # model.load_state_dict(torch.load(model_dir), map_location=lambda storage, loc: storage)
# # model = torch.load(model_dir, map_location=lambda storage, loc: storage)
# else:
# model.load_state_dict(torch.load(model_dir))
# # model = torch.load(model_dir)
model.load_state_dict(torch.load(data.load_model_dir))
print("Decode %s data, nbest: %s ..."%(name, data.nbest))
start_time = time.time()
speed, acc, p, r, f, pred_results, pred_scores = evaluate(data, model, name, data.nbest)
end_time = time.time()
time_cost = end_time - start_time
if data.seg:
print("%s: time:%.2fs, speed:%.2fst/s; acc: %.4f, p: %.4f, r: %.4f, f: %.4f"%(name, time_cost, speed, acc, p, r, f))
else:
print("%s: time:%.2fs, speed:%.2fst/s; acc: %.4f"%(name, time_cost, speed, acc))
return pred_results, pred_scores
def load_ablation_file():
filename = ("Importance-" + data.model_dir + ".txt").replace('/','-')
ablate_lists = {}
''' B-ORG [4, 24, 14, 15, 19, 46, 36, 22, 27, 9, 13, 20, 25, 33, 45, 0, 35, 40, 48, 42, 44, 18, 37, 21, 32, 29, 16, 26, 11, 7, 23, 49, 12, 5, 8, 38, 2, 47, 1, 43, 31, 30, 41, 6, 28, 3, 34, 39, 10, 17]'''
with open(filename, 'r+') as file:
lines = file.readlines()
for line in lines:
line = line.strip()
if len(line) > 0:
                (tag, neuron_indices) = line.split('[')[0].strip(), line.split('[')[1].strip().replace(']','')
                neuron_indices = neuron_indices.split(',')
                ablate_lists[tag] = [int(i) for i in neuron_indices]
return ablate_lists
def clear_sensitivity_data():
data.iteration = 0
data.batch_contributions = []
data.tag_contributions = {}
data.tag_counts = {}
data.sensitivity_matrices = []
data.sensitivity_matrices_combined = []
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Tuning with NCRF++')
# parser.add_argument('--status', choices=['train', 'decode'], help='update algorithm', default='train')
parser.add_argument('--config', help='Configuration File', default='None')
parser.add_argument('--wordemb', help='Embedding for words', default='None')
parser.add_argument('--charemb', help='Embedding for chars', default='None')
parser.add_argument('--status', choices=['train', 'decode'], help='update algorithm', default='train')
parser.add_argument('--savemodel', default="data/model/saved_model.lstmcrf.")
parser.add_argument('--savedset', help='Dir of saved data setting')
parser.add_argument('--train', default="data/conll03/train.bmes")
parser.add_argument('--dev', default="data/conll03/dev.bmes" )
parser.add_argument('--test', default="data/conll03/test.bmes")
parser.add_argument('--seg', default="True")
parser.add_argument('--raw')
parser.add_argument('--loadmodel')
parser.add_argument('--output')
parser.add_argument('--loadtotest', help='Load the model just to test it')
parser.add_argument('--pretrainedmodelpath', help='Path to a pretrained model that you just want to test',
default=DEFAULT_TRAINED_FILE)
parser.add_argument('--ablate', help='how many neurons to ablate', default=0) # indicate number of neurons to ablate
# Importance.txt is generated by importance_matrix() (automatically reading this file is a TODO)
parser.add_argument('--ablate_file', help='list of neurons to ablate')
args = parser.parse_args()
data = Data()
data.HP_gpu = torch.cuda.is_available()
if args.config == 'None':
data.train_dir = args.train
data.dev_dir = args.dev
data.test_dir = args.test
data.model_dir = args.savemodel
data.dset_dir = args.savedset
print("Save dset directory:",data.dset_dir)
save_model_dir = args.savemodel
data.word_emb_dir = args.wordemb
data.char_emb_dir = args.charemb
if args.seg.lower() == 'true':
data.seg = True
else:
data.seg = False
print("Seed num:",seed_num)
else:
data.read_config(args.config)
# adding arg for pretrained model path
data.pretrained_model_path = args.pretrainedmodelpath
data.ablate_num = int(args.ablate)
# data.show_data_summary()
status = data.status.lower()
print("Seed num:",seed_num)
if status == 'train':
print("MODEL: train")
data_initialization(data) # set up alphabets
data.generate_instance('train')
data.generate_instance('dev')
data.generate_instance('test')
data.build_pretrain_emb()
if not args.loadtotest:
print("Training model, not just testing because --loadtotest is {}".format(args.loadtotest))
print("Loading ablation file even though it's just a placeholder")
debug_ablation = False
if debug_ablation:
data.ablate_list = load_ablation_file() # TODO: file not found
tag_list = data.ablate_list.keys()
train(data)
else:
if args.ablate:
data.ablate_num = int(args.ablate)
print("Loading model to test.")
data.ablate_list = load_ablation_file()
tag_list = data.ablate_list.keys()
# todo: command line arg for specific current ablate index
# todo: command line arg for intervals
for tag in tag_list:
data.ablate_tag = tag
data.current_ablate_ind[tag] = 0
data.acc_chart[data.ablate_tag] = {} # clear accuracy dict of lists for the tag
for i in range(0, data.ablate_num + 1):
data.current_ablate_ind[tag] = i #+= 1 # todo: option to skip by different interval like every 5
clear_sensitivity_data()
load_model_to_test(data, tag=tag)
# print out acc_chart
#for tag in data.ablate_list:
print ('{} ABLATION RESULTS:'.format(tag))
degradations = {}
for t in tag_list:
print("\tTag: {}, Decr. Accs: {}".format(t, data.acc_chart[tag][t]))
degradations[t] = \
[data.acc_chart[tag][t][ind] - data.acc_chart[tag][t][0] for ind in range (0, data.ablate_num+1)]
print("\t\tDegradation={})".format(degradations[t]))
if (t==tag):
# ablation tag, so use bolder symbol
plt.plot(degradations[t], 'bs', label=t)
else:
plt.plot(degradations[t], label=t)
plt.title(tag, fontsize=18)
plt.legend()
plt.savefig("{}_chart.png".format(tag))
plt.clf() # clear the plot -was plot.show()
elif status == 'decode':
print("MODEL: decode")
data.load(data.dset_dir)
data.read_config(args.config)
print(data.raw_dir)
# exit(0)
data.show_data_summary()
data.generate_instance('raw')
print("nbest: %s"%(data.nbest))
decode_results, pred_scores = load_model_decode(data, 'raw')
if data.nbest and not data.sentence_classification:
data.write_nbest_decoded_results(decode_results, pred_scores, 'raw')
else:
data.write_decoded_results(decode_results, 'raw')
else:
print("Invalid argument! Please use valid arguments! (train/test/decode)")
| 44.199554 | 219 | 0.645749 | [
"Apache-2.0"
] | DeniseMak/ner-neuron | main.py | 39,647 | Python |
import dash
import dash_core_components as dcc
import dash_html_components as html
import pandas as pd
import plotly.graph_objs as go
from pymongo import MongoClient
import json
import os
client = MongoClient(os.environ.get("DATABASE"))
db = client.politics.brexit
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
data = list(db.find({}, {"_id": 0}))
data_df = pd.json_normalize(data).set_index('timestamp')
top_columns = data_df.sum().sort_values(ascending=False)
top_10 = top_columns[0:10].index.tolist()
top10df = data_df[top_10].fillna(0).astype(int)
df = top10df[-12:]
cols = list(df.columns)
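# (Added note) The code above assumes each MongoDB document looks roughly like
# {"timestamp": ..., "words": {"brexit": 12, "eu": 7, ...}}; pd.json_normalize flattens the
# nested counts into "words.<keyword>" columns, which is why the plot legend strips the
# "words." prefix below.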
# Set up plot
fig = go.Figure()
colors = {
'background': '#111111',
'text': '#7FDBFF'
}
app.layout = html.Div(children=[
html.H1(
children='#brexit',
style={
'textAlign': 'center',
'color': colors['text']
}
),
html.Div(children='Top Keywords used with the #brexit hashtag in the last 12 hours', style={
'textAlign': 'center',
'color': colors['text']
}),
dcc.Graph(
id='test-plot',
figure={
'data': [
go.Scatter(
x=df.index,
y=df[i],
name=i.replace('words.', ''),
line=dict(shape='spline', width=2),
opacity=0.8
) for i in cols[0:10]
],
'layout': go.Layout(
xaxis={'title': 'Time'},
yaxis={'title': 'Frequency'},
margin={'l': 40, 'b': 80, 't': 10, 'r': 10},
legend={'x': 0, 'y': 1},
hovermode='closest'
)
},
),
dcc.Interval(
id='interval-component',
interval=60*1000, # in milliseconds
n_intervals=0
)
])
if __name__ == '__main__':
app.run_server(debug=True)
| 24.432099 | 96 | 0.547246 | [
"MIT"
] | brittwitham/brexit-word-freq | brexit-word-freq/app.py | 1,979 | Python |
import pytest
from nbformat import notebooknode
from nb2hugo.preprocessors import FixLatexPreprocessor
@pytest.fixture
def preprocessor():
"""Return an instance of FixLatexPreprocessor."""
return FixLatexPreprocessor()
source = (
'Some text with an inline equality $escaped\_0 = lower_0$.\n'
'And a display equality:\n'
'$$escaped\_1 = subscript_1.$$\n'
'And a second one on multiple lines:\n'
'$$\n'
'escaped\_2\n'
'$$\n'
)
dollars_processed = (
'Some text with an inline equality \\\\(escaped\_0 = lower_0\\\\).\n'
'And a display equality:\n'
'\\\\[escaped\_1 = subscript_1.\\\\]\n'
'And a second one on multiple lines:\n'
'\\\\[\n'
'escaped\_2\n'
'\\\\]\n'
)
fully_processed = (
'Some text with an inline equality \\\\(escaped\\\\_0 = lower_0\\\\).\n'
'And a display equality:\n'
'\\\\[escaped\\\\_1 = subscript_1.\\\\]\n'
'And a second one on multiple lines:\n'
'\\\\[\n'
'escaped\\\\_2\n'
'\\\\]\n'
)
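# (Added note) The fixtures above encode the two-step transformation under test:
# dollars_processed only swaps the $...$ / $$...$$ delimiters for \\(...\\) / \\[...\\],
# while fully_processed additionally doubles the escaped underscores (\_ becomes \\_)
# inside the converted math spans.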
def test_replace_latex_enclosing_dollars(preprocessor):
result = preprocessor._replace_latex_enclosing_dollars(source)
assert result == dollars_processed
def test_fix_latex_escaped_underscores(preprocessor):
result = preprocessor._fix_latex_escaped_underscores(dollars_processed)
assert result == fully_processed
raw_cell, code_cell, markdown_cell, expected_markdown_cell = [
notebooknode.from_dict({"cell_type": cell_type,
"metadata": {},
"source": source})
for cell_type, source in [('raw', source),
('code', source),
('markdown', source),
('markdown', fully_processed)]
]
@pytest.mark.parametrize("input_cell, expected_cell", [
(raw_cell, raw_cell),
(code_cell, code_cell),
(markdown_cell, expected_markdown_cell),
])
def test_preprocess_cell(preprocessor, input_cell, expected_cell):
assert preprocessor.preprocess_cell(input_cell, None, None) == (expected_cell, None)
| 30.940299 | 88 | 0.630005 | [
"MIT"
] | IMTorgDemo/hugo-nb2hugo | tests/preprocessors/test_fixlatex.py | 2,073 | Python |
"""WARNING: This code is not recommanded. It is not finished, it is
slower then the version in sandbox/neighbours.py, and it do not work
on the GPU.
We only keep this version here as it is a little bit more generic, so
it cover more cases. But thoses cases aren't needed frequently, so you
probably don't want to use this version, go see neighbours.py!!!!!!!
"""
import numpy
from six.moves import xrange
import six.moves.builtins as builtins
import theano
from theano import gof, Op
class NeighbourhoodsFromImages(Op):
__props__ = ("n_dims_before", "dims_neighbourhoods", "strides",
"ignore_border", "inverse")
def __init__(self, n_dims_before, dims_neighbourhoods,
strides=None, ignore_border=False, inverse=False):
"""
This extracts neighbourhoods from "images", but in a
dimension-generic manner.
In the 2D case, this is similar to downsampling, but instead of reducing
a group of 2x2 pixels (for example) to a single new pixel in the output,
you place those 4 pixels in a row.
For example, say you have this 2x4 image::
[ [ 0.5, 0.6, 0.7, 0.8 ],
[ 0.1, 0.2, 0.3, 0.4 ] ]
and you want to extract 2x2 neighbourhoods. This op would then produce::
[ [ [ 0.5, 0.6, 0.1, 0.2 ] ], # the first 2x2 group of pixels
[ [ 0.7, 0.8, 0.3, 0.4 ] ] ] # the second one
so think of a 2D downsampling where each pixel of the resulting array
is replaced by an array containing the (flattened) pixels of the
corresponding neighbourhood.
        If you provide a stack of 2D images, or multiple stacks, each image
will be treated independently, and the first dimensions of the array
will be preserved as such.
This also makes sense in the 1D or 3D case. Below I'll still be calling
those "images", by analogy.
In the 1D case, you're
extracting subsequences from the original sequence. In the 3D case,
you're extracting cuboids. If you ever find a 4D use, tell me! It
should be possible, anyhow.
Parameters
----------
n_dims_before : int
Number of dimensions preceding the "images".
dims_neighbourhoods : tuple of ints
Exact shape of windows to be extracted (e.g. (2,2) in the case above).
n_dims_before + len(dims_neighbourhoods) should be equal to the
number of dimensions in the input given to the op.
strides : tuple of int
Number of elements to skip when moving to the next neighbourhood,
for each dimension of dims_neighbourhoods. There can be overlap
between neighbourhoods, or gaps.
ignore_border : bool
If the dimensions of the neighbourhoods don't exactly divide the
dimensions of the "images", you can either fill the last
neighbourhood with zeros (False) or drop it entirely (True).
inverse : bool
You shouldn't have to use this. Only used by child class
ImagesFromNeighbourhoods which simply reverses the assignment.
"""
self.n_dims_before = n_dims_before
self.dims_neighbourhoods = dims_neighbourhoods
if strides is not None:
self.strides = strides
else:
self.strides = dims_neighbourhoods
self.ignore_border = ignore_border
self.inverse = inverse
self.code_string, self.code = self.make_py_code()
def __str__(self):
return '%s{%s,%s,%s,%s}' % (self.__class__.__name__,
self.n_dims_before,
self.dims_neighbourhoods,
self.strides,
self.ignore_border)
def out_shape(self, input_shape):
dims = list(input_shape[:self.n_dims_before])
num_strides = [0 for i in xrange(len(self.strides))]
neigh_flattened_dim = 1
for i, ds in enumerate(self.dims_neighbourhoods):
cur_stride = self.strides[i]
input_dim = input_shape[i + self.n_dims_before]
target_dim = input_dim // cur_stride
if not self.ignore_border and (input_dim % cur_stride) != 0:
target_dim += 1
num_strides[i] = target_dim
dims.append(target_dim)
neigh_flattened_dim *= ds
dims.append(neigh_flattened_dim)
return dims, num_strides
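    # Worked example (added note): for the 2x4 image in the class docstring, with
    # n_dims_before=0, dims_neighbourhoods=(2, 2) and the default strides (2, 2),
    # out_shape((2, 4)) returns dims=[1, 2, 4] (1 row of neighbourhoods, 2 per row,
    # 4 flattened pixels each) and num_strides=[1, 2].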
# for inverse mode
# "output" here actually referes to the Op's input shape (but it's inverse mode)
def in_shape(self, output_shape):
out_dims = list(output_shape[:self.n_dims_before])
num_strides = []
# in the inverse case we don't worry about borders:
# they either have been filled with zeros, or have been cropped
for i, ds in enumerate(self.dims_neighbourhoods):
# the number of strides performed by NeighFromImg is
# directly given by this shape
num_strides.append(output_shape[self.n_dims_before + i])
# our Op's output image must be at least this wide
at_least_width = num_strides[i] * self.strides[i]
# ... which gives us this number of neighbourhoods
num_neigh = at_least_width // ds
if at_least_width % ds != 0:
num_neigh += 1
# making the final Op's output dimension this wide
out_dims.append(num_neigh * ds)
return out_dims, num_strides
def make_node(self, x):
x = theano.tensor.as_tensor_variable(x)
if self.inverse:
# +1 in the inverse case
if x.type.ndim != (self.n_dims_before +
len(self.dims_neighbourhoods) + 1):
raise TypeError()
else:
if x.type.ndim != (self.n_dims_before +
len(self.dims_neighbourhoods)):
raise TypeError()
return gof.Apply(self, [x], [x.type()])
def perform(self, node, inp, out):
x, = inp
z, = out
if self.inverse:
# +1 in the inverse case
if len(x.shape) != (self.n_dims_before +
len(self.dims_neighbourhoods) + 1):
raise ValueError("Images passed as input don't match the "
"dimensions passed when this (inversed) "
"Apply node was created")
prod = 1
for dim in self.dims_neighbourhoods:
prod *= dim
if x.shape[-1] != prod:
raise ValueError("Last dimension of neighbourhoods (%s) is not"
" the product of the neighbourhoods dimensions"
" (%s)" % (str(x.shape[-1]), str(prod)))
else:
if len(x.shape) != (self.n_dims_before +
len(self.dims_neighbourhoods)):
raise ValueError("Images passed as input don't match the "
"dimensions passed when this Apply node "
"was created")
if self.inverse:
input_shape, num_strides = self.in_shape(x.shape)
out_shape, dummy = self.out_shape(input_shape)
else:
input_shape = x.shape
out_shape, num_strides = self.out_shape(input_shape)
if z[0] is None:
if self.inverse:
z[0] = numpy.zeros(input_shape)
else:
z[0] = numpy.zeros(out_shape)
z[0] = theano._asarray(z[0], dtype=x.dtype)
exec(self.code)
def make_py_code(self):
code = self._py_outerloops()
for i in xrange(len(self.strides)):
code += self._py_innerloop(i)
code += self._py_assignment()
return code, builtins.compile(code, '<string>', 'exec')
def _py_outerloops(self):
code_before = ""
for dim_idx in xrange(self.n_dims_before):
code_before += ('\t' * (dim_idx)) + \
"for outer_idx_%d in xrange(input_shape[%d]):\n" % \
(dim_idx, dim_idx)
return code_before
def _py_innerloop(self, inner_dim_no):
base_indent = ('\t' * (self.n_dims_before + inner_dim_no * 2))
code_before = base_indent + \
"for stride_idx_%d in xrange(num_strides[%d]):\n" % \
(inner_dim_no, inner_dim_no)
base_indent += '\t'
code_before += base_indent + \
"dim_%d_offset = stride_idx_%d * self.strides[%d]\n" %\
(inner_dim_no, inner_dim_no, inner_dim_no)
code_before += base_indent + \
"max_neigh_idx_%d = input_shape[%d] - dim_%d_offset\n" % \
(inner_dim_no, self.n_dims_before + inner_dim_no, inner_dim_no)
code_before += base_indent + \
("for neigh_idx_%d in xrange(min(max_neigh_idx_%d,"
" self.dims_neighbourhoods[%d])):\n") %\
(inner_dim_no, inner_dim_no, inner_dim_no)
return code_before
def _py_flattened_idx(self):
return "+".join(["neigh_strides[%d]*neigh_idx_%d" % (i, i)
for i in xrange(len(self.strides))])
def _py_assignment(self):
input_idx = "".join(["outer_idx_%d," % (i,)
for i in xrange(self.n_dims_before)])
input_idx += "".join(["dim_%d_offset+neigh_idx_%d," %
(i, i) for i in xrange(len(self.strides))])
out_idx = "".join(
["outer_idx_%d," % (i,) for i in xrange(self.n_dims_before)] +
["stride_idx_%d," % (i,) for i in xrange(len(self.strides))])
out_idx += self._py_flattened_idx()
# return_val = '\t' * (self.n_dims_before + len(self.strides)*2)
# return_val += "print "+input_idx+"'\\n',"+out_idx+"\n"
return_val = '\t' * (self.n_dims_before + len(self.strides) * 2)
if self.inverse:
            # remember z and x are swapped:
# z is the Op's output, but has input_shape
# x is the Op's input, but has out_shape
return_val += "z[0][%s] = x[%s]\n" % (input_idx, out_idx)
else:
return_val += "z[0][%s] = x[%s]\n" % (out_idx, input_idx)
return return_val
class ImagesFromNeighbourhoods(NeighbourhoodsFromImages):
def __init__(self, n_dims_before, dims_neighbourhoods,
strides=None, ignore_border=False):
NeighbourhoodsFromImages.__init__(self, n_dims_before,
dims_neighbourhoods,
strides=strides,
ignore_border=ignore_border,
inverse=True)
# and that's all there is to it
| 40.869403 | 84 | 0.570711 | [
"BSD-3-Clause"
] | jych/Theano | theano/sandbox/neighbourhoods.py | 10,953 | Python |
# ======🙋🙋🙋 Implements mapping a single function fn to a single URL handler function!!!
import asyncio, os, inspect, logging, functools
from urllib import parse
from aiohttp import web
from apis import APIError
def get(path):
'''
Define decorator @get('/path')
'''
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kw):
return func(*args, **kw)
wrapper.__method__ = 'GET'
wrapper.__route__ = path
return wrapper
return decorator
def post(path):
'''
Define decorator @post('/path')
'''
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kw):
return func(*args, **kw)
wrapper.__method__ = 'POST'
wrapper.__route__ = path
return wrapper
return decorator
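# Example (added sketch; 'index' is a hypothetical handler, not part of this module):
#
#   @get('/')
#   def index(request):
#       return '<h1>Index</h1>'
#
# The decorator leaves index callable as-is but attaches index.__method__ == 'GET' and
# index.__route__ == '/', which add_route() below reads to register it with aiohttp.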
def get_required_kw_args(fn):
args = []
params = inspect.signature(fn).parameters
for name, param in params.items():
if param.kind == inspect.Parameter.KEYWORD_ONLY and param.default == inspect.Parameter.empty:
args.append(name)
return tuple(args)
def get_named_kw_args(fn):
args = []
params = inspect.signature(fn).parameters
for name, param in params.items():
if param.kind == inspect.Parameter.KEYWORD_ONLY:
args.append(name)
return tuple(args)
def has_named_kw_args(fn):
params = inspect.signature(fn).parameters
for name, param in params.items():
if param.kind == inspect.Parameter.KEYWORD_ONLY:
return True
def has_var_kw_arg(fn):
params = inspect.signature(fn).parameters
for name, param in params.items():
if param.kind == inspect.Parameter.VAR_KEYWORD:
return True
def has_request_arg(fn):
sig = inspect.signature(fn)
params = sig.parameters
found = False
for name, param in params.items():
if name == 'request':
found = True
continue
if found and (param.kind != inspect.Parameter.VAR_POSITIONAL and param.kind != inspect.Parameter.KEYWORD_ONLY and param.kind != inspect.Parameter.VAR_KEYWORD):
raise ValueError('request parameter must be the last named parameter in function: %s%s' % (fn.__name__, str(sig)))
return found
class RequestHandler(object):
def __init__(self, app, fn):
self._app = app
self._func = fn
self._has_request_arg = has_request_arg(fn)
self._has_var_kw_arg = has_var_kw_arg(fn)
self._has_named_kw_args = has_named_kw_args(fn)
self._named_kw_args = get_named_kw_args(fn)
self._required_kw_args = get_required_kw_args(fn)
@asyncio.coroutine
def __call__(self, request):
kw = None
if self._has_var_kw_arg or self._has_named_kw_args or self._required_kw_args:
if request.method == 'POST':
if not request.content_type:
return web.HTTPBadRequest('Missing Content-Type.')
ct = request.content_type.lower()
if ct.startswith('application/json'):
params = yield from request.json()
if not isinstance(params, dict):
return web.HTTPBadRequest('JSON body must be object.')
kw = params
elif ct.startswith('application/x-www-form-urlencoded') or ct.startswith('multipart/form-data'):
params = yield from request.post()
kw = dict(**params)
else:
return web.HTTPBadRequest('Unsupported Content-Type: %s' % request.content_type)
if request.method == 'GET':
qs = request.query_string
if qs:
kw = dict()
for k, v in parse.parse_qs(qs, True).items():
kw[k] = v[0]
if kw is None:
kw = dict(**request.match_info)
else:
if not self._has_var_kw_arg and self._named_kw_args:
                # remove all unnamed kw args:
copy = dict()
for name in self._named_kw_args:
if name in kw:
copy[name] = kw[name]
kw = copy
# check named arg:
for k, v in request.match_info.items():
if k in kw:
logging.warning('Duplicate arg name in named arg and kw args: %s' % k)
kw[k] = v
if self._has_request_arg:
kw['request'] = request
# check required kw:
if self._required_kw_args:
for name in self._required_kw_args:
if not name in kw:
return web.HTTPBadRequest('Missing argument: %s' % name)
logging.info('call with args: %s' % str(kw))
try:
r = yield from self._func(**kw)
return r
except APIError as e:
return dict(error=e.error, data=e.data, message=e.message)
def add_static(app):
path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'static')
app.router.add_static('/static/', path)
logging.info('add static %s => %s' % ('/static/', path))
def add_route(app, fn):
method = getattr(fn, '__method__', None)
path = getattr(fn, '__route__', None)
if path is None or method is None:
raise ValueError('@get or @post not defined in %s.' % str(fn))
if not asyncio.iscoroutinefunction(fn) and not inspect.isgeneratorfunction(fn):
fn = asyncio.coroutine(fn)
logging.info('add route %s %s => %s(%s)' % (method, path, fn.__name__, ', '.join(inspect.signature(fn).parameters.keys())))
app.router.add_route(method, path, RequestHandler(app, fn))
def add_routes(app, module_name):
n = module_name.rfind('.')
if n == (-1):
mod = __import__(module_name, globals(), locals())
else:
name = module_name[n+1:]
mod = getattr(__import__(module_name[:n], globals(), locals(), [name]), name)
for attr in dir(mod):
if attr.startswith('_'):
continue
fn = getattr(mod, attr)
if callable(fn):
method = getattr(fn, '__method__', None)
path = getattr(fn, '__route__', None)
if method and path:
                add_route(app, fn)
 | 36.567251 | 167 | 0.581481 | [
    "MIT"
] | qsyPython/Python_play_now | qiushaoyi/programs/qsy_program_codes/python3-webapp/www/coroweb.py | 6,296 | Python |
import uuid
def get_unique_filename(instance, filename):
ext = filename.split('.')[-1]
filename = '{}.{}'.format(uuid.uuid4(), ext)
return 'user_{0}/{1}'.format(instance.author.id, filename)
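# Example (added note, hypothetical values): for an instance whose author.id is 7 and an
# upload named "photo.jpg", this returns something like "user_7/1a2b3c....jpg": the file
# keeps its extension but gets a collision-free uuid4 name inside a per-user folder.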
| 25.625 | 62 | 0.653659 | [
"Apache-2.0"
] | yuramorozov01/courses_system | courses/src/base_app/utils.py | 205 | Python |
import torch.nn as nn
import torch
import torch.nn.functional as F
class CombinationModule(nn.Module):
def __init__(self, c_low, c_up, batch_norm=False, group_norm=False, instance_norm=False):
super(CombinationModule, self).__init__()
if batch_norm:
self.up = nn.Sequential(nn.Conv2d(c_low, c_up, kernel_size=3, padding=1, stride=1),
nn.BatchNorm2d(c_up),
nn.ReLU(inplace=True))
self.cat_conv = nn.Sequential(nn.Conv2d(c_up*2, c_up, kernel_size=1, stride=1),
nn.BatchNorm2d(c_up),
nn.ReLU(inplace=True))
elif group_norm:
self.up = nn.Sequential(nn.Conv2d(c_low, c_up, kernel_size=3, padding=1, stride=1),
nn.GroupNorm(num_groups=32, num_channels=c_up),
nn.ReLU(inplace=True))
self.cat_conv = nn.Sequential(nn.Conv2d(c_up * 2, c_up, kernel_size=1, stride=1),
nn.GroupNorm(num_groups=32, num_channels=c_up),
nn.ReLU(inplace=True))
elif instance_norm:
self.up = nn.Sequential(nn.Conv2d(c_low, c_up, kernel_size=3, padding=1, stride=1),
nn.InstanceNorm2d(num_features=c_up),#track_running_stats=True),
nn.ReLU(inplace=True))
self.cat_conv = nn.Sequential(nn.Conv2d(c_up * 2, c_up, kernel_size=1, stride=1),
nn.InstanceNorm2d(num_features=c_up),# track_running_stats=True),
nn.ReLU(inplace=True))
else:
self.up = nn.Sequential(nn.Conv2d(c_low, c_up, kernel_size=3, padding=1, stride=1),
nn.ReLU(inplace=True))
self.cat_conv = nn.Sequential(nn.Conv2d(c_up*2, c_up, kernel_size=1, stride=1),
nn.ReLU(inplace=True))
def forward(self, x_low, x_up):
x_low = self.up(F.interpolate(x_low, x_up.shape[2:], mode='bilinear', align_corners=False))
# if self.up[1].running_mean is not None:
# print(self.up[1].running_mean.shape)
return self.cat_conv(torch.cat((x_up, x_low), 1))
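# Usage sketch (added note, hypothetical channel sizes):
# comb = CombinationModule(c_low=256, c_up=128, batch_norm=True)
# x_low = torch.randn(1, 256, 16, 16) # deeper, lower-resolution features
# x_up = torch.randn(1, 128, 32, 32) # shallower, higher-resolution features
# out = comb(x_low, x_up) # x_low is upsampled to 32x32 and merged -> (1, 128, 32, 32)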
| 60.025 | 107 | 0.52853 | [
"MIT"
] | yijingru/ObjGuided-Instance-Segmentation | models/layers.py | 2,401 | Python |
# encoding: utf-8
# module Autodesk.Civil.Settings calls itself Settings
# from AeccDbMgd, Version=13.3.854.0, Culture=neutral, PublicKeyToken=null, AeccPressurePipesMgd, Version=13.3.854.0, Culture=neutral, PublicKeyToken=null
# by generator 1.145
# no doc
# no imports
# no functions
# classes
class AbbreviationAlignmentEnhancedType(Enum):
""" enum AbbreviationAlignmentEnhancedType, values: AlignmentBeginningPoint (402706556), AlignmentEndPoint (402706557), CompoundSpiralLargeRadiusAtBeginning (402706566), CompoundSpiralLargeRadiusAtEnd (402706567), CompoundSpiralSmallRadiusAtBeginning (402706568), CompoundSpiralSmallRadiusAtEnd (402706569), CurveBeginning (402706560), CurveEnd (402706561), LineBeginning (402706558), LineEnd (402706559), SimpleSpiralLargeRadiusAtBeginning (402706562), SimpleSpiralLargeRadiusAtEnd (402706563), SimpleSpiralSmallRadiusAtBeginning (402706564), SimpleSpiralSmallRadiusAtEnd (402706565) """
AlignmentBeginningPoint = None
AlignmentEndPoint = None
CompoundSpiralLargeRadiusAtBeginning = None
CompoundSpiralLargeRadiusAtEnd = None
CompoundSpiralSmallRadiusAtBeginning = None
CompoundSpiralSmallRadiusAtEnd = None
CurveBeginning = None
CurveEnd = None
LineBeginning = None
LineEnd = None
SimpleSpiralLargeRadiusAtBeginning = None
SimpleSpiralLargeRadiusAtEnd = None
SimpleSpiralSmallRadiusAtBeginning = None
SimpleSpiralSmallRadiusAtEnd = None
value__ = None
class AbbreviationAlignmentType(Enum):
""" enum AbbreviationAlignmentType, values: AlignmentBeginning (67162235), AlignmentEnd (67162234), CompoundCurveCurveIntersect (67162197), CurveSpiralIntersect (67162201), CurveTangentIntersect (67162196), MidCurvePoint (67162254), ReverseCurveCurveIntersect (67162198), ReverseSpiralIntersect (67162204), SpiralCurveIntersect (67162202), SpiralSpiralIntersect (67162203), SpiralTangentIntersect (67162200), StationEquationDecreasing (67162253), StationEquationIncreasing (67162252), TangentCurveIntersect (67162195), TangentSpiralIntersect (67162199), TangentTangentIntersect (67162194) """
AlignmentBeginning = None
AlignmentEnd = None
CompoundCurveCurveIntersect = None
CurveSpiralIntersect = None
CurveTangentIntersect = None
MidCurvePoint = None
ReverseCurveCurveIntersect = None
ReverseSpiralIntersect = None
SpiralCurveIntersect = None
SpiralSpiralIntersect = None
SpiralTangentIntersect = None
StationEquationDecreasing = None
StationEquationIncreasing = None
TangentCurveIntersect = None
TangentSpiralIntersect = None
TangentTangentIntersect = None
value__ = None
class AbbreviationCantType(Enum):
""" enum AbbreviationCantType, values: BeginAlignment (67163513), BeginFullCant (67163510), BeginLevelRail (67163509), EndAlignment (67163514), EndFullCant (67163511), EndLevelRail (67163508), Manual (67163512) """
BeginAlignment = None
BeginFullCant = None
BeginLevelRail = None
EndAlignment = None
EndFullCant = None
EndLevelRail = None
Manual = None
value__ = None
class AbbreviationProfileType(Enum):
""" enum AbbreviationProfileType, values: BeginVerticalCurve (67173890), BeginVerticalCurveElevation (67173892), BeginVerticalCurveStation (67173891), CurveCoefficient (67173898), EndVerticalCurve (67173893), EndVerticalCurveElevation (67173895), EndVerticalCurveStation (67173894), GradeBreak (67173889), GradeChange (67173899), HighPoint (67173896), LowPoint (67173897), OverallHighPoint (67173909), OverallLowPoint (67173910), PointOfVerticalIntersection (67173888), ProfileEnd (67173902), ProfileStart (67173901), VerticalCompoundCurveIntersect (67173903), VerticalCompoundCurveIntersectElevation (67173906), VerticalCompoundCurveIntersectStation (67173905), VerticalReverseCurveIntersect (67173904), VerticalReverseCurveIntersectElevation (67173908), VerticalReverseCurveIntersectStation (67173907) """
BeginVerticalCurve = None
BeginVerticalCurveElevation = None
BeginVerticalCurveStation = None
CurveCoefficient = None
EndVerticalCurve = None
EndVerticalCurveElevation = None
EndVerticalCurveStation = None
GradeBreak = None
GradeChange = None
HighPoint = None
LowPoint = None
OverallHighPoint = None
OverallLowPoint = None
PointOfVerticalIntersection = None
ProfileEnd = None
ProfileStart = None
value__ = None
VerticalCompoundCurveIntersect = None
VerticalCompoundCurveIntersectElevation = None
VerticalCompoundCurveIntersectStation = None
VerticalReverseCurveIntersect = None
VerticalReverseCurveIntersectElevation = None
VerticalReverseCurveIntersectStation = None
class AbbreviationSuperelevationType(Enum):
""" enum AbbreviationSuperelevationType, values: BeginFullSuper (67163478), BeginNormalCrown (67163476), BeginNormalShoulder (67163480), BeginOfAlignment (67163474), BeginShoulderRollover (67163506), EndFullSuper (67163479), EndNormalCrown (67163477), EndNormalShoulder (67163481), EndOfAlignment (67163475), EndShoulderRollover (67163507), LevelCrown (67163482), LowShoulderMatch (67163483), Manual (67163486), ReverseCrown (67163484), ShoulderBreakover (67163485) """
BeginFullSuper = None
BeginNormalCrown = None
BeginNormalShoulder = None
BeginOfAlignment = None
BeginShoulderRollover = None
EndFullSuper = None
EndNormalCrown = None
EndNormalShoulder = None
EndOfAlignment = None
EndShoulderRollover = None
LevelCrown = None
LowShoulderMatch = None
Manual = None
ReverseCrown = None
ShoulderBreakover = None
value__ = None
class AutomaticManual(Enum):
""" enum AutomaticManual, values: Automatic (0), AutomaticObject (1), Manual (2), None (3) """
Automatic = None
AutomaticObject = None
Manual = None
None = None
value__ = None
class DrawingUnitType(Enum):
""" enum DrawingUnitType, values: Feet (30), Meters (2) """
Feet = None
Meters = None
value__ = None
class GeographicCoordinateType(Enum):
""" enum GeographicCoordinateType, values: LatLong (0), LongLat (1) """
LatLong = None
LongLat = None
value__ = None
class GridCoordinateType(Enum):
""" enum GridCoordinateType, values: EastingNorthing (0), NorthingEasting (1) """
EastingNorthing = None
NorthingEasting = None
value__ = None
class GridScaleFactorType(Enum):
""" enum GridScaleFactorType, values: PrismodialFormula (3), ReferencePoint (2), Unity (0), UserDefined (1) """
PrismodialFormula = None
ReferencePoint = None
Unity = None
UserDefined = None
value__ = None
class ImperialToMetricConversionType(Enum):
""" enum ImperialToMetricConversionType, values: InternationalFoot (536870912), UsSurveyFoot (1073741824) """
InternationalFoot = None
UsSurveyFoot = None
value__ = None
class LandXMLAngularUnits(Enum):
""" enum LandXMLAngularUnits, values: DegreesDecimal (0), DegreesDms (1), Grads (2), Radians (3) """
DegreesDecimal = None
DegreesDms = None
Grads = None
Radians = None
value__ = None
class LandXMLAttributeExportType(Enum):
""" enum LandXMLAttributeExportType, values: Disabled (0), FullDescription (2), RawDescription (1) """
Disabled = None
FullDescription = None
RawDescription = None
value__ = None
class LandXMLConflictResolutionType(Enum):
""" enum LandXMLConflictResolutionType, values: Append (2), Skip (0), Update (1) """
Append = None
Skip = None
Update = None
value__ = None
class LandXMLImperialUnitType(Enum):
""" enum LandXMLImperialUnitType, values: Foot (30), Inch (31), Mile (44), Yard (33) """
Foot = None
Inch = None
Mile = None
value__ = None
Yard = None
class LandXMLLinearUnits(Enum):
""" enum LandXMLLinearUnits, values: InternationalFoot (30), SurveyFoot (54) """
InternationalFoot = None
SurveyFoot = None
value__ = None
class LandXMLMetricUnitType(Enum):
""" enum LandXMLMetricUnitType, values: CentiMeter (24), DeciMeter (23), KiloMeter (20), Meter (2), MilliMeter (25) """
CentiMeter = None
DeciMeter = None
KiloMeter = None
Meter = None
MilliMeter = None
value__ = None
class LandXMLPointDescriptionType(Enum):
""" enum LandXMLPointDescriptionType, values: UseCodeThenDesc (2), UseCodeValue (0), UseDescThenCode (3), UseDescValue (1) """
UseCodeThenDesc = None
UseCodeValue = None
UseDescThenCode = None
UseDescValue = None
value__ = None
class LandXMLSurfaceDataExportType(Enum):
""" enum LandXMLSurfaceDataExportType, values: PointsAndFaces (1), PointsOnly (0) """
PointsAndFaces = None
PointsOnly = None
value__ = None
class LandXMLSurfaceDataImportType(Enum):
""" enum LandXMLSurfaceDataImportType, values: FullImport (1), QuickImport (0) """
FullImport = None
QuickImport = None
value__ = None
class LocalCoordinateType(Enum):
""" enum LocalCoordinateType, values: EastingNorthing (0), NorthingEasting (1), XY (2), YX (3) """
EastingNorthing = None
NorthingEasting = None
value__ = None
XY = None
YX = None
class MapcheckAngleType(Enum):
""" enum MapcheckAngleType, values: Angle (1), DeflectionAngle (2), Direction (0) """
Angle = None
DeflectionAngle = None
Direction = None
value__ = None
class MapcheckCurveDirectionType(Enum):
""" enum MapcheckCurveDirectionType, values: Clockwise (0), CounterClockwise (1) """
Clockwise = None
CounterClockwise = None
value__ = None
class MapcheckSideType(Enum):
""" enum MapcheckSideType, values: Curve (1), Line (0) """
Curve = None
Line = None
value__ = None
class MapcheckTraverseMethodType(Enum):
""" enum MapcheckTraverseMethodType, values: AcrossChord (0), ThroughRadius (1) """
AcrossChord = None
ThroughRadius = None
value__ = None
class ObjectLayerModifierType(Enum):
""" enum ObjectLayerModifierType, values: None (0), Prefix (1), Suffix (2) """
None = None
Prefix = None
Suffix = None
value__ = None
class SectionViewAnchorType(Enum):
""" enum SectionViewAnchorType, values: BottomCenter (7), BottomLeft (6), BottomRight (8), MiddleCenter (4), MiddleLeft (3), MiddleRight (5), TopCenter (1), TopLeft (0), TopRight (2) """
BottomCenter = None
BottomLeft = None
BottomRight = None
MiddleCenter = None
MiddleLeft = None
MiddleRight = None
TopCenter = None
TopLeft = None
TopRight = None
value__ = None
class SettingsAbbreviation(CivilWrapper<AcDbDatabase>):
# no doc
def Dispose(self):
""" Dispose(self: CivilWrapper<AcDbDatabase>, A_0: bool) """
pass
AlignmentGeoPointEntityData = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AlignmentGeoPointEntityData(self: SettingsAbbreviation) -> SettingsAbbreviationAlignmentEnhanced
"""
AlignmentGeoPointText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AlignmentGeoPointText(self: SettingsAbbreviation) -> SettingsAbbreviationAlignment
"""
Cant = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Cant(self: SettingsAbbreviation) -> SettingsAbbreviationCant
"""
GeneralText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GeneralText(self: SettingsAbbreviation) -> SettingsAbbreviationGeneral
"""
Profile = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Profile(self: SettingsAbbreviation) -> SettingsAbbreviationProfile
"""
Superelevation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Superelevation(self: SettingsAbbreviation) -> SettingsAbbreviationSuperelevation
"""
class SettingsAbbreviationAlignment(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def GetAlignmentAbbreviation(self, type):
""" GetAlignmentAbbreviation(self: SettingsAbbreviationAlignment, type: AbbreviationAlignmentType) -> str """
pass
def SetAlignmentAbbreviation(self, type, value):
""" SetAlignmentAbbreviation(self: SettingsAbbreviationAlignment, type: AbbreviationAlignmentType, value: str) """
pass
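# --- Illustrative sketch (not part of the generated stub) ---------------------
# The Get/Set pairs on these settings classes take an enum selector plus a
# string value.  AbbreviationAlignmentType members are not listed in this stub,
# so the selector is passed in by the caller here.
def _example_roundtrip_alignment_abbreviation(abbrev_settings, abbrev_type):
    """Read one alignment abbreviation and write the same value back.

    `abbrev_settings` is assumed to be a SettingsAbbreviationAlignment instance
    and `abbrev_type` an AbbreviationAlignmentType member.
    """
    original = abbrev_settings.GetAlignmentAbbreviation(abbrev_type)
    abbrev_settings.SetAlignmentAbbreviation(abbrev_type, original)  # no-op round trip
    return original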
class SettingsAbbreviationAlignmentEnhanced(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def GetAlignmentEnhancedAbbreviation(self, type):
""" GetAlignmentEnhancedAbbreviation(self: SettingsAbbreviationAlignmentEnhanced, type: AbbreviationAlignmentEnhancedType) -> str """
pass
def SetAlignmentEnhancedAbbreviation(self, type, newValue):
""" SetAlignmentEnhancedAbbreviation(self: SettingsAbbreviationAlignmentEnhanced, type: AbbreviationAlignmentEnhancedType, newValue: str) """
pass
class SettingsAbbreviationCant(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def GetCantAbbreviation(self, type):
""" GetCantAbbreviation(self: SettingsAbbreviationCant, type: AbbreviationCantType) -> str """
pass
def SetCantAbbreviation(self, type, newValue):
""" SetCantAbbreviation(self: SettingsAbbreviationCant, type: AbbreviationCantType, newValue: str) """
pass
class SettingsAbbreviationGeneral(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Infinity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Infinity(self: SettingsAbbreviationGeneral) -> str
Set: Infinity(self: SettingsAbbreviationGeneral) = value
"""
Left = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Left(self: SettingsAbbreviationGeneral) -> str
Set: Left(self: SettingsAbbreviationGeneral) = value
"""
Right = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Right(self: SettingsAbbreviationGeneral) -> str
Set: Right(self: SettingsAbbreviationGeneral) = value
"""
class SettingsAbbreviationProfile(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def GetProfileAbbreviation(self, type):
""" GetProfileAbbreviation(self: SettingsAbbreviationProfile, type: AbbreviationProfileType) -> str """
pass
def SetProfileAbbreviation(self, type, newValue):
""" SetProfileAbbreviation(self: SettingsAbbreviationProfile, type: AbbreviationProfileType, newValue: str) """
pass
class SettingsAbbreviationSuperelevation(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def GetSuperelevationAbbreviation(self, type):
""" GetSuperelevationAbbreviation(self: SettingsAbbreviationSuperelevation, type: AbbreviationSuperelevationType) -> str """
pass
def SetSuperelevationAbbreviation(self, type, newValue):
""" SetSuperelevationAbbreviation(self: SettingsAbbreviationSuperelevation, type: AbbreviationSuperelevationType, newValue: str) """
pass
class SettingsAmbient(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
@staticmethod # known case of __new__
def __new__(self, *args): #cannot find CLR constructor
""" __new__(cls: type, root: SettingsRoot, path: str) """
pass
Acceleration = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Acceleration(self: SettingsAmbient) -> SettingsAcceleration
"""
Angle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Angle(self: SettingsAmbient) -> SettingsAngle
"""
Area = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Area(self: SettingsAmbient) -> SettingsArea
"""
Coordinate = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Coordinate(self: SettingsAmbient) -> SettingsCoordinate
"""
DegreeOfCurvature = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DegreeOfCurvature(self: SettingsAmbient) -> SettingsDegreeOfCurvature
"""
Dimension = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Dimension(self: SettingsAmbient) -> SettingsDimension
"""
Direction = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Direction(self: SettingsAmbient) -> SettingsDirection
"""
Distance = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Distance(self: SettingsAmbient) -> SettingsDistance
"""
Elevation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Elevation(self: SettingsAmbient) -> SettingsElevation
"""
General = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: General(self: SettingsAmbient) -> SettingsGeneral
"""
Grade = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Grade(self: SettingsAmbient) -> SettingsGrade
"""
GradeSlope = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GradeSlope(self: SettingsAmbient) -> SettingsGradeSlope
"""
GridCoordinate = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GridCoordinate(self: SettingsAmbient) -> SettingsGridCoordinate
"""
Labeling = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Labeling(self: SettingsAmbient) -> SettingsLabeling
"""
LatLong = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LatLong(self: SettingsAmbient) -> SettingsLatLong
"""
Pressure = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Pressure(self: SettingsAmbient) -> SettingsPressure
"""
Slope = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Slope(self: SettingsAmbient) -> SettingsSlope
"""
Speed = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Speed(self: SettingsAmbient) -> SettingsSpeed
"""
Station = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Station(self: SettingsAmbient) -> SettingsStation
"""
Time = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Time(self: SettingsAmbient) -> SettingsTime
"""
TransparentCommands = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TransparentCommands(self: SettingsAmbient) -> SettingsTransparentCommands
"""
Unitless = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Unitless(self: SettingsAmbient) -> SettingsUnitless
"""
Volume = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Volume(self: SettingsAmbient) -> SettingsVolume
"""
SettingsAcceleration = None
SettingsAngle = None
SettingsArea = None
SettingsCoordinate = None
SettingsDegreeOfCurvature = None
SettingsDimension = None
SettingsDirection = None
SettingsDistance = None
SettingsElevation = None
    # SettingsFormatNumber`1 = None  -- generic CLR nested type; the backtick-arity name is not a valid Python identifier
SettingsGeneral = None
SettingsGrade = None
SettingsGradeSlope = None
SettingsGridCoordinate = None
SettingsLabeling = None
SettingsLatLong = None
SettingsPressure = None
SettingsSlope = None
SettingsSpeed = None
SettingsStation = None
SettingsTime = None
SettingsTransparentCommands = None
    # SettingsUnitFormatNumber`2 = None  -- generic CLR nested type; the backtick-arity name is not a valid Python identifier
SettingsUnitless = None
SettingsUnitlessNumber = None
    # SettingsUnitNumber`1 = None  -- generic CLR nested type; the backtick-arity name is not a valid Python identifier
SettingsVolume = None
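# --- Illustrative sketch (not part of the generated stub) ---------------------
# SettingsAmbient groups the per-category ambient settings (Angle, Distance,
# Station, ...) declared above.  The members of the returned SettingsAngle /
# SettingsDistance / SettingsStation objects are not part of this stub, so the
# sketch below only collects the grouping objects themselves.
def _example_read_ambient_groups(ambient):
    """`ambient` is assumed to be a SettingsAmbient (or subclass) instance."""
    return {
        "angle": ambient.Angle,
        "distance": ambient.Distance,
        "station": ambient.Station,
    }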
class SettingsAlignment(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AutomaticWideningAroundCurves = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AutomaticWideningAroundCurves(self: SettingsAlignment) -> SettingsAutomaticWideningAroundCurves
"""
CantOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CantOptions(self: SettingsAlignment) -> SettingsCantOptions
"""
ConstraintEditing = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ConstraintEditing(self: SettingsAlignment) -> SettingsConstraintEditing
"""
CriteriaBasedDesignOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CriteriaBasedDesignOptions(self: SettingsAlignment) -> SettingsCriteriaBasedDesignOptions
"""
Data = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Data(self: SettingsAlignment) -> SettingsData
"""
DefaultNameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DefaultNameFormat(self: SettingsAlignment) -> SettingsDefaultNameFormat
"""
DynamicAlignmentHighlight = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DynamicAlignmentHighlight(self: SettingsAlignment) -> SettingsDynamicAlignmentHighlight
"""
ImpliedPointOfIntersection = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ImpliedPointOfIntersection(self: SettingsAlignment) -> SettingsImpliedPointOfIntersection
"""
RailOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RailOptions(self: SettingsAlignment) -> SettingsRailAlignmentOptions
"""
StationIndexing = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: StationIndexing(self: SettingsAlignment) -> SettingsStationIndexing
"""
StyleSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: StyleSettings(self: SettingsAlignment) -> SettingsStyles
"""
SuperelevationOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SuperelevationOptions(self: SettingsAlignment) -> SettingsSuperelevationOptions
"""
SettingsAutomaticWideningAroundCurves = None
SettingsCantOptions = None
SettingsConstraintEditing = None
SettingsCriteriaBasedDesignOptions = None
SettingsData = None
SettingsDefaultNameFormat = None
SettingsDynamicAlignmentHighlight = None
SettingsImpliedPointOfIntersection = None
SettingsRailAlignmentOptions = None
SettingsStationIndexing = None
SettingsStyles = None
SettingsSuperelevationOptions = None
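# --- Illustrative sketch (not part of the generated stub) ---------------------
# Feature-level settings such as SettingsAlignment, and the command-level
# SettingsCmd* classes that follow, all derive from SettingsAmbient.  One way
# to obtain them is the generic GetSettings method on the document's settings
# root; the exact call below (IronPython generic-method syntax) is an
# assumption and should be verified against the Autodesk.Civil documentation.
def _example_get_alignment_feature_settings():
    from Autodesk.Civil.ApplicationServices import CivilApplication  # only importable inside Civil 3D
    civil_doc = CivilApplication.ActiveDocument
    return civil_doc.Settings.GetSettings[SettingsAlignment]()  # assumed API; expected to return SettingsAlignment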
class SettingsAssembly(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsAssembly) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsAssembly) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsStyles = None
class SettingsBuildingSite(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsBuildingSite) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsBuildingSite) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsStyles = None
class SettingsCantView(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsCantView) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsCantView) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsStyles = None
class SettingsCatchment(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameTemplate = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameTemplate(self: SettingsCatchment) -> PropertyString
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsCatchment) -> SettingsStyles
"""
SettingsStyles = None
class SettingsCmdAddAlignmentCurveTable(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddAlignmentCurveTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
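# --- Illustrative sketch (not part of the generated stub) ---------------------
# Most of the SettingsCmd*Table classes in this module expose a TableCreation
# property returning a SettingsCmdTableCreation object.  Its own members are
# not listed in this stub, so this helper simply hands back the grouping
# object for further inspection.
def _example_table_creation(cmd_table_settings):
    """`cmd_table_settings` is assumed to be one of the SettingsCmd*Table classes."""
    return cmd_table_settings.TableCreation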
class SettingsCmdAddAlignmentLineTable(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddAlignmentLineTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddAlignmentOffLbl(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddAlignmentOffXYLbl(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddAlignmentSegmentTable(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddAlignmentSegmentTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddAlignmentSpiralTable(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddAlignmentSpiralTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddAlignPointOfIntLbl(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddAlignPointOfIntLbls(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddAlignSegLbl(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddAlignSegLbls(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddAlignTagentLbl(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddAlignTagentLbls(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsPressureNetwork(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Cover = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Cover(self: SettingsPressureNetwork) -> SettingsDepthOfCover
"""
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsPressureNetwork) -> SettingsNameFormat
"""
ProfileLabelPlacement = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ProfileLabelPlacement(self: SettingsPressureNetwork) -> SettingsProfileLabelPlacement
"""
SectionLabelPlacement = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SectionLabelPlacement(self: SettingsPressureNetwork) -> SettingsSectionLabelPlacement
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsPressureNetwork) -> SettingsStyles
"""
SettingsDepthOfCover = None
SettingsNameFormat = None
SettingsProfileLabelPlacement = None
SettingsSectionLabelPlacement = None
SettingsStyles = None
class SettingsCmdAddAppurtTable(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddAppurtTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsSurface(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ContourLabeling = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ContourLabeling(self: SettingsSurface) -> SettingsContourLabeling
"""
Defaults = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Defaults(self: SettingsSurface) -> SettingsDefaults
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsSurface) -> SettingsStyles
"""
SettingsContourLabeling = None
SettingsDefaults = None
SettingsStyles = None
class SettingsCmdAddContourLabeling(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddContourLabelingGroup(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AddContourLabeling = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AddContourLabeling(self: SettingsCmdAddContourLabelingGroup) -> SettingsCmdAddContourLabeling
"""
SettingsCmdAddContourLabeling = None
class SettingsCmdAddContourLabelingSingle(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddFittingTable(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddFittingTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsIntersection(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsIntersection) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsIntersection) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsStyles = None
class SettingsCmdAddIntersectionLabel(SettingsIntersection):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsGeneral(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsGeneral) -> SettingsStyles
"""
SettingsStyles = None
class SettingsCmdAddLineBetweenPoints(SettingsGeneral):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsQuantityTakeoff(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsQuantityTakeoff) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsQuantityTakeoff) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsStyles = None
class SettingsCmdAddMaterialVolumeTable(SettingsQuantityTakeoff):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddMaterialVolumeTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsPipeNetwork(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Default = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Default(self: SettingsPipeNetwork) -> SettingsDefault
"""
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsPipeNetwork) -> SettingsNameFormat
"""
ProfileLabelPlacement = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ProfileLabelPlacement(self: SettingsPipeNetwork) -> SettingsProfileLabelPlacement
"""
Rules = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Rules(self: SettingsPipeNetwork) -> SettingsRules
"""
SectionLabelPlacement = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SectionLabelPlacement(self: SettingsPipeNetwork) -> SettingsSectionLabelPlacement
"""
StormSewersMigration = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: StormSewersMigration(self: SettingsPipeNetwork) -> SettingsStormSewersMigration
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsPipeNetwork) -> SettingsStyles
"""
SettingsDefault = None
SettingsNameFormat = None
SettingsProfileLabelPlacement = None
SettingsRules = None
SettingsSectionLabelPlacement = None
SettingsStormSewersMigration = None
SettingsStyles = None
class SettingsCmdAddNetworkPartPlanLabel(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddNetworkPartProfLabel(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddNetworkPartSectLabel(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddNetworkPartsToProf(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddNetworkPipeTable(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddNetworkPipeTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddNetworkPlanLabels(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddNetworkProfLabels(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddNetworkSectLabels(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddNetworkStructTable(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddNetworkStructTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddNoteLabel(SettingsGeneral):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsParcel(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsParcel) -> SettingsStyles
"""
SettingsStyles = None
class SettingsCmdAddParcelAreaLabel(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddParcelCurveTable(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddParcelCurveTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddParcelLineLabel(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddParcelLineTable(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddParcelLineTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddParcelSegmentLabels(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Options = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Options(self: SettingsCmdAddParcelSegmentLabels) -> SettingsCmdOptions
"""
SettingsCmdOptions = None
class SettingsCmdAddParcelSegmentTable(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddParcelSegmentTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddParcelTable(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddParcelTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsPointCloud(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DefaultNameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DefaultNameFormat(self: SettingsPointCloud) -> SettingsDefaultNameFormat
"""
StyleSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: StyleSettings(self: SettingsPointCloud) -> SettingsStyles
"""
SettingsDefaultNameFormat = None
SettingsStyles = None
class SettingsCmdAddPointCloudPoints(SettingsPointCloud):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DefaultFileFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DefaultFileFormat(self: SettingsCmdAddPointCloudPoints) -> PropertyEnum[PointCloudDefaultFileExtensionType]
"""
class SettingsCmdAddPointsToSurface(SettingsPointCloud):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
MidOrdinateDistance = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MidOrdinateDistance(self: SettingsCmdAddPointsToSurface) -> PropertyDouble
"""
RegionOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RegionOption(self: SettingsCmdAddPointsToSurface) -> PropertyEnum[PointCloudRegionType]
"""
SurfaceOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SurfaceOption(self: SettingsCmdAddPointsToSurface) -> PropertyEnum[PointCloudSurfaceType]
"""
class SettingsPoint(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsPoint) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsPoint) -> SettingsStyles
"""
UpdatePoints = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: UpdatePoints(self: SettingsPoint) -> SettingsUpdatePoints
"""
SettingsNameFormat = None
SettingsStyles = None
SettingsUpdatePoints = None
class SettingsCmdAddPointTable(SettingsPoint):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddPointTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddPressurePartPlanLabel(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddPressurePartProfLabel(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddPressurePartsToProf(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddPressurePipeTable(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddPressurePipeTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddPressurePlanLabels(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddPressureProfLabels(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsProfileView(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Creation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Creation(self: SettingsProfileView) -> SettingsCreation
"""
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsProfileView) -> SettingsNameFormat
"""
ProjectionLabelPlacement = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ProjectionLabelPlacement(self: SettingsProfileView) -> SettingsProjectionLabelPlacement
"""
SplitOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SplitOptions(self: SettingsProfileView) -> SettingsSplitOptions
"""
StackedOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: StackedOptions(self: SettingsProfileView) -> SettingsStackedOptions
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsProfileView) -> SettingsStyles
"""
SettingsCreation = None
SettingsNameFormat = None
SettingsProjectionLabelPlacement = None
SettingsSplitOptions = None
SettingsStackedOptions = None
SettingsStyles = None
class SettingsCmdAddProfileViewDepthLbl(SettingsProfileView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddProfileViewStaElevLbl(SettingsProfileView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsSectionView(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsSectionView) -> SettingsNameFormat
"""
ProjectionLabelPlacement = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ProjectionLabelPlacement(self: SettingsSectionView) -> SettingsProjectionLabelPlacement
"""
SectionViewCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SectionViewCreation(self: SettingsSectionView) -> SettingsSectionViewCreation
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsSectionView) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsProjectionLabelPlacement = None
SettingsSectionViewCreation = None
SettingsStyles = None
class SettingsCmdAddSectionViewGradeLbl(SettingsSectionView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSectionViewOffElevLbl(SettingsSectionView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSegmentLabel(SettingsGeneral):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSegmentLabels(SettingsGeneral):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSpanningPipePlanLabel(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSpanningPipeProfLabel(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSpotElevLabelsOnGrid(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSurfaceBoundaries(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DataOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DataOptions(self: SettingsCmdAddSurfaceBoundaries) -> SettingsCmdAddDataOptions
"""
SettingsCmdAddDataOptions = None
class SettingsCmdAddSurfaceBreaklines(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DataOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DataOptions(self: SettingsCmdAddSurfaceBreaklines) -> SettingsCmdAddDataOptions
"""
SettingsCmdAddDataOptions = None
class SettingsCmdAddSurfaceContours(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AddDataOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AddDataOptions(self: SettingsCmdAddSurfaceContours) -> SettingsCmdAddDataOptions
"""
SettingsCmdAddDataOptions = None
class SettingsCmdAddSurfaceDemFile(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ImportOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ImportOptions(self: SettingsCmdAddSurfaceDemFile) -> SettingsCmdImportOptions
"""
SettingsCmdImportOptions = None
class SettingsCmdAddSurfaceDrawingObjects(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DataOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DataOptions(self: SettingsCmdAddSurfaceDrawingObjects) -> SettingsCmdAddDataOptions
"""
SettingsCmdAddDataOptions = None
class SettingsCmdAddSurfaceFigSurveyQuery(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DataOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DataOptions(self: SettingsCmdAddSurfaceFigSurveyQuery) -> SettingsCmdAddDataOptions
"""
SettingsCmdAddDataOptions = None
class SettingsCmdAddSurfacePointSurveyQuery(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSurfaceSlopeLabel(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSurfaceSpotElevLabel(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsSurvey(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsSurvey) -> SettingsStyles
"""
SettingsStyles = None
class SettingsCmdAddSvFigureLabel(SettingsSurvey):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSvFigureSegmentLabel(SettingsSurvey):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddSvFigureSegmentLabels(SettingsSurvey):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdAddTotalVolumeTable(SettingsQuantityTakeoff):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdAddTotalVolumeTable) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsCmdAddWidening(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
LinearTransitionAroundCurves = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LinearTransitionAroundCurves(self: SettingsCmdAddWidening) -> SettingsCmdLinearTransitionAroundCurves
"""
WideningOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: WideningOptions(self: SettingsCmdAddWidening) -> SettingsCmdWideningOptions
"""
SettingsCmdLinearTransitionAroundCurves = None
SettingsCmdWideningOptions = None
class SettingsCmdAssignPayItemToArea(SettingsQuantityTakeoff):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AssignPayItemToAreaOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AssignPayItemToAreaOption(self: SettingsCmdAssignPayItemToArea) -> SettingsCmdAssignPayItemToAreaOptions
"""
SettingsCmdAssignPayItemToAreaOptions = None
class SettingsCmdCatchmentArea(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DischargePointStyle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DischargePointStyle(self: SettingsCmdCatchmentArea) -> PropertyString
"""
DischargePointStyleId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DischargePointStyleId(self: SettingsCmdCatchmentArea) -> PropertyObjectId
"""
DisplayDisChargePoint = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DisplayDisChargePoint(self: SettingsCmdCatchmentArea) -> PropertyBoolean
"""
Layer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Layer(self: SettingsCmdCatchmentArea) -> PropertyLayer
"""
ObjectType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ObjectType(self: SettingsCmdCatchmentArea) -> PropertyEnum[CatchmentObjectType]
"""
class SettingsCmdComputeMaterials(SettingsQuantityTakeoff):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DefineMaterialOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DefineMaterialOption(self: SettingsCmdComputeMaterials) -> SettingsCmdDefineMaterial
"""
SettingsCmdDefineMaterial = None
class SettingsCmdConvertPointstoSdskPoints(SettingsPoint):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Layer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Layer(self: SettingsCmdConvertPointstoSdskPoints) -> SettingsCmdLayer
"""
SettingsCmdLayer = None
class SettingsCorridor(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsCorridor) -> SettingsNameFormat
"""
RegionHighlightGraphics = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RegionHighlightGraphics(self: SettingsCorridor) -> SettingsRegionHighlightGraphics
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsCorridor) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsRegionHighlightGraphics = None
SettingsStyles = None
class SettingsCmdCorridorExtractSurfaces(SettingsCorridor):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateAlignFromCorridor(SettingsCorridor):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AlignmentTypeOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AlignmentTypeOption(self: SettingsCmdCreateAlignFromCorridor) -> SettingsCmdAlignmentTypeOption
"""
CriteriaBasedDesignOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CriteriaBasedDesignOptions(self: SettingsCmdCreateAlignFromCorridor) -> SettingsCmdCriteriaBasedDesignOptions
"""
ProfileCreationOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ProfileCreationOption(self: SettingsCmdCreateAlignFromCorridor) -> SettingsCmdProfileCreationOption
"""
SettingsCmdAlignmentTypeOption = None
SettingsCmdCriteriaBasedDesignOptions = None
SettingsCmdProfileCreationOption = None
class SettingsCmdCreateAlignFromNetwork(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AlignmentTypeOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AlignmentTypeOption(self: SettingsCmdCreateAlignFromNetwork) -> SettingsCmdAlignmentTypeOption
"""
SettingsCmdAlignmentTypeOption = None
class SettingsCmdCreateAlignFromPressureNW(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AlignmentTypeOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AlignmentTypeOption(self: SettingsCmdCreateAlignFromPressureNW) -> SettingsCmdAlignmentTypeOption
"""
SettingsCmdAlignmentTypeOption = None
class SettingsCmdCreateAlignmentEntities(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AlignmentTypeOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AlignmentTypeOption(self: SettingsCmdCreateAlignmentEntities) -> SettingsCmdAlignmentTypeOption
"""
CreateFromEntities = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CreateFromEntities(self: SettingsCmdCreateAlignmentEntities) -> SettingsCmdCreateFromEntities
"""
SettingsCmdAlignmentTypeOption = None
SettingsCmdCreateFromEntities = None
class SettingsCmdCreateAlignmentLayout(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AlignmentTypeOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AlignmentTypeOption(self: SettingsCmdCreateAlignmentLayout) -> SettingsCmdAlignmentTypeOption
"""
CurveAndSpiralSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CurveAndSpiralSettings(self: SettingsCmdCreateAlignmentLayout) -> SettingsCmdCurveAndSpiralSettings
"""
CurveTessellationOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CurveTessellationOption(self: SettingsCmdCreateAlignmentLayout) -> SettingsCmdCurveTessellationOption
"""
RegressionGraphOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RegressionGraphOption(self: SettingsCmdCreateAlignmentLayout) -> SettingsCmdRegressionGraphOption
"""
SettingsCmdAlignmentTypeOption = None
SettingsCmdCurveAndSpiralSettings = None
SettingsCmdCurveTessellationOption = None
SettingsCmdRegressionGraphOption = None
class SettingsCmdCreateAlignmentReference(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateArcByBestFit(SettingsGeneral):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
CurveTessellationOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CurveTessellationOption(self: SettingsCmdCreateArcByBestFit) -> SettingsCmdCurveTessellationOption
"""
RegressionGraphOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RegressionGraphOption(self: SettingsCmdCreateArcByBestFit) -> SettingsCmdRegressionGraphOption
"""
SettingsCmdCurveTessellationOption = None
SettingsCmdRegressionGraphOption = None
class SettingsCmdCreateAssembly(SettingsAssembly):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateAssemblyTool(SettingsAssembly):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateCantView(SettingsCantView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateCatchmentFromObject(SettingsCatchment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Catchment = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Catchment(self: SettingsCmdCreateCatchmentFromObject) -> SettingsCmdCatchment
"""
ChannelFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ChannelFlow(self: SettingsCmdCreateCatchmentFromObject) -> SettingsCmdChannelFlow
"""
HydrologicalProperties = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: HydrologicalProperties(self: SettingsCmdCreateCatchmentFromObject) -> SettingsCmdHydrologicalProperties
"""
ShallowConcentratedFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ShallowConcentratedFlow(self: SettingsCmdCreateCatchmentFromObject) -> SettingsCmdShallowConcentratedFlow
"""
SheetFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SheetFlow(self: SettingsCmdCreateCatchmentFromObject) -> SettingsCmdSheetFlow
"""
TimeOfConcentrationMethod = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TimeOfConcentrationMethod(self: SettingsCmdCreateCatchmentFromObject) -> PropertyEnum[CatchmentTimeOfConcentrationMethodType]
"""
SettingsCmdCatchment = None
SettingsCmdChannelFlow = None
SettingsCmdHydrologicalProperties = None
SettingsCmdShallowConcentratedFlow = None
SettingsCmdSheetFlow = None
class SettingsCmdCreateCatchmentFromSurface(SettingsCatchment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Catchment = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Catchment(self: SettingsCmdCreateCatchmentFromSurface) -> SettingsCmdCatchment
"""
ChannelFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ChannelFlow(self: SettingsCmdCreateCatchmentFromSurface) -> SettingsCmdChannelFlow
"""
HydrologicalProperties = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: HydrologicalProperties(self: SettingsCmdCreateCatchmentFromSurface) -> SettingsCmdHydrologicalProperties
"""
ShallowConcentratedFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ShallowConcentratedFlow(self: SettingsCmdCreateCatchmentFromSurface) -> SettingsCmdShallowConcentratedFlow
"""
SheetFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SheetFlow(self: SettingsCmdCreateCatchmentFromSurface) -> SettingsCmdSheetFlow
"""
TimeOfConcentrationMethod = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TimeOfConcentrationMethod(self: SettingsCmdCreateCatchmentFromSurface) -> PropertyEnum[CatchmentTimeOfConcentrationMethodType]
"""
SettingsCmdCatchment = None
SettingsCmdChannelFlow = None
SettingsCmdHydrologicalProperties = None
SettingsCmdShallowConcentratedFlow = None
SettingsCmdSheetFlow = None
class SettingsCmdCreateCatchmentGroup(SettingsCatchment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateCorridor(SettingsCorridor):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AssemblyInsertion = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AssemblyInsertion(self: SettingsCmdCreateCorridor) -> SettingsCmdAssemblyInsertion
"""
SettingsCmdAssemblyInsertion = None
class SettingsGrading(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsGrading) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsGrading) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsStyles = None
class SettingsCmdCreateFeatureLineFromAlign(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
FeatureLineCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLineCreation(self: SettingsCmdCreateFeatureLineFromAlign) -> SettingsCmdFeatureLineCreation
"""
SettingsCmdFeatureLineCreation = None
class SettingsCmdCreateFeatureLines(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
FeatureLineCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLineCreation(self: SettingsCmdCreateFeatureLines) -> SettingsCmdFeatureLineCreation
"""
SettingsCmdFeatureLineCreation = None
class SettingsCmdCreateFlowSegment(SettingsCatchment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ChannelFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ChannelFlow(self: SettingsCmdCreateFlowSegment) -> SettingsCmdChannelFlow
"""
ShallowConcentratedFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ShallowConcentratedFlow(self: SettingsCmdCreateFlowSegment) -> SettingsCmdShallowConcentratedFlow
"""
SheetFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SheetFlow(self: SettingsCmdCreateFlowSegment) -> SettingsCmdSheetFlow
"""
SettingsCmdChannelFlow = None
SettingsCmdShallowConcentratedFlow = None
SettingsCmdSheetFlow = None
class SettingsCmdCreateGrading(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
GradingCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GradingCreation(self: SettingsCmdCreateGrading) -> SettingsCmdGradingCreation
"""
SettingsCmdGradingCreation = None
class SettingsCmdCreateGradingGroup(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
GradingGroupCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GradingGroupCreation(self: SettingsCmdCreateGradingGroup) -> SettingsCmdGradingGroupCreation
"""
SettingsCmdGradingGroupCreation = None
class SettingsCmdCreateInterferenceCheck(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
InterferenceCriteria = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: InterferenceCriteria(self: SettingsCmdCreateInterferenceCheck) -> SettingsCmdInterferenceCriteria
"""
SettingsCmdInterferenceCriteria = None
class SettingsCmdCreateIntersection(SettingsIntersection):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AssemblyInsertion = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AssemblyInsertion(self: SettingsCmdCreateIntersection) -> SettingsCmdAssemblyInsertion
"""
CrossSlopes = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CrossSlopes(self: SettingsCmdCreateIntersection) -> SettingsCmdCrossSlopes
"""
CurbReturnParameters = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CurbReturnParameters(self: SettingsCmdCreateIntersection) -> SettingsCmdCurbReturnParameters
"""
CurbReturnProfileRules = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CurbReturnProfileRules(self: SettingsCmdCreateIntersection) -> SettingsCmdCurbReturnProfileRules
"""
IntersectionOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IntersectionOptions(self: SettingsCmdCreateIntersection) -> SettingsCmdIntersectionOptions
"""
Offsets = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Offsets(self: SettingsCmdCreateIntersection) -> SettingsCmdOffsets
"""
SecondaryRoadProfileRules = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SecondaryRoadProfileRules(self: SettingsCmdCreateIntersection) -> SettingsCmdSecondaryRoadProfileRules
"""
WideningParameters = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: WideningParameters(self: SettingsCmdCreateIntersection) -> SettingsCmdWideningParameters
"""
SettingsCmdAssemblyInsertion = None
SettingsCmdCrossSlopes = None
SettingsCmdCurbReturnParameters = None
SettingsCmdCurbReturnProfileRules = None
SettingsCmdIntersectionOptions = None
SettingsCmdOffsets = None
SettingsCmdSecondaryRoadProfileRules = None
SettingsCmdWideningParameters = None
class SettingsCmdCreateLineByBestFit(SettingsGeneral):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
CurveTessellationOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CurveTessellationOption(self: SettingsCmdCreateLineByBestFit) -> SettingsCmdCurveTessellationOption
"""
RegressionGraphOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RegressionGraphOption(self: SettingsCmdCreateLineByBestFit) -> SettingsCmdRegressionGraphOption
"""
SettingsCmdCurveTessellationOption = None
SettingsCmdRegressionGraphOption = None
class SettingsMassHaulView(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
MassHaulCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MassHaulCreation(self: SettingsMassHaulView) -> SettingsMassHaulCreation
"""
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsMassHaulView) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsMassHaulView) -> SettingsStyles
"""
SettingsMassHaulCreation = None
SettingsNameFormat = None
SettingsStyles = None
class SettingsCmdCreateMassHaulDiagram(SettingsMassHaulView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
MassHaulCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MassHaulCreation(self: SettingsCmdCreateMassHaulDiagram) -> SettingsCmdMassHaulCreation
"""
SettingsCmdMassHaulCreation = None
class SettingsCmdCreateMultipleProfileView(SettingsProfileView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
MultipleProfileViewCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MultipleProfileViewCreation(self: SettingsCmdCreateMultipleProfileView) -> SettingsCmdMultipleProfileViewCreation
"""
SettingsCmdMultipleProfileViewCreation = None
class SettingsCmdCreateMultipleSectionView(SettingsSectionView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
MultipleSectionViewCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MultipleSectionViewCreation(self: SettingsCmdCreateMultipleSectionView) -> SettingsCmdMultipleSectionViewCreation
"""
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdCreateMultipleSectionView) -> SettingsCmdTableCreation
"""
SettingsCmdMultipleSectionViewCreation = None
SettingsCmdTableCreation = None
class SettingsCmdCreateNetwork(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DefaultLayoutCommand = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DefaultLayoutCommand(self: SettingsCmdCreateNetwork) -> PropertyEnum[NetworkDefaultLayoutCommandType]
"""
LabelNewParts = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LabelNewParts(self: SettingsCmdCreateNetwork) -> SettingsCmdLabelNewParts
"""
SettingsCmdLabelNewParts = None
class SettingsCmdCreateNetworkFromObject(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateNetworkPartsList(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateNetworkPartsListFull(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateNetworkReference(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateOffsetAlignment(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
OffsetAlignmentOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: OffsetAlignmentOptions(self: SettingsCmdCreateOffsetAlignment) -> SettingsCmdOffsetAlignmentOptions
"""
SettingsCmdOffsetAlignmentOptions = None
class SettingsCmdCreateParabolaByBestFit(SettingsGeneral):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
CurveTessellationOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CurveTessellationOption(self: SettingsCmdCreateParabolaByBestFit) -> SettingsCmdCurveTessellationOption
"""
RegressionGraphOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RegressionGraphOption(self: SettingsCmdCreateParabolaByBestFit) -> SettingsCmdRegressionGraphOption
"""
SettingsCmdCurveTessellationOption = None
SettingsCmdRegressionGraphOption = None
class SettingsCmdCreateParcelByLayout(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AutomaticLayout = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AutomaticLayout(self: SettingsCmdCreateParcelByLayout) -> SettingsCmdAutomaticLayout
"""
ConvertFromEntities = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ConvertFromEntities(self: SettingsCmdCreateParcelByLayout) -> SettingsCmdConvertFromEntities
"""
ParcelSizing = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ParcelSizing(self: SettingsCmdCreateParcelByLayout) -> SettingsCmdParcelSizing
"""
PreviewGraphics = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PreviewGraphics(self: SettingsCmdCreateParcelByLayout) -> SettingsCmdPreviewGraphics
"""
SettingsCmdAutomaticLayout = None
SettingsCmdConvertFromEntities = None
SettingsCmdParcelSizing = None
SettingsCmdPreviewGraphics = None
class SettingsCmdCreateParcelFromObjects(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ConvertFromEntities = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ConvertFromEntities(self: SettingsCmdCreateParcelFromObjects) -> SettingsCmdConvertFromEntities
"""
SettingsCmdConvertFromEntities = None
class SettingsCmdCreateParcelROW(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
CleanupAtAlignmentIntersections = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CleanupAtAlignmentIntersections(self: SettingsCmdCreateParcelROW) -> SettingsCmdCleanupAtAlignmentIntersections
"""
CleanupAtParcelBoundaries = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CleanupAtParcelBoundaries(self: SettingsCmdCreateParcelROW) -> SettingsCmdCleanupAtParcelBoundaries
"""
CreateParcelRightOfWay = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CreateParcelRightOfWay(self: SettingsCmdCreateParcelROW) -> SettingsCmdCreateParcelRightOfWay
"""
SettingsCmdCleanupAtAlignmentIntersections = None
SettingsCmdCleanupAtParcelBoundaries = None
SettingsCmdCreateParcelRightOfWay = None
class SettingsCmdCreatePointCloud(SettingsPointCloud):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DefaultLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DefaultLayer(self: SettingsCmdCreatePointCloud) -> SettingsCmdDefaultLayer
"""
FileFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FileFormat(self: SettingsCmdCreatePointCloud) -> PropertyEnum[PointCloudDefaultFileExtensionType]
"""
SettingsCmdDefaultLayer = None
class SettingsCmdCreatePointGroup(SettingsPoint):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreatePoints(SettingsPoint):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Layer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Layer(self: SettingsCmdCreatePoints) -> SettingsCmdLayer
"""
PointIdentity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PointIdentity(self: SettingsCmdCreatePoints) -> SettingsCmdPointIdentity
"""
PointsCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PointsCreation(self: SettingsCmdCreatePoints) -> SettingsCmdPointsCreation
"""
SettingsCmdLayer = None
SettingsCmdPointIdentity = None
SettingsCmdPointsCreation = None
class SettingsCmdCreatePointsFromCorridor(SettingsCorridor):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreatePolylineFromCorridor(SettingsCorridor):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsSuperelevationView(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsSuperelevationView) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsSuperelevationView) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsStyles = None
class SettingsCmdCreatePolylineFromSuper(SettingsSuperelevationView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreatePressureFromIndModel(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreatePressureNetwork(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DepthOfCover = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DepthOfCover(self: SettingsCmdCreatePressureNetwork) -> SettingsCmdDepthOfCover
"""
LabelNewParts = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LabelNewParts(self: SettingsCmdCreatePressureNetwork) -> SettingsCmdLabelNewParts
"""
SettingsCmdDepthOfCover = None
SettingsCmdLabelNewParts = None
class SettingsCmdCreatePressurePartList(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreatePressurePartListFull(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateProfileFromCorridor(SettingsCorridor):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
CriteriaBasedDesignOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CriteriaBasedDesignOptions(self: SettingsCmdCreateProfileFromCorridor) -> SettingsCmdCriteriaBasedDesignOptions
"""
SettingsCmdCriteriaBasedDesignOptions = None
class SettingsProfile(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
CriteriaBasedDesignOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CriteriaBasedDesignOptions(self: SettingsProfile) -> SettingsCriteriaBasedDesignOptions
"""
DefaultNameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DefaultNameFormat(self: SettingsProfile) -> SettingsDefaultNameFormat
"""
ProfilesCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ProfilesCreation(self: SettingsProfile) -> SettingsProfileCreation
"""
StyleSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: StyleSettings(self: SettingsProfile) -> SettingsStyles
"""
SettingsCriteriaBasedDesignOptions = None
SettingsDefaultNameFormat = None
SettingsProfileCreation = None
SettingsStyles = None
class SettingsCmdCreateProfileFromFile(SettingsProfile):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateProfileFromSurface(SettingsProfile):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Geometry = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Geometry(self: SettingsCmdCreateProfileFromSurface) -> SettingsCmdGeometry
"""
SettingsCmdGeometry = None
class SettingsCmdCreateProfileLayout(SettingsProfile):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
CurveTessellationOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CurveTessellationOption(self: SettingsCmdCreateProfileLayout) -> SettingsCmdCurveTessellationOption
"""
RegressionGraphOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RegressionGraphOption(self: SettingsCmdCreateProfileLayout) -> SettingsCmdRegressionGraphOption
"""
SettingsCmdCurveTessellationOption = None
SettingsCmdRegressionGraphOption = None
class SettingsCmdCreateProfileReference(SettingsProfile):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateProfileView(SettingsProfileView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateQuickProfile(SettingsProfile):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
QuickProfile = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: QuickProfile(self: SettingsCmdCreateQuickProfile) -> SettingsCmdQuickProfile
"""
SettingsCmdQuickProfile = None
class SettingsSampleLine(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsSampleLine) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsSampleLine) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsStyles = None
class SettingsCmdCreateSampleLines(SettingsSampleLine):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AdditionalSampleControls = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AdditionalSampleControls(self: SettingsCmdCreateSampleLines) -> SettingsCmdAdditionalSampleControls
"""
Miscellaneous = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Miscellaneous(self: SettingsCmdCreateSampleLines) -> SettingsCmdMiscellaneous
"""
SamplingIncrements = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SamplingIncrements(self: SettingsCmdCreateSampleLines) -> SettingsCmdSamplingIncrements
"""
SwathWidths = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SwathWidths(self: SettingsCmdCreateSampleLines) -> SettingsCmdSwathWidths
"""
SettingsCmdAdditionalSampleControls = None
SettingsCmdMiscellaneous = None
SettingsCmdSamplingIncrements = None
SettingsCmdSwathWidths = None
class SettingsCmdCreateSectionSheets(SettingsSectionView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
SheetCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SheetCreation(self: SettingsCmdCreateSectionSheets) -> SettingsCmdSheetCreation
"""
SettingsCmdSheetCreation = None
class SettingsCmdCreateSectionView(SettingsSectionView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
TableCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TableCreation(self: SettingsCmdCreateSectionView) -> SettingsCmdTableCreation
"""
SettingsCmdTableCreation = None
class SettingsViewFrameGroup(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Information = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Information(self: SettingsViewFrameGroup) -> SettingsInformation
"""
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsViewFrameGroup) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsViewFrameGroup) -> SettingsStyles
"""
SettingsInformation = None
SettingsNameFormat = None
SettingsStyles = None
class SettingsCmdCreateSheets(SettingsViewFrameGroup):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
SheetCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SheetCreation(self: SettingsCmdCreateSheets) -> SettingsCmdSheetCreation
"""
SettingsCmdSheetCreation = None
class SettingsCmdCreateSimpleCorridor(SettingsCorridor):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AssemblyInsertion = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AssemblyInsertion(self: SettingsCmdCreateSimpleCorridor) -> SettingsCmdAssemblyInsertion
"""
SettingsCmdAssemblyInsertion = None
class SettingsCmdCreateSite(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Alignment = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Alignment(self: SettingsCmdCreateSite) -> SettingsCmdAlignment
"""
FeatureLine = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLine(self: SettingsCmdCreateSite) -> SettingsCmdFeatureLine
"""
Parcel = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Parcel(self: SettingsCmdCreateSite) -> SettingsCmdParcel
"""
SettingsCmdAlignment = None
SettingsCmdFeatureLine = None
SettingsCmdParcel = None
class SettingsSubassembly(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DefaultStyles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DefaultStyles(self: SettingsSubassembly) -> SettingsDefaultStyles
"""
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsSubassembly) -> SettingsNameFormat
"""
SettingsDefaultStyles = None
SettingsNameFormat = None
class SettingsCmdCreateSubassemblyTool(SettingsSubassembly):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
SubassemblyOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SubassemblyOptions(self: SettingsCmdCreateSubassemblyTool) -> SettingsCmdSubassemblyOptions
"""
SettingsCmdSubassemblyOptions = None
class SettingsCmdCreateSubFromPline(SettingsSubassembly):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
CreateFromEntities = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CreateFromEntities(self: SettingsCmdCreateSubFromPline) -> SettingsCmdCreateFromEntities
"""
SettingsCmdCreateFromEntities = None
class SettingsCmdCreateSuperelevationView(SettingsSuperelevationView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateSurface(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
BuildOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: BuildOptions(self: SettingsCmdCreateSurface) -> SettingsCmdBuildOptions
"""
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsCmdCreateSurface) -> SettingsNameFormat
"""
SurfaceCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SurfaceCreation(self: SettingsCmdCreateSurface) -> SettingsCmdSurfaceCreation
"""
SettingsCmdBuildOptions = None
SettingsCmdSurfaceCreation = None
SettingsNameFormat = None
class SettingsCmdCreateSurfaceFromTIN(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdCreateSurfaceGridFromDEM(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
BuildOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: BuildOptions(self: SettingsCmdCreateSurfaceGridFromDEM) -> SettingsCmdBuildOptions
"""
ImportOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ImportOptions(self: SettingsCmdCreateSurfaceGridFromDEM) -> SettingsCmdImportOptions
"""
SettingsCmdBuildOptions = None
SettingsCmdImportOptions = None
class SettingsCmdCreateSurfaceReference(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsCmdCreateSurfaceReference) -> SettingsNameFormat
"""
SettingsNameFormat = None
class SettingsCmdCreateSurfaceWaterdrop(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
WaterdropMarker = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: WaterdropMarker(self: SettingsCmdCreateSurfaceWaterdrop) -> SettingsCmdWaterdropMarker
"""
WaterdropPath = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: WaterdropPath(self: SettingsCmdCreateSurfaceWaterdrop) -> SettingsCmdWaterdropPath
"""
SettingsCmdWaterdropMarker = None
SettingsCmdWaterdropPath = None
class SettingsCmdCreateViewFrames(SettingsViewFrameGroup):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ViewFrameCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ViewFrameCreation(self: SettingsCmdCreateViewFrames) -> SettingsCmdViewFrameCreation
"""
SettingsCmdViewFrameCreation = None
class SettingsCmdDrawFeatureLine(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
FeatureLineCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLineCreation(self: SettingsCmdDrawFeatureLine) -> SettingsCmdFeatureLineCreation
"""
SettingsCmdFeatureLineCreation = None
class SettingsCmdEditFlowSegments(SettingsCatchment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ChannelFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ChannelFlow(self: SettingsCmdEditFlowSegments) -> SettingsCmdChannelFlow
"""
ShallowConcentratedFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ShallowConcentratedFlow(self: SettingsCmdEditFlowSegments) -> SettingsCmdShallowConcentratedFlow
"""
SheetFlow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SheetFlow(self: SettingsCmdEditFlowSegments) -> SettingsCmdSheetFlow
"""
SettingsCmdChannelFlow = None
SettingsCmdShallowConcentratedFlow = None
SettingsCmdSheetFlow = None
class SettingsCmdEditInStormSewers(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdEditSVGroupStyle(SettingsSectionView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdExportParcelAnalysis(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ParcelAnalysis = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ParcelAnalysis(self: SettingsCmdExportParcelAnalysis) -> SettingsCmdParcelAnalysis
"""
SettingsCmdParcelAnalysis = None
class SettingsCmdExportStormSewerData(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdFeatureLinesFromCorridor(SettingsCorridor):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
FeatureLineCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLineCreation(self: SettingsCmdFeatureLinesFromCorridor) -> SettingsCmdFeatureLineCreation
"""
SettingsCmdFeatureLineCreation = None
class SettingsCmdFitCurveFeature(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
FeatureLineFitCurve = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLineFitCurve(self: SettingsCmdFitCurveFeature) -> SettingsCmdFeatureLineFitCurve
"""
SettingsCmdFeatureLineFitCurve = None
class SettingsCmdGenerateQuantitiesReport(SettingsQuantityTakeoff):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DisplayXmlReport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DisplayXmlReport(self: SettingsCmdGenerateQuantitiesReport) -> PropertyBoolean
"""
class SettingsCmdGradingElevEditor(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
GradingElevationEditor = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GradingElevationEditor(self: SettingsCmdGradingElevEditor) -> SettingsCmdGradingElevationEditor
"""
SettingsCmdGradingElevationEditor = None
class SettingsCmdGradingTools(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
GradingLayoutTools = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GradingLayoutTools(self: SettingsCmdGradingTools) -> SettingsCmdGradingLayoutTools
"""
SettingsCmdGradingLayoutTools = None
class SettingsCmdGradingVolumeTools(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
LimitFeatureSelectionToCurrentGroup = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LimitFeatureSelectionToCurrentGroup(self: SettingsCmdGradingVolumeTools) -> PropertyBoolean
"""
RaiseLowerElevationIncrement = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RaiseLowerElevationIncrement(self: SettingsCmdGradingVolumeTools) -> PropertyDouble
"""
class SettingsCmdImportBuildingSite(SettingsBuildingSite):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdImportGISData(SettingsGeneral):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
PipeNetwork = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PipeNetwork(self: SettingsCmdImportGISData) -> SettingsCmdPipeNetwork
"""
SettingsCmdPipeNetwork = None
class SettingsCmdImportStormSewerData(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdJoinFeatures(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
FeatureLineJoin = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLineJoin(self: SettingsCmdJoinFeatures) -> SettingsCmdFeatureLineJoin
"""
SettingsCmdFeatureLineJoin = None
class SettingsCmdLayoutSectionViewGroup(SettingsSectionView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdMapCheck(SettingsGeneral):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Mapcheck = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Mapcheck(self: SettingsCmdMapCheck) -> SettingsCmdMapcheck
"""
SettingsCmdMapcheck = None
class SettingsCmdMinimizeSurfaceFlatAreas(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AddPointsToFlatEdges = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AddPointsToFlatEdges(self: SettingsCmdMinimizeSurfaceFlatAreas) -> PropertyBoolean
"""
AddPointsToFlatTriangles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AddPointsToFlatTriangles(self: SettingsCmdMinimizeSurfaceFlatAreas) -> PropertyBoolean
"""
FillGapsInContour = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FillGapsInContour(self: SettingsCmdMinimizeSurfaceFlatAreas) -> PropertyBoolean
"""
SwapEdges = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SwapEdges(self: SettingsCmdMinimizeSurfaceFlatAreas) -> PropertyBoolean
"""
class SettingsCmdMoveBlockstoAttribElev(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdMoveBlocksToSurface(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdMoveTextToElevation(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdProjectObjectsToMultiSect(SettingsSectionView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ObjectSelectionOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ObjectSelectionOptions(self: SettingsCmdProjectObjectsToMultiSect) -> SettingsCmdObjectSelectionOptions
"""
SettingsCmdObjectSelectionOptions = None
class SettingsCmdProjectObjectsToProf(SettingsProfileView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdProjectObjectsToSect(SettingsSectionView):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdReAddParcelAreaLabel(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdReAddParcelSegmentLabels(SettingsParcel):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdRenamePipeNetworkParts(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdResetAnchorPipe(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdReverseAlignmentDirection(SettingsAlignment):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdRunDepthCheck(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DepthCheckOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DepthCheckOption(self: SettingsCmdRunDepthCheck) -> SettingsCmdDepthCheckOption
"""
SettingsCmdDepthCheckOption = None
class SettingsCmdRunDesignCheck(SettingsPressureNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
DesignCheckOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DesignCheckOption(self: SettingsCmdRunDesignCheck) -> SettingsCmdDesignCheckOption
"""
SettingsCmdDesignCheckOption = None
class SettingsCmdShowGeodeticCalculator(SettingsPoint):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdShowPointGroupProperties(SettingsPoint):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdShowSpanningPipes(SettingsPipeNetwork):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdSimplifySurface(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
MaximumChangeInElevation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MaximumChangeInElevation(self: SettingsCmdSimplifySurface) -> PropertyDouble
"""
PercentageOfPointsToRemove = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PercentageOfPointsToRemove(self: SettingsCmdSimplifySurface) -> PropertyDouble
"""
RegionOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RegionOptions(self: SettingsCmdSimplifySurface) -> PropertyEnum[SurfaceRegionOptionsType]
"""
SimplifyMethod = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SimplifyMethod(self: SettingsCmdSimplifySurface) -> PropertyEnum[SurfaceSimplifyType]
"""
UseMaximumChangeInElevation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: UseMaximumChangeInElevation(self: SettingsCmdSimplifySurface) -> PropertyBoolean
"""
UsePercentageOfPointsToRemove = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: UsePercentageOfPointsToRemove(self: SettingsCmdSimplifySurface) -> PropertyBoolean
"""
class SettingsCmdSuperimposeProfile(SettingsProfile):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
SuperimposeProfile = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SuperimposeProfile(self: SettingsCmdSuperimposeProfile) -> SettingsCmdSuperimposeProfileOption
"""
SettingsCmdSuperimposeProfileOption = None
class SettingsCmdSurfaceExportToDem(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ExportOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ExportOptions(self: SettingsCmdSurfaceExportToDem) -> SettingsCmdExportOptions
"""
SettingsCmdExportOptions = None
class SettingsCmdSurfaceExtractObjects(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsCmdTakeOff(SettingsQuantityTakeoff):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ComputeTakeOffOption = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ComputeTakeOffOption(self: SettingsCmdTakeOff) -> SettingsCmdComputeTakeOff
"""
SettingsCmdComputeTakeOff = None
class SettingsCmdViewEditCorridorSection(SettingsCorridor):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
GridSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GridSettings(self: SettingsCmdViewEditCorridorSection) -> SettingsCmdGridSettings
"""
GridTextSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GridTextSettings(self: SettingsCmdViewEditCorridorSection) -> SettingsCmdGridTextSettings
"""
SectionSliderInMultipleViewports = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SectionSliderInMultipleViewports(self: SettingsCmdViewEditCorridorSection) -> SettingsCmdSectionSliderInMultipleViewports
"""
ViewEditOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ViewEditOptions(self: SettingsCmdViewEditCorridorSection) -> SettingsCmdViewEditOptions
"""
SettingsCmdGridSettings = None
SettingsCmdGridTextSettings = None
SettingsCmdSectionSliderInMultipleViewports = None
SettingsCmdViewEditOptions = None
class SettingsCmdVolumesDashboard(SettingsSurface):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
BoundedVolumeCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: BoundedVolumeCreation(self: SettingsCmdVolumesDashboard) -> SettingsCmdBoundedVolumeCreation
"""
BuildOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: BuildOptions(self: SettingsCmdVolumesDashboard) -> SettingsCmdBuildOptions
"""
DynamicHighlightOptions = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DynamicHighlightOptions(self: SettingsCmdVolumesDashboard) -> SettingsCmdDynamicHighlightOptions
"""
VolumeSurfaceCreation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: VolumeSurfaceCreation(self: SettingsCmdVolumesDashboard) -> SettingsCmdVolumeSurfaceCreation
"""
SettingsCmdBoundedVolumeCreation = None
SettingsCmdBuildOptions = None
SettingsCmdDynamicHighlightOptions = None
SettingsCmdVolumeSurfaceCreation = None
class SettingsCmdWeedFeatures(SettingsGrading):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
FeatureLineWeed = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLineWeed(self: SettingsCmdWeedFeatures) -> SettingsCmdFeatureLineWeed
"""
SettingsCmdFeatureLineWeed = None
class SettingsCoordinateSystem(object):
# no doc
Category = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Category(self: SettingsCoordinateSystem) -> str
"""
Code = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Code(self: SettingsCoordinateSystem) -> str
"""
Datum = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Datum(self: SettingsCoordinateSystem) -> str
"""
Description = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Description(self: SettingsCoordinateSystem) -> str
"""
Projection = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Projection(self: SettingsCoordinateSystem) -> str
"""
Unit = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Unit(self: SettingsCoordinateSystem) -> str
"""
class SettingsDrawing(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AbbreviationsSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AbbreviationsSettings(self: SettingsDrawing) -> SettingsAbbreviation
"""
AmbientSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AmbientSettings(self: SettingsDrawing) -> SettingsAmbient
"""
ApplyTransformSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ApplyTransformSettings(self: SettingsDrawing) -> bool
Set: ApplyTransformSettings(self: SettingsDrawing) = value
"""
ObjectLayerSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ObjectLayerSettings(self: SettingsDrawing) -> SettingsObjectLayers
"""
TransformationSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TransformationSettings(self: SettingsDrawing) -> SettingsTransformation
"""
UnitZoneSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: UnitZoneSettings(self: SettingsDrawing) -> SettingsUnitZone
"""
class SettingsLandXML(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Export = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Export(self: SettingsLandXML) -> SettingsLandXMLExport
"""
Import = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Import(self: SettingsLandXML) -> SettingsLandXMLImport
"""
class SettingsLandXMLExport(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AlignmentExport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AlignmentExport(self: SettingsLandXMLExport) -> SettingsAlignmentExport
"""
Data = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Data(self: SettingsLandXMLExport) -> SettingsData
"""
FeatureLineExport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLineExport(self: SettingsLandXMLExport) -> SettingsFeatureLineExport
"""
Identification = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Identification(self: SettingsLandXMLExport) -> SettingsIdentification
"""
ParcelExport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ParcelExport(self: SettingsLandXMLExport) -> SettingsParcelExport
"""
PointExport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PointExport(self: SettingsLandXMLExport) -> SettingsPointExport
"""
SurfaceExport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SurfaceExport(self: SettingsLandXMLExport) -> SettingsSurfaceExport
"""
SettingsAlignmentExport = None
SettingsData = None
SettingsFeatureLineExport = None
SettingsIdentification = None
SettingsParcelExport = None
SettingsPointExport = None
SettingsSurfaceExport = None
class SettingsLandXMLImport(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
AlignmentImport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AlignmentImport(self: SettingsLandXMLImport) -> SettingsAlignmentImport
"""
ConflictResolution = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ConflictResolution(self: SettingsLandXMLImport) -> SettingsConflictResolution
"""
DiameterUnits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DiameterUnits(self: SettingsLandXMLImport) -> SettingsDiameterUnits
"""
FeatureLineImport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FeatureLineImport(self: SettingsLandXMLImport) -> SettingsFeatureLineImport
"""
PipeNetwork = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PipeNetwork(self: SettingsLandXMLImport) -> SettingsPipeNetwork
"""
PointImport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PointImport(self: SettingsLandXMLImport) -> SettingsPointImport
"""
PropertySetData = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PropertySetData(self: SettingsLandXMLImport) -> SettingsPropertySetData
"""
Rotation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Rotation(self: SettingsLandXMLImport) -> SettingsRotation
"""
SurfaceImport = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SurfaceImport(self: SettingsLandXMLImport) -> SettingsSurfaceImport
"""
Translation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Translation(self: SettingsLandXMLImport) -> SettingsTranslation
"""
SettingsAlignmentImport = None
SettingsConflictResolution = None
SettingsDiameterUnits = None
SettingsFeatureLineImport = None
SettingsPipeNetwork = None
SettingsPointImport = None
SettingsPropertySetData = None
SettingsRotation = None
SettingsSurfaceImport = None
SettingsTranslation = None
class SettingsMassHaulLine(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsMatchLine(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsObjectLayer(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
LayerId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LayerId(self: SettingsObjectLayer) -> ObjectId
Set: LayerId(self: SettingsObjectLayer) = value
"""
LayerName = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LayerName(self: SettingsObjectLayer) -> str
Set: LayerName(self: SettingsObjectLayer) = value
"""
Locked = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Locked(self: SettingsObjectLayer) -> bool
Set: Locked(self: SettingsObjectLayer) = value
"""
Modifier = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Modifier(self: SettingsObjectLayer) -> ObjectLayerModifierType
Set: Modifier(self: SettingsObjectLayer) = value
"""
ModifierValue = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ModifierValue(self: SettingsObjectLayer) -> str
Set: ModifierValue(self: SettingsObjectLayer) = value
"""
ObjectType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ObjectType(self: SettingsObjectLayer) -> SettingsObjectLayerType
"""
class SettingsObjectLayers(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def GetObjectLayerSetting(self, settingsType):
""" GetObjectLayerSetting(self: SettingsObjectLayers, settingsType: SettingsObjectLayerType) -> SettingsObjectLayer """
pass
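# Illustrative usage (sketch; the `object_layers` variable name is assumed):
#   layer = object_layers.GetObjectLayerSetting(SettingsObjectLayerType.Alignment)
#   print(layer.LayerName, layer.Modifier)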
ObjectControlledByLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ObjectControlledByLayer(self: SettingsObjectLayers) -> bool
Set: ObjectControlledByLayer(self: SettingsObjectLayers) = value
"""
class SettingsObjectLayerType(Enum):
""" enum SettingsObjectLayerType, values: Alignment (0), AlignmentLabeling (1), AlignmentTable (2), Appurtenance (56), AppurtenanceLabeling (57), Assembly (3), BuildingSite (53), CantView (58), Catchment (59), CatchmentLabeling (60), Corridor (4), CorridorSection (5), FeatureLine (6), Fitting (61), FittingLabeling (62), GeneralNoteLabel (7), GeneralSegmentLabel (8), Grading (9), GradingLabeling (10), GridSurface (11), GridSurfaceLabeling (12), Interference (13), Intersection (54), IntersectionLabeling (55), MassHaulLine (14), MassHaulView (15), MatchLine (16), MatchLineLabeling (17), MaterialSection (18), MaterialTable (19), Parcel (20), ParcelLabeling (21), ParcelSegment (22), ParcelSegmentLabeling (23), ParcelTable (24), Pipe (25), PipeAndStructureTable (27), PipeLabeling (26), PipeNetworkSection (28), PipeOrStructureProfile (29), PointTable (30), PressureNetworkSection (63), PressurePartProfile (64), PressurePartTable (65), PressurePipe (66), PressurePipeLabeling (67), Profile (31), ProfileLabeling (32), ProfileView (33), ProfileViewLabeling (34), SampleLine (35), SampleLineLabeling (36), Section (37), SectionLabeling (38), SectionView (39), SectionViewLabeling (40), SectionViewQuantityTakeoffTable (41), Sheet (42), Structure (43), StructureLabeling (44), Subassembly (45), SuperelevationView (68), SurfaceLegendTable (46), SurveyFigure (47), SurveyFigureLabeling (69), SurveyFigureSegmentLable (70), SurveyNetwork (48), TinSurface (49), TinSurfaceLabeling (50), ViewFrame (51), ViewFrameLabeling (52) """
Alignment = None
AlignmentLabeling = None
AlignmentTable = None
Appurtenance = None
AppurtenanceLabeling = None
Assembly = None
BuildingSite = None
CantView = None
Catchment = None
CatchmentLabeling = None
Corridor = None
CorridorSection = None
FeatureLine = None
Fitting = None
FittingLabeling = None
GeneralNoteLabel = None
GeneralSegmentLabel = None
Grading = None
GradingLabeling = None
GridSurface = None
GridSurfaceLabeling = None
Interference = None
Intersection = None
IntersectionLabeling = None
MassHaulLine = None
MassHaulView = None
MatchLine = None
MatchLineLabeling = None
MaterialSection = None
MaterialTable = None
Parcel = None
ParcelLabeling = None
ParcelSegment = None
ParcelSegmentLabeling = None
ParcelTable = None
Pipe = None
PipeAndStructureTable = None
PipeLabeling = None
PipeNetworkSection = None
PipeOrStructureProfile = None
PointTable = None
PressureNetworkSection = None
PressurePartProfile = None
PressurePartTable = None
PressurePipe = None
PressurePipeLabeling = None
Profile = None
ProfileLabeling = None
ProfileView = None
ProfileViewLabeling = None
SampleLine = None
SampleLineLabeling = None
Section = None
SectionLabeling = None
SectionView = None
SectionViewLabeling = None
SectionViewQuantityTakeoffTable = None
Sheet = None
Structure = None
StructureLabeling = None
Subassembly = None
SuperelevationView = None
SurfaceLegendTable = None
SurveyFigure = None
SurveyFigureLabeling = None
SurveyFigureSegmentLable = None
SurveyNetwork = None
TinSurface = None
TinSurfaceLabeling = None
value__ = None
ViewFrame = None
ViewFrameLabeling = None
class SettingsPipe(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsPressureAppurtenance(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsPressureFitting(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsPressurePipe(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsRoot(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def GetSettings(self):
# Error generating skeleton for function GetSettings: Method must be called on a Type for which Type.IsGenericParameter is false.
pass
AssociateShortcutProjectId = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AssociateShortcutProjectId(self: SettingsRoot) -> str
Set: AssociateShortcutProjectId(self: SettingsRoot) = value
"""
DrawingSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DrawingSettings(self: SettingsRoot) -> SettingsDrawing
"""
LandXMLSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LandXMLSettings(self: SettingsRoot) -> SettingsLandXML
"""
TagSettings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TagSettings(self: SettingsRoot) -> SettingsTag
"""
class SettingsSection(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
NameFormat = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: NameFormat(self: SettingsSection) -> SettingsNameFormat
"""
Styles = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Styles(self: SettingsSection) -> SettingsStyles
"""
SettingsNameFormat = None
SettingsStyles = None
class SettingsStructure(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SettingsTag(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
Creation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Creation(self: SettingsTag) -> SettingsCreation
"""
Renumbering = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Renumbering(self: SettingsTag) -> SettingsRenumbering
"""
SettingsCreation = None
SettingsRenumbering = None
class SettingsTransformation(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
ApplySeaLevelScaleFactor = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ApplySeaLevelScaleFactor(self: SettingsTransformation) -> bool
Set: ApplySeaLevelScaleFactor(self: SettingsTransformation) = value
"""
GridReferencePoint = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GridReferencePoint(self: SettingsTransformation) -> Point2d
Set: GridReferencePoint(self: SettingsTransformation) = value
"""
GridRotationPoint = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GridRotationPoint(self: SettingsTransformation) -> Point2d
Set: GridRotationPoint(self: SettingsTransformation) = value
"""
GridScaleFactor = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GridScaleFactor(self: SettingsTransformation) -> float
Set: GridScaleFactor(self: SettingsTransformation) = value
"""
GridScaleFactorComputation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: GridScaleFactorComputation(self: SettingsTransformation) -> GridScaleFactorType
Set: GridScaleFactorComputation(self: SettingsTransformation) = value
"""
LocalReferencePoint = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LocalReferencePoint(self: SettingsTransformation) -> Point2d
Set: LocalReferencePoint(self: SettingsTransformation) = value
"""
LocalRotationPoint = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LocalRotationPoint(self: SettingsTransformation) -> Point2d
Set: LocalRotationPoint(self: SettingsTransformation) = value
"""
RotationToGridAzimuth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RotationToGridAzimuth(self: SettingsTransformation) -> float
Set: RotationToGridAzimuth(self: SettingsTransformation) = value
"""
RotationToGridNorth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RotationToGridNorth(self: SettingsTransformation) -> float
Set: RotationToGridNorth(self: SettingsTransformation) = value
"""
SeaLevelScaleElevation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SeaLevelScaleElevation(self: SettingsTransformation) -> float
Set: SeaLevelScaleElevation(self: SettingsTransformation) = value
"""
SpecifyRotationType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SpecifyRotationType(self: SettingsTransformation) -> SpecifyRotationType
Set: SpecifyRotationType(self: SettingsTransformation) = value
"""
SpheroidRadius = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SpheroidRadius(self: SettingsTransformation) -> float
"""
class SettingsUnitZone(TreeOidWrapper):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
@staticmethod
def GetAllCodes():
""" GetAllCodes() -> Array[str] """
pass
@staticmethod
def GetCoordinateSystemByCode(code):
""" GetCoordinateSystemByCode(code: str) -> SettingsCoordinateSystem """
pass
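# Illustrative usage (sketch): enumerate the available coordinate systems, e.g.
#   for code in SettingsUnitZone.GetAllCodes():
#       cs = SettingsUnitZone.GetCoordinateSystemByCode(code)
#       print(cs.Code, cs.Datum, cs.Unit)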
AngularUnits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AngularUnits(self: SettingsUnitZone) -> AngleUnitType
Set: AngularUnits(self: SettingsUnitZone) = value
"""
CoordinateSystemCode = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CoordinateSystemCode(self: SettingsUnitZone) -> str
Set: CoordinateSystemCode(self: SettingsUnitZone) = value
"""
DrawingScale = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DrawingScale(self: SettingsUnitZone) -> float
Set: DrawingScale(self: SettingsUnitZone) = value
"""
DrawingUnits = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DrawingUnits(self: SettingsUnitZone) -> DrawingUnitType
Set: DrawingUnits(self: SettingsUnitZone) = value
"""
ImperialToMetricConversion = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ImperialToMetricConversion(self: SettingsUnitZone) -> ImperialToMetricConversionType
Set: ImperialToMetricConversion(self: SettingsUnitZone) = value
"""
MatchAutoCADVariables = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MatchAutoCADVariables(self: SettingsUnitZone) -> bool
Set: MatchAutoCADVariables(self: SettingsUnitZone) = value
"""
ScaleObjectsFromOtherDrawings = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ScaleObjectsFromOtherDrawings(self: SettingsUnitZone) -> bool
Set: ScaleObjectsFromOtherDrawings(self: SettingsUnitZone) = value
"""
class SettingsViewFrame(SettingsAmbient):
# no doc
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
class SpecifyRotationType(Enum):
""" enum SpecifyRotationType, values: GridRotationAngle (1), RotationPoint (0) """
GridRotationAngle = None
RotationPoint = None
value__ = None
class TableAnchorType(Enum):
""" enum TableAnchorType, values: BottomCenter (7), BottomLeft (6), BottomRight (8), MiddleCenter (4), MiddleLeft (3), MiddleRight (5), TopCenter (1), TopLeft (0), TopRight (2) """
BottomCenter = None
BottomLeft = None
BottomRight = None
MiddleCenter = None
MiddleLeft = None
MiddleRight = None
TopCenter = None
TopLeft = None
TopRight = None
value__ = None
class TableLayoutType(Enum):
""" enum TableLayoutType, values: Horizontal (0), Vertical (1) """
Horizontal = None
value__ = None
Vertical = None
class TileDirectionType(Enum):
""" enum TileDirectionType, values: Across (0), Down (1) """
Across = None
Down = None
value__ = None
"""
Utilities for training, testing and caching results
for HICO-DET and V-COCO evaluations.
Fred Zhang <[email protected]>
The Australian National University
Australian Centre for Robotic Vision
"""
import os
import sys
import torch
import random
import warnings
import argparse
import numpy as np
import torch.distributed as dist
import torch.multiprocessing as mp
from torch.utils.data import DataLoader, DistributedSampler
from upt import build_detector
from utils import custom_collate, CustomisedDLE, DataFactory
warnings.filterwarnings("ignore")
def main(rank, args):
dist.init_process_group(
backend="nccl",
init_method="env://",
world_size=args.world_size,
rank=rank
)
# Fix seed
seed = args.seed + dist.get_rank()
torch.manual_seed(seed)
np.random.seed(seed)
random.seed(seed)
torch.cuda.set_device(rank)
trainset = DataFactory(name=args.dataset, partition=args.partitions[0], data_root=args.data_root)
testset = DataFactory(name=args.dataset, partition=args.partitions[1], data_root=args.data_root)
train_loader = DataLoader(
dataset=trainset,
collate_fn=custom_collate, batch_size=args.batch_size,
num_workers=args.num_workers, pin_memory=True, drop_last=True,
sampler=DistributedSampler(
trainset,
num_replicas=args.world_size,
rank=rank)
)
test_loader = DataLoader(
dataset=testset,
collate_fn=custom_collate, batch_size=1,
num_workers=args.num_workers, pin_memory=True, drop_last=False,
sampler=torch.utils.data.SequentialSampler(testset)
)
args.human_idx = 0
if args.dataset == 'hicodet':
object_to_target = train_loader.dataset.dataset.object_to_verb
args.num_classes = 117
elif args.dataset == 'vcoco':
object_to_target = list(train_loader.dataset.dataset.object_to_action.values())
args.num_classes = 24
upt = build_detector(args, object_to_target)
if os.path.exists(args.resume):
print(f"=> Rank {rank}: continue from saved checkpoint {args.resume}")
checkpoint = torch.load(args.resume, map_location='cpu')
upt.load_state_dict(checkpoint['model_state_dict'])
else:
print(f"=> Rank {rank}: start from a randomly initialised model")
engine = CustomisedDLE(
upt, train_loader,
max_norm=args.clip_max_norm,
num_classes=args.num_classes,
print_interval=args.print_interval,
find_unused_parameters=True,
cache_dir=args.output_dir
)
if args.cache:
if args.dataset == 'hicodet':
engine.cache_hico(test_loader, args.output_dir)
elif args.dataset == 'vcoco':
engine.cache_vcoco(test_loader, args.output_dir)
return
if args.eval:
if args.dataset == 'vcoco':
raise NotImplementedError(f"Evaluation on V-COCO has not been implemented.")
ap = engine.test_hico(test_loader)
# Fetch indices for rare and non-rare classes
num_anno = torch.as_tensor(trainset.dataset.anno_interaction)
rare = torch.nonzero(num_anno < 10).squeeze(1)
non_rare = torch.nonzero(num_anno >= 10).squeeze(1)
print(
f"The mAP is {ap.mean():.4f},"
f" rare: {ap[rare].mean():.4f},"
f" none-rare: {ap[non_rare].mean():.4f}"
)
return
for p in upt.detector.parameters():
p.requires_grad = False
param_dicts = [{
"params": [p for n, p in upt.named_parameters()
if "interaction_head" in n and p.requires_grad]
}]
optim = torch.optim.AdamW(
param_dicts, lr=args.lr_head,
weight_decay=args.weight_decay
)
lr_scheduler = torch.optim.lr_scheduler.StepLR(optim, args.lr_drop)
# Override optimiser and learning rate scheduler
engine.update_state_key(optimizer=optim, lr_scheduler=lr_scheduler)
engine(args.epochs)
@torch.no_grad()
def sanity_check(args):
dataset = DataFactory(name='hicodet', partition=args.partitions[0], data_root=args.data_root)
args.human_idx = 0
args.num_classes = 117
object_to_target = dataset.dataset.object_to_verb
upt = build_detector(args, object_to_target)
if args.eval:
upt.eval()
image, target = dataset[0]
outputs = upt([image], [target])
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--lr-head', default=1e-4, type=float)
parser.add_argument('--batch-size', default=2, type=int)
parser.add_argument('--weight-decay', default=1e-4, type=float)
parser.add_argument('--epochs', default=20, type=int)
parser.add_argument('--lr-drop', default=10, type=int)
parser.add_argument('--clip-max-norm', default=0.1, type=float)
parser.add_argument('--backbone', default='resnet50', type=str)
parser.add_argument('--dilation', action='store_true')
parser.add_argument('--position-embedding', default='sine', type=str, choices=('sine', 'learned'))
parser.add_argument('--repr-dim', default=512, type=int)
parser.add_argument('--hidden-dim', default=256, type=int)
parser.add_argument('--enc-layers', default=6, type=int)
parser.add_argument('--dec-layers', default=6, type=int)
parser.add_argument('--dim-feedforward', default=2048, type=int)
parser.add_argument('--dropout', default=0.1, type=float)
parser.add_argument('--nheads', default=8, type=int)
parser.add_argument('--num-queries', default=100, type=int)
parser.add_argument('--pre-norm', action='store_true')
parser.add_argument('--no-aux-loss', dest='aux_loss', action='store_false')
parser.add_argument('--set-cost-class', default=1, type=float)
parser.add_argument('--set-cost-bbox', default=5, type=float)
parser.add_argument('--set-cost-giou', default=2, type=float)
parser.add_argument('--bbox-loss-coef', default=5, type=float)
parser.add_argument('--giou-loss-coef', default=2, type=float)
parser.add_argument('--eos-coef', default=0.1, type=float,
help="Relative classification weight of the no-object class")
parser.add_argument('--alpha', default=0.5, type=float)
parser.add_argument('--gamma', default=0.2, type=float)
parser.add_argument('--dataset', default='hicodet', type=str)
parser.add_argument('--partitions', nargs='+', default=['train2015', 'test2015'], type=str)
parser.add_argument('--num-workers', default=2, type=int)
parser.add_argument('--data-root', default='./hicodet')
# training parameters
parser.add_argument('--device', default='cuda',
help='device to use for training / testing')
parser.add_argument('--port', default='1234', type=str)
parser.add_argument('--seed', default=66, type=int)
parser.add_argument('--pretrained', default='', help='Path to a pretrained detector')
parser.add_argument('--resume', default='', help='Resume from a model')
parser.add_argument('--output-dir', default='checkpoints')
parser.add_argument('--print-interval', default=500, type=int)
parser.add_argument('--world-size', default=1, type=int)
parser.add_argument('--eval', action='store_true')
parser.add_argument('--cache', action='store_true')
parser.add_argument('--sanity', action='store_true')
parser.add_argument('--box-score-thresh', default=0.2, type=float)
parser.add_argument('--fg-iou-thresh', default=0.5, type=float)
parser.add_argument('--min-instances', default=3, type=int)
parser.add_argument('--max-instances', default=15, type=int)
args = parser.parse_args()
print(args)
if args.sanity:
sanity_check(args)
sys.exit()
os.environ["MASTER_ADDR"] = "localhost"
os.environ["MASTER_PORT"] = args.port
mp.spawn(main, nprocs=args.world_size, args=(args,))
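# Illustrative invocations, sketched from the argparse flags defined above; the
# checkpoint paths and GPU count are assumptions, not taken from the project docs.
#
#   # distributed training on 8 GPUs, starting from a pretrained detector
#   python main.py --world-size 8 --pretrained checkpoints/detr-r50-hicodet.pth
#
#   # evaluation on HICO-DET from a saved checkpoint
#   python main.py --eval --resume checkpoints/ckpt_latest.pth
#
#   # cache V-COCO detections for later evaluation
#   python main.py --cache --dataset vcoco --data-root ./vcoco --resume checkpoints/ckpt_latest.pth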
| 37.875598 | 102 | 0.676983 | [
"BSD-3-Clause"
] | RomeroBarata/upt | main.py | 7,916 | Python |
def assert_rounded_correct_num_decimals(on_offset_arr, decimals):
__tracebackhide__ = True
assert all(
[len(str(float(boundary_s)).split('.')[-1]) <= decimals
for boundary_s in on_offset_arr]
)
| 31.857143 | 65 | 0.681614 | [
"BSD-3-Clause"
] | NickleDave/conbirt | tests/test_formats/test_seq/asserts.py | 223 | Python |
#!/usr/bin/python
import os, sys # low level handling, such as command line stuff
import string # string methods available
import re # regular expressions
import getopt # comand line argument handling
import math # match functions
from low import * # custom functions, written by myself
# =============================================================================
def show_help( ):
""" displays the program parameter list and usage information """
stdout( "usage: " + sys.argv[0] + " -f <path>" )
stdout( " " )
stdout( " option description" )
stdout( " -h help (this text here)" )
stdout( " -f fasta file to import" )
stdout( " -g map file, tab delimited, regex to name (one per line) to group sequences into distinct bins" )
stdout( " " )
sys.exit(1)
# =============================================================================
def handle_arguments():
""" verifies the presence of all necessary arguments and returns the data dir """
if len ( sys.argv ) == 1:
stderr( "no arguments provided." )
show_help()
try: # check for the right arguments
keys, values = getopt.getopt( sys.argv[1:], "hf:g:" )
except getopt.GetoptError:
stderr( "invalid arguments provided." )
show_help()
args = {}
for key, value in keys:
if key == '-f': args['file'] = value
if key == '-g': args['group'] = value
if not args.has_key('file'):
stderr( "import file argument missing." )
show_help()
elif not file_exists( args.get('file') ):
stderr( "import file does not exist." )
show_help()
return args
# =============================================================================
def read_groups( file ):
groups = {}
fo = open( file )
for line in fo:
line = line.rstrip()
regex, name = line.split("\t")
groups[name] = re.compile(regex)
fo.close()
return groups
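# Example of the -g map file consumed by read_groups() above: one tab-separated
# "regex<TAB>name" pair per line (the patterns below are made-up illustrations).
#
#   ^HUMAN_<TAB>human
#   ^MOUSE_<TAB>mouse
#
# FASTA headers matching a regex are binned under that name; headers matching no
# regex fall into the implicit "*all*" group (see read_sequences below).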
# =============================================================================
def read_sequences( file, groups ):
def add_entry( hash, groups, id, seq ):
group = "*all*"
for name, regex in groups.iteritems():
if re.search(regex, id):
group = name
break
if hash[group].has_key(id): sys.stderr.write("WARNING: overwriting entry with the same ID (%s) in group %s...\n" %(id, group))
hash[group][id] = seq
return hash
hash = {}
for name, regex in groups.iteritems(): hash[name] = {}
if hash.has_key('*all*'): sys.stderr.write("WARNING: you used \"*all*\" as a group name. This name refers to all non-group-matching entries as well!\n")
hash['*all*'] = {}
id, seq = "", ""
fo = open( file )
for line in fo:
line = line.rstrip()
if line.startswith(">"):
if id != "": add_entry( hash, groups, id, seq )
id = line[1:]
seq = ""
else:
seq += line
if id != "": add_entry( hash, groups, id, seq )
fo.close()
return hash
# =============================================================================
def eval_seq_lengths(hash):
for group, seqhash in hash.iteritems():
for id, seq in seqhash.iteritems():
print string.join([group, id, str(len(seq))], "\t")
# =============================================================================
# === MAIN ====================================================================
# =============================================================================
def main( args ):
groups = {}
if args.has_key('group'): groups = read_groups( args.get('group') )
seqhash = read_sequences( args.get('file'), groups )
eval_seq_lengths(seqhash)
# =============================================================================
args = handle_arguments()
main( args )
| 33.044248 | 154 | 0.497054 | [
"MIT"
] | lotharwissler/bioinformatics | python/fasta/seqlength.py | 3,734 | Python |
# -*- coding: utf-8 -*-
"""
Tests the speed of image updates for an ImageItem and RawImageWidget.
The speed will generally depend on the type of data being shown, whether
it is being scaled and/or converted by lookup table, and whether OpenGL
is used by the view widget
"""
## Add path to library (just for examples; you do not need this)
import initExample
import argparse
import sys
import numpy as np
import pyqtgraph as pg
import pyqtgraph.ptime as ptime
from pyqtgraph.Qt import QtGui, QtCore, QT_LIB
pg.setConfigOption('imageAxisOrder', 'row-major')
import importlib
ui_template = importlib.import_module(f'VideoTemplate_{QT_LIB.lower()}')
try:
import cupy as cp
pg.setConfigOption("useCupy", True)
_has_cupy = True
except ImportError:
cp = None
_has_cupy = False
try:
import numba
_has_numba = True
except ImportError:
numba = None
_has_numba = False
try:
from pyqtgraph.widgets.RawImageWidget import RawImageGLWidget
except ImportError:
RawImageGLWidget = None
parser = argparse.ArgumentParser(description="Benchmark for testing video performance")
parser.add_argument('--cuda', default=False, action='store_true', help="Use CUDA to process on the GPU", dest="cuda")
parser.add_argument('--dtype', default='uint8', choices=['uint8', 'uint16', 'float'], help="Image dtype (uint8, uint16, or float)")
parser.add_argument('--frames', default=3, type=int, help="Number of image frames to generate (default=3)")
parser.add_argument('--image-mode', default='mono', choices=['mono', 'rgb'], help="Image data mode (mono or rgb)", dest='image_mode')
parser.add_argument('--levels', default=None, type=lambda s: tuple([float(x) for x in s.split(',')]), help="min,max levels to scale monochromatic image dynamic range, or rmin,rmax,gmin,gmax,bmin,bmax to scale rgb")
parser.add_argument('--lut', default=False, action='store_true', help="Use color lookup table")
parser.add_argument('--lut-alpha', default=False, action='store_true', help="Use alpha color lookup table", dest='lut_alpha')
parser.add_argument('--size', default='512x512', type=lambda s: tuple([int(x) for x in s.split('x')]), help="WxH image dimensions default='512x512'")
args = parser.parse_args(sys.argv[1:])
if RawImageGLWidget is not None:
# don't limit frame rate to vsync
sfmt = QtGui.QSurfaceFormat()
sfmt.setSwapInterval(0)
QtGui.QSurfaceFormat.setDefaultFormat(sfmt)
app = pg.mkQApp("Video Speed Test Example")
win = QtGui.QMainWindow()
win.setWindowTitle('pyqtgraph example: VideoSpeedTest')
ui = ui_template.Ui_MainWindow()
ui.setupUi(win)
win.show()
if RawImageGLWidget is None:
ui.rawGLRadio.setEnabled(False)
ui.rawGLRadio.setText(ui.rawGLRadio.text() + " (OpenGL not available)")
else:
ui.rawGLImg = RawImageGLWidget()
ui.stack.addWidget(ui.rawGLImg)
# read in CLI args
ui.cudaCheck.setChecked(args.cuda and _has_cupy)
ui.cudaCheck.setEnabled(_has_cupy)
ui.numbaCheck.setChecked(_has_numba and pg.getConfigOption("useNumba"))
ui.numbaCheck.setEnabled(_has_numba)
ui.framesSpin.setValue(args.frames)
ui.widthSpin.setValue(args.size[0])
ui.heightSpin.setValue(args.size[1])
ui.dtypeCombo.setCurrentText(args.dtype)
ui.rgbCheck.setChecked(args.image_mode=='rgb')
ui.maxSpin1.setOpts(value=255, step=1)
ui.minSpin1.setOpts(value=0, step=1)
levelSpins = [ui.minSpin1, ui.maxSpin1, ui.minSpin2, ui.maxSpin2, ui.minSpin3, ui.maxSpin3]
if args.cuda and _has_cupy:
xp = cp
else:
xp = np
if args.levels is None:
ui.scaleCheck.setChecked(False)
ui.rgbLevelsCheck.setChecked(False)
else:
ui.scaleCheck.setChecked(True)
if len(args.levels) == 2:
ui.rgbLevelsCheck.setChecked(False)
ui.minSpin1.setValue(args.levels[0])
ui.maxSpin1.setValue(args.levels[1])
elif len(args.levels) == 6:
ui.rgbLevelsCheck.setChecked(True)
for spin,val in zip(levelSpins, args.levels):
spin.setValue(val)
else:
raise ValueError("levels argument must be 2 or 6 comma-separated values (got %r)" % (args.levels,))
ui.lutCheck.setChecked(args.lut)
ui.alphaCheck.setChecked(args.lut_alpha)
#ui.graphicsView.useOpenGL() ## buggy, but you can try it if you need extra speed.
vb = pg.ViewBox()
ui.graphicsView.setCentralItem(vb)
vb.setAspectLocked()
img = pg.ImageItem()
vb.addItem(img)
LUT = None
def updateLUT():
global LUT, ui
dtype = ui.dtypeCombo.currentText()
if dtype == 'uint8':
n = 256
else:
n = 4096
LUT = ui.gradient.getLookupTable(n, alpha=ui.alphaCheck.isChecked())
if _has_cupy and xp == cp:
LUT = cp.asarray(LUT)
ui.gradient.sigGradientChanged.connect(updateLUT)
updateLUT()
ui.alphaCheck.toggled.connect(updateLUT)
def updateScale():
global ui, levelSpins
if ui.rgbLevelsCheck.isChecked():
for s in levelSpins[2:]:
s.setEnabled(True)
else:
for s in levelSpins[2:]:
s.setEnabled(False)
updateScale()
ui.rgbLevelsCheck.toggled.connect(updateScale)
cache = {}
def mkData():
with pg.BusyCursor():
global data, cache, ui, xp
frames = ui.framesSpin.value()
width = ui.widthSpin.value()
height = ui.heightSpin.value()
cacheKey = (ui.dtypeCombo.currentText(), ui.rgbCheck.isChecked(), frames, width, height)
if cacheKey not in cache:
if cacheKey[0] == 'uint8':
dt = xp.uint8
loc = 128
scale = 64
mx = 255
elif cacheKey[0] == 'uint16':
dt = xp.uint16
loc = 4096
scale = 1024
mx = 2**16 - 1
elif cacheKey[0] == 'float':
dt = xp.float32
loc = 1.0
scale = 0.1
mx = 1.0
else:
raise ValueError(f"unable to handle dtype: {cacheKey[0]}")
chan_shape = (height, width)
if ui.rgbCheck.isChecked():
frame_shape = chan_shape + (3,)
else:
frame_shape = chan_shape
data = xp.empty((frames,) + frame_shape, dtype=dt)
view = data.reshape((-1,) + chan_shape)
for idx in range(view.shape[0]):
subdata = xp.random.normal(loc=loc, scale=scale, size=chan_shape)
# note: gaussian filtering has been removed as it slows down array
# creation greatly.
if cacheKey[0] != 'float':
xp.clip(subdata, 0, mx, out=subdata)
view[idx] = subdata
data[:, 10:50, 10] = mx
data[:, 48, 9:12] = mx
data[:, 47, 8:13] = mx
cache = {cacheKey: data} # clear to save memory (but keep one to prevent unnecessary regeneration)
data = cache[cacheKey]
updateLUT()
updateSize()
def updateSize():
global ui, vb
frames = ui.framesSpin.value()
width = ui.widthSpin.value()
height = ui.heightSpin.value()
dtype = xp.dtype(str(ui.dtypeCombo.currentText()))
rgb = 3 if ui.rgbCheck.isChecked() else 1
ui.sizeLabel.setText('%d MB' % (frames * width * height * rgb * dtype.itemsize / 1e6))
vb.setRange(QtCore.QRectF(0, 0, width, height))
def noticeCudaCheck():
global xp, cache
cache = {}
if ui.cudaCheck.isChecked():
if _has_cupy:
xp = cp
else:
xp = np
ui.cudaCheck.setChecked(False)
else:
xp = np
mkData()
def noticeNumbaCheck():
pg.setConfigOption('useNumba', _has_numba and ui.numbaCheck.isChecked())
mkData()
ui.dtypeCombo.currentIndexChanged.connect(mkData)
ui.rgbCheck.toggled.connect(mkData)
ui.widthSpin.editingFinished.connect(mkData)
ui.heightSpin.editingFinished.connect(mkData)
ui.framesSpin.editingFinished.connect(mkData)
ui.widthSpin.valueChanged.connect(updateSize)
ui.heightSpin.valueChanged.connect(updateSize)
ui.framesSpin.valueChanged.connect(updateSize)
ui.cudaCheck.toggled.connect(noticeCudaCheck)
ui.numbaCheck.toggled.connect(noticeNumbaCheck)
ptr = 0
lastTime = ptime.time()
fps = None
def update():
global ui, ptr, lastTime, fps, LUT, img
if ui.lutCheck.isChecked():
useLut = LUT
else:
useLut = None
downsample = ui.downsampleCheck.isChecked()
if ui.scaleCheck.isChecked():
if ui.rgbLevelsCheck.isChecked():
useScale = [
[ui.minSpin1.value(), ui.maxSpin1.value()],
[ui.minSpin2.value(), ui.maxSpin2.value()],
[ui.minSpin3.value(), ui.maxSpin3.value()]]
else:
useScale = [ui.minSpin1.value(), ui.maxSpin1.value()]
else:
useScale = None
if ui.rawRadio.isChecked():
ui.rawImg.setImage(data[ptr%data.shape[0]], lut=useLut, levels=useScale)
ui.stack.setCurrentIndex(1)
elif ui.rawGLRadio.isChecked():
ui.rawGLImg.setImage(data[ptr%data.shape[0]], lut=useLut, levels=useScale)
ui.stack.setCurrentIndex(2)
else:
img.setImage(data[ptr%data.shape[0]], autoLevels=False, levels=useScale, lut=useLut, autoDownsample=downsample)
ui.stack.setCurrentIndex(0)
#img.setImage(data[ptr%data.shape[0]], autoRange=False)
ptr += 1
now = ptime.time()
dt = now - lastTime
lastTime = now
if fps is None:
fps = 1.0/dt
else:
s = np.clip(dt*3., 0, 1)
fps = fps * (1-s) + (1.0/dt) * s
ui.fpsLabel.setText('%0.2f fps' % fps)
app.processEvents() ## force complete redraw for every plot
timer = QtCore.QTimer()
timer.timeout.connect(update)
timer.start(0)
if __name__ == '__main__':
pg.exec()
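# Illustrative command lines, sketched from the argparse options defined above
# (results depend heavily on the machine and the chosen backend):
#
#   python VideoSpeedTest.py --dtype uint16 --size 1024x1024 --levels 0,4096
#   python VideoSpeedTest.py --cuda --image-mode rgb --lut --frames 10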
| 32.21 | 214 | 0.651868 | [
"MIT"
] | 3DAlgoLab/pyqtgraph | examples/VideoSpeedTest.py | 9,663 | Python |
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.utils import override_settings
from wagtail.tests.testapp.models import SingleEventPage
from wagtail.tests.testapp.rich_text import CustomRichTextArea
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailadmin.rich_text import HalloRichTextArea, get_rich_text_editor_widget
from wagtail.wagtailcore.models import Page, get_page_models
from wagtail.wagtailcore.rich_text import RichText
class BaseRichTextEditHandlerTestCase(TestCase):
def _clear_edit_handler_cache(self):
"""
These tests generate new EditHandlers with different settings. The
cached edit handlers should be cleared before and after each test run
to ensure that no changes leak through to other tests.
"""
from wagtail.tests.testapp.models import DefaultRichBlockFieldPage
block_page_edit_handler = DefaultRichBlockFieldPage.get_edit_handler()
if block_page_edit_handler._form_class:
rich_text_block = block_page_edit_handler._form_class.base_fields['body'].block.child_blocks['rich_text']
if hasattr(rich_text_block, 'field'):
del rich_text_block.field
for page_class in get_page_models():
page_class.get_edit_handler.cache_clear()
def setUp(self):
super(BaseRichTextEditHandlerTestCase, self).setUp()
self._clear_edit_handler_cache()
def tearDown(self):
self._clear_edit_handler_cache()
super(BaseRichTextEditHandlerTestCase, self).tearDown()
class TestGetRichTextEditorWidget(TestCase):
@override_settings()
def test_default(self):
# Simulate the absence of a setting
if hasattr(settings, 'WAGTAILADMIN_RICH_TEXT_EDITORS'):
del settings.WAGTAILADMIN_RICH_TEXT_EDITORS
self.assertIsInstance(get_rich_text_editor_widget(), HalloRichTextArea)
@override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
'default': {
'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'
},
})
def test_overridden_default_editor(self):
self.assertIsInstance(get_rich_text_editor_widget(), CustomRichTextArea)
@override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
'custom': {
'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'
},
})
def test_custom_editor_without_default(self):
self.assertIsInstance(get_rich_text_editor_widget('custom'), CustomRichTextArea)
@override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
'default': {
'WIDGET': 'wagtail.wagtailadmin.rich_text.HalloRichTextArea'
},
'custom': {
'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'
},
})
def test_custom_editor_with_default(self):
self.assertIsInstance(get_rich_text_editor_widget(), HalloRichTextArea)
self.assertIsInstance(get_rich_text_editor_widget('custom'), CustomRichTextArea)
@override_settings()
class TestDefaultRichText(BaseRichTextEditHandlerTestCase, WagtailTestUtils):
def setUp(self):
super(TestDefaultRichText, self).setUp()
# Find root page
self.root_page = Page.objects.get(id=2)
self.login()
# Simulate the absence of a setting
if hasattr(settings, 'WAGTAILADMIN_RICH_TEXT_EDITORS'):
del settings.WAGTAILADMIN_RICH_TEXT_EDITORS
def test_default_editor_in_rich_text_field(self):
response = self.client.get(reverse(
'wagtailadmin_pages:add', args=('tests', 'defaultrichtextfieldpage', self.root_page.id)
))
# Check status code
self.assertEqual(response.status_code, 200)
# Check that hallo (the default editor for now) is used
self.assertContains(response, 'makeHalloRichTextEditable("id_body");')
def test_default_editor_in_rich_text_block(self):
response = self.client.get(reverse(
'wagtailadmin_pages:add', args=('tests', 'defaultrichblockfieldpage', self.root_page.id)
))
# Check status code
self.assertEqual(response.status_code, 200)
# Check that hallo (the default editor for now) is used
self.assertContains(response, 'makeHalloRichTextEditable("__PREFIX__-value");')
@override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
'default': {
'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'
},
})
class TestOverriddenDefaultRichText(BaseRichTextEditHandlerTestCase, WagtailTestUtils):
def setUp(self):
super(TestOverriddenDefaultRichText, self).setUp()
# Find root page
self.root_page = Page.objects.get(id=2)
self.login()
def test_overridden_default_editor_in_rich_text_field(self):
response = self.client.get(reverse(
'wagtailadmin_pages:add', args=('tests', 'defaultrichtextfieldpage', self.root_page.id)
))
# Check status code
self.assertEqual(response.status_code, 200)
# Check that hallo (default editor by now) was replaced with fake editor
self.assertNotContains(response, 'makeHalloRichTextEditable("id_body");')
self.assertContains(response, 'customEditorInitScript("id_body");')
def test_overridden_default_editor_in_rich_text_block(self):
response = self.client.get(reverse(
'wagtailadmin_pages:add', args=('tests', 'defaultrichblockfieldpage', self.root_page.id)
))
# Check status code
self.assertEqual(response.status_code, 200)
# Check that hallo (default editor by now) was replaced with fake editor
self.assertNotContains(response, 'makeHalloRichTextEditable("__PREFIX__-value");')
self.assertContains(response, 'customEditorInitScript("__PREFIX__-value");')
@override_settings(WAGTAILADMIN_RICH_TEXT_EDITORS={
'default': {
'WIDGET': 'wagtail.wagtailadmin.rich_text.HalloRichTextArea'
},
'custom': {
'WIDGET': 'wagtail.tests.testapp.rich_text.CustomRichTextArea'
},
})
class TestCustomDefaultRichText(BaseRichTextEditHandlerTestCase, WagtailTestUtils):
def setUp(self):
super(TestCustomDefaultRichText, self).setUp()
# Find root page
self.root_page = Page.objects.get(id=2)
self.login()
def test_custom_editor_in_rich_text_field(self):
response = self.client.get(reverse(
'wagtailadmin_pages:add', args=('tests', 'customrichtextfieldpage', self.root_page.id)
))
# Check status code
self.assertEqual(response.status_code, 200)
# Check that hallo (default editor by now) was replaced with fake editor
self.assertNotContains(response, 'makeHalloRichTextEditable("id_body");')
self.assertContains(response, 'customEditorInitScript("id_body");')
def test_custom_editor_in_rich_text_block(self):
response = self.client.get(reverse(
'wagtailadmin_pages:add', args=('tests', 'customrichblockfieldpage', self.root_page.id)
))
# Check status code
self.assertEqual(response.status_code, 200)
# Check that hallo (default editor by now) was replaced with fake editor
self.assertNotContains(response, 'makeHalloRichTextEditable("__PREFIX__-value");')
self.assertContains(response, 'customEditorInitScript("__PREFIX__-value");')
class TestRichTextValue(TestCase):
def setUp(self):
self.root_page = Page.objects.get(id=2)
self.single_event_page = SingleEventPage(
title="foo",
location='the moon', audience='public',
cost='free', date_from='2001-01-01',
)
self.root_page.add_child(instance=self.single_event_page)
def test_render(self):
text = '<p>To the <a linktype="page" id="{}">moon</a>!</p>'.format(
self.single_event_page.id
)
value = RichText(text)
result = str(value)
expected = (
'<div class="rich-text"><p>To the <a href="'
'/foo/pointless-suffix/">moon</a>!</p></div>')
self.assertEqual(result, expected)
| 36.90625 | 117 | 0.697835 | [
"BSD-3-Clause"
] | Girbons/wagtail | wagtail/wagtailadmin/tests/test_rich_text.py | 8,267 | Python |
#!/usr/bin/env python
# Copyright (c) 2018, The Regents of the University of California
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OF THE UNIVERSITY OF CALIFORNIA
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import math
import serial
import sys
import threading
import rospy
import std_msgs.msg
import tf
class PololuSerial(object):
def __init__(self, tty_device='/dev/ttyACM0'):
self._serial = serial.Serial(tty_device, timeout=0.2)
self._lock = threading.Lock()
def _SendCommand(self, command_sequence, reply_length=0):
# TODO(shengye): Check command_sequence is an iterable of integers [0, 255]
buf_out = bytearray(command_sequence)
with self._lock:
self._serial.write(buf_out)
if reply_length > 0:
buf_in = bytearray(self._serial.read(reply_length))
assert len(buf_in) == reply_length
else:
buf_in = None
return buf_in
def Close(self):
with self._lock:
self._serial.close()
self._serial = None
def SetTarget(self, target_val, channel=0):
command = [0x84, channel, target_val & 0x7F, (target_val >> 7) & 0x7F]
self._SendCommand(command)
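# Worked example of the 7-bit split used by the command methods in this class
# (hand-computed from SetTarget above; the value is illustrative, not from a datasheet):
# SetTarget(6000, channel=0) sends [0x84, 0x00, 0x70, 0x2E],
# since 6000 & 0x7F == 0x70 and (6000 >> 7) & 0x7F == 0x2E.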
def SetSpeed(self, speed_val, channel=0):
command = [0x87, channel, speed_val & 0x7F, (speed_val >> 7) & 0x7F]
self._SendCommand(command)
def SetAcceleration(self, acc_val, channel=0):
command = [0x89, channel, acc_val & 0x7F, (acc_val >> 7) & 0x7F]
self._SendCommand(command)
def GetPos(self, channel=0):
command = [0x90, channel]
result = self._SendCommand(command, 2)
return_val = (result[1] << 8) | result[0]
return return_val
def main(argv):
rospy.init_node("head_laser_servo_tf")
# Parameters
tty_device = rospy.get_param("~tty_device", "/dev/ttyACM0")
acceleration = rospy.get_param("~acceleration", 20)
speed = rospy.get_param("~speed", 10)
min_val = rospy.get_param("~min_val", 885 * 4)
min_deg = rospy.get_param("~min_deg", -90)
max_val = rospy.get_param("~max_val", 1900 * 4)
max_deg = rospy.get_param("~max_deg", 0)
default_deg = rospy.get_param("~default_deg", -90)
fixed_frame = rospy.get_param("~fixed_frame", "head_laser_servo_base")
rotating_frame = rospy.get_param("~rotating_frame",
"head_laser_servo_mount")
time_adj = rospy.get_param("~time_adj", 0.125)
tf_pub_rate = rospy.get_param("~tf_pub_rate", 20)
dev = PololuSerial(tty_device)
dev.SetAcceleration(acceleration)
dev.SetSpeed(speed)
tf_broadcaster = tf.TransformBroadcaster()
disable_tf_publisher = [False]
latest_deg = [min_deg]
def MoveToDeg(target_deg):
target = int((target_deg - min_deg) / (max_deg - min_deg) *
(max_val - min_val) + min_val)
dev.SetTarget(target)
pos = float(dev.GetPos())
disable_tf_publisher[0] = True
while pos != target:
deg = ((pos - min_val) / (max_val - min_val) * (max_deg - min_deg)
+ min_deg)
tf_broadcaster.sendTransform(
(0, 0, 0),
tf.transformations.quaternion_from_euler(
0, -deg / 180.0 * math.pi, 0),
rospy.Time.now() + rospy.Duration(time_adj),
rotating_frame,
fixed_frame)
latest_deg[0] = deg
rospy.loginfo("Degree: %f, Value: %f", deg, pos)
pos = float(dev.GetPos())
disable_tf_publisher[0] = False
def HeadLaserAngleCallback(data):
angle = data.data
if angle < min_deg or angle > max_deg:
rospy.logerr("%f is not between [%f, %f]", angle, min_deg, max_deg)
else:
MoveToDeg(angle)
MoveToDeg(min_deg)
MoveToDeg(max_deg)
MoveToDeg(default_deg)
rospy.Subscriber("/head_laser/angle", std_msgs.msg.Float64,
HeadLaserAngleCallback)
rospy.loginfo("Ready to serve.")
tf_pub_rate_rospy_rate = rospy.Rate(tf_pub_rate)
while not rospy.is_shutdown():
if not disable_tf_publisher[0]:
tf_broadcaster.sendTransform(
(0, 0, 0),
tf.transformations.quaternion_from_euler(
0, -latest_deg[0] / 180.0 * math.pi, 0),
rospy.Time.now() + rospy.Duration(time_adj),
rotating_frame,
fixed_frame)
tf_pub_rate_rospy_rate.sleep()
dev.Close()
if __name__ == "__main__":
main(sys.argv)
| 33.205882 | 79 | 0.692117 | [
"BSD-3-Clause"
] | CogRob/TritonBot | cogrob_ros/cogrob_pololu_lidar/scripts/cogrob_pololu_lidar.py | 5,645 | Python |
"""
The main purpose of this module is to expose LinkCollector.collect_links().
"""
import cgi
import functools
import itertools
import logging
import mimetypes
import os
import re
from collections import OrderedDict
from pip._vendor import html5lib, requests
from pip._vendor.distlib.compat import unescape
from pip._vendor.requests.exceptions import HTTPError, RetryError, SSLError
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._internal.models.link import Link
from pip._internal.utils.filetypes import ARCHIVE_EXTENSIONS
from pip._internal.utils.misc import pairwise, redact_auth_from_url
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import path_to_url, url_to_path
from pip._internal.vcs import is_url, vcs
if MYPY_CHECK_RUNNING:
from typing import (
Callable, Iterable, List, MutableMapping, Optional,
Protocol, Sequence, Tuple, TypeVar, Union,
)
import xml.etree.ElementTree
from pip._vendor.requests import Response
from pip._internal.models.search_scope import SearchScope
from pip._internal.network.session import PipSession
HTMLElement = xml.etree.ElementTree.Element
ResponseHeaders = MutableMapping[str, str]
# Used in the @lru_cache polyfill.
F = TypeVar('F')
class LruCache(Protocol):
def __call__(self, maxsize=None):
# type: (Optional[int]) -> Callable[[F], F]
raise NotImplementedError
logger = logging.getLogger(__name__)
# Fallback to noop_lru_cache in Python 2
# TODO: this can be removed when python 2 support is dropped!
def noop_lru_cache(maxsize=None):
# type: (Optional[int]) -> Callable[[F], F]
def _wrapper(f):
# type: (F) -> F
return f
return _wrapper
_lru_cache = getattr(functools, "lru_cache", noop_lru_cache) # type: LruCache
def _match_vcs_scheme(url):
# type: (str) -> Optional[str]
"""Look for VCS schemes in the URL.
Returns the matched VCS scheme, or None if there's no match.
"""
for scheme in vcs.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
return scheme
return None
def _is_url_like_archive(url):
# type: (str) -> bool
"""Return whether the URL looks like an archive.
"""
filename = Link(url).filename
for bad_ext in ARCHIVE_EXTENSIONS:
if filename.endswith(bad_ext):
return True
return False
class _NotHTML(Exception):
def __init__(self, content_type, request_desc):
# type: (str, str) -> None
super(_NotHTML, self).__init__(content_type, request_desc)
self.content_type = content_type
self.request_desc = request_desc
def _ensure_html_header(response):
# type: (Response) -> None
"""Check the Content-Type header to ensure the response contains HTML.
Raises `_NotHTML` if the content type is not text/html.
"""
content_type = response.headers.get("Content-Type", "")
if not content_type.lower().startswith("text/html"):
raise _NotHTML(content_type, response.request.method)
class _NotHTTP(Exception):
pass
def _ensure_html_response(url, session):
# type: (str, PipSession) -> None
"""Send a HEAD request to the URL, and ensure the response contains HTML.
Raises `_NotHTTP` if the URL is not available for a HEAD request, or
`_NotHTML` if the content type is not text/html.
"""
scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url)
if scheme not in {'http', 'https'}:
raise _NotHTTP()
resp = session.head(url, allow_redirects=True)
resp.raise_for_status()
_ensure_html_header(resp)
def _get_html_response(url, session):
# type: (str, PipSession) -> Response
"""Access an HTML page with GET, and return the response.
This consists of three parts:
1. If the URL looks suspiciously like an archive, send a HEAD first to
check the Content-Type is HTML, to avoid downloading a large file.
Raise `_NotHTTP` if the content type cannot be determined, or
`_NotHTML` if it is not HTML.
2. Actually perform the request. Raise HTTP exceptions on network failures.
3. Check the Content-Type header to make sure we got HTML, and raise
`_NotHTML` otherwise.
"""
if _is_url_like_archive(url):
_ensure_html_response(url, session=session)
logger.debug('Getting page %s', redact_auth_from_url(url))
resp = session.get(
url,
headers={
"Accept": "text/html",
# We don't want to blindly return cached data for
# /simple/, because authors generally expect that
# twine upload && pip install will function, but if
# they've done a pip install in the last ~10 minutes
# it won't. Thus by setting this to zero we will not
# blindly use any cached data, however the benefit of
# using max-age=0 instead of no-cache, is that we will
# still support conditional requests, so we will still
# minimize traffic sent in cases where the page hasn't
# changed at all, we will just always incur the round
# trip for the conditional GET now instead of only
# once per 10 minutes.
# For more information, please see pypa/pip#5670.
"Cache-Control": "max-age=0",
},
)
resp.raise_for_status()
# The check for archives above only works if the url ends with
# something that looks like an archive. However, that is not a
# requirement of a URL. Unless we issue a HEAD request on every
# url, we cannot know ahead of time for sure if something is HTML
# or not. However, we can check after we've downloaded it.
_ensure_html_header(resp)
return resp
def _get_encoding_from_headers(headers):
# type: (ResponseHeaders) -> Optional[str]
"""Determine if we have any encoding information in our headers.
"""
if headers and "Content-Type" in headers:
content_type, params = cgi.parse_header(headers["Content-Type"])
if "charset" in params:
return params['charset']
return None
def _determine_base_url(document, page_url):
# type: (HTMLElement, str) -> str
"""Determine the HTML document's base URL.
This looks for a ``<base>`` tag in the HTML document. If present, its href
attribute denotes the base URL of anchor tags in the document. If there is
no such tag (or if it does not have a valid href attribute), the HTML
file's URL is used as the base URL.
:param document: An HTML document representation. The current
implementation expects the result of ``html5lib.parse()``.
:param page_url: The URL of the HTML document.
"""
for base in document.findall(".//base"):
href = base.get("href")
if href is not None:
return href
return page_url
def _clean_url_path_part(part):
# type: (str) -> str
"""
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
"""
# We unquote prior to quoting to make sure nothing is double quoted.
return urllib_parse.quote(urllib_parse.unquote(part))
def _clean_file_url_path(part):
# type: (str) -> str
"""
Clean the first part of a URL path that corresponds to a local
filesystem path (i.e. the first part after splitting on "@" characters).
"""
# We unquote prior to quoting to make sure nothing is double quoted.
# Also, on Windows the path part might contain a drive letter which
# should not be quoted. On Linux where drive letters do not
# exist, the colon should be quoted. We rely on urllib.request
# to do the right thing here.
return urllib_request.pathname2url(urllib_request.url2pathname(part))
# Split on "@" and on the percent-encoded "/" ("%2F") so these reserved characters are preserved as-is.
_reserved_chars_re = re.compile('(@|%2F)', re.IGNORECASE)
def _clean_url_path(path, is_local_path):
# type: (str, bool) -> str
"""
Clean the path portion of a URL.
"""
if is_local_path:
clean_func = _clean_file_url_path
else:
clean_func = _clean_url_path_part
# Split on the reserved characters prior to cleaning so that
# revision strings in VCS URLs are properly preserved.
parts = _reserved_chars_re.split(path)
cleaned_parts = []
for to_clean, reserved in pairwise(itertools.chain(parts, [''])):
cleaned_parts.append(clean_func(to_clean))
# Normalize %xx escapes (e.g. %2f -> %2F)
cleaned_parts.append(reserved.upper())
return ''.join(cleaned_parts)
def _clean_link(url):
# type: (str) -> str
"""
Make sure a link is fully quoted.
For example, if ' ' occurs in the URL, it will be replaced with "%20",
and without double-quoting other characters.
"""
# Split the URL into parts according to the general structure
# `scheme://netloc/path;parameters?query#fragment`.
result = urllib_parse.urlparse(url)
# If the netloc is empty, then the URL refers to a local filesystem path.
is_local_path = not result.netloc
path = _clean_url_path(result.path, is_local_path=is_local_path)
return urllib_parse.urlunparse(result._replace(path=path))
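# Illustrative behaviour, hand-traced from _clean_link() and _clean_url_path() above:
#   _clean_link('https://example.com/some path') -> 'https://example.com/some%20path'
#   _clean_link('git+https://example.com/repo@v1.0') keeps the '@' revision marker intact.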
def _create_link_from_element(
anchor, # type: HTMLElement
page_url, # type: str
base_url, # type: str
):
# type: (...) -> Optional[Link]
"""
Convert an anchor element in a simple repository page to a Link.
"""
href = anchor.get("href")
if not href:
return None
url = _clean_link(urllib_parse.urljoin(base_url, href))
pyrequire = anchor.get('data-requires-python')
pyrequire = unescape(pyrequire) if pyrequire else None
yanked_reason = anchor.get('data-yanked')
if yanked_reason:
# This is a unicode string in Python 2 (and 3).
yanked_reason = unescape(yanked_reason)
link = Link(
url,
comes_from=page_url,
requires_python=pyrequire,
yanked_reason=yanked_reason,
)
return link
class CacheablePageContent(object):
def __init__(self, page):
# type: (HTMLPage) -> None
assert page.cache_link_parsing
self.page = page
def __eq__(self, other):
# type: (object) -> bool
return (isinstance(other, type(self)) and
self.page.url == other.page.url)
def __hash__(self):
# type: () -> int
return hash(self.page.url)
def with_cached_html_pages(
fn, # type: Callable[[HTMLPage], Iterable[Link]]
):
# type: (...) -> Callable[[HTMLPage], List[Link]]
"""
Given a function that parses an Iterable[Link] from an HTMLPage, cache the
function's result (keyed by CacheablePageContent), unless the HTMLPage
`page` has `page.cache_link_parsing == False`.
"""
@_lru_cache(maxsize=None)
def wrapper(cacheable_page):
# type: (CacheablePageContent) -> List[Link]
return list(fn(cacheable_page.page))
@functools.wraps(fn)
def wrapper_wrapper(page):
# type: (HTMLPage) -> List[Link]
if page.cache_link_parsing:
return wrapper(CacheablePageContent(page))
return list(fn(page))
return wrapper_wrapper
@with_cached_html_pages
def parse_links(page):
# type: (HTMLPage) -> Iterable[Link]
"""
Parse an HTML document, and yield its anchor elements as Link objects.
"""
document = html5lib.parse(
page.content,
transport_encoding=page.encoding,
namespaceHTMLElements=False,
)
url = page.url
base_url = _determine_base_url(document, url)
for anchor in document.findall(".//a"):
link = _create_link_from_element(
anchor,
page_url=url,
base_url=base_url,
)
if link is None:
continue
yield link
class HTMLPage(object):
"""Represents one page, along with its URL"""
def __init__(
self,
content, # type: bytes
encoding, # type: Optional[str]
url, # type: str
cache_link_parsing=True, # type: bool
):
# type: (...) -> None
"""
:param encoding: the encoding to decode the given content.
:param url: the URL from which the HTML was downloaded.
:param cache_link_parsing: whether links parsed from this page's url
should be cached. PyPI index urls should
have this set to False, for example.
"""
self.content = content
self.encoding = encoding
self.url = url
self.cache_link_parsing = cache_link_parsing
def __str__(self):
# type: () -> str
return redact_auth_from_url(self.url)
def _handle_get_page_fail(
link, # type: Link
reason, # type: Union[str, Exception]
meth=None # type: Optional[Callable[..., None]]
):
# type: (...) -> None
if meth is None:
meth = logger.debug
meth("Could not fetch URL %s: %s - skipping", link, reason)
def _make_html_page(response, cache_link_parsing=True):
# type: (Response, bool) -> HTMLPage
encoding = _get_encoding_from_headers(response.headers)
return HTMLPage(
response.content,
encoding=encoding,
url=response.url,
cache_link_parsing=cache_link_parsing)
def _get_html_page(link, session=None):
# type: (Link, Optional[PipSession]) -> Optional[HTMLPage]
if session is None:
raise TypeError(
"_get_html_page() missing 1 required keyword argument: 'session'"
)
url = link.url.split('#', 1)[0]
# Check for VCS schemes that do not support lookup as web pages.
vcs_scheme = _match_vcs_scheme(url)
if vcs_scheme:
logger.debug('Cannot look at %s URL %s', vcs_scheme, link)
return None
# Tack index.html onto file:// URLs that point to directories
scheme, _, path, _, _, _ = urllib_parse.urlparse(url)
if (scheme == 'file' and os.path.isdir(urllib_request.url2pathname(path))):
# add trailing slash if not present so urljoin doesn't trim
# final segment
if not url.endswith('/'):
url += '/'
url = urllib_parse.urljoin(url, 'index.html')
logger.debug(' file: URL is directory, getting %s', url)
try:
resp = _get_html_response(url, session=session)
except _NotHTTP:
logger.debug(
'Skipping page %s because it looks like an archive, and cannot '
'be checked by HEAD.', link,
)
except _NotHTML as exc:
logger.warning(
'Skipping page %s because the %s request got Content-Type: %s. '
'The only supported Content-Type is text/html',
link, exc.request_desc, exc.content_type,
)
except HTTPError as exc:
_handle_get_page_fail(link, exc)
except RetryError as exc:
_handle_get_page_fail(link, exc)
except SSLError as exc:
reason = "There was a problem confirming the ssl certificate: "
reason += str(exc)
_handle_get_page_fail(link, reason, meth=logger.info)
except requests.ConnectionError as exc:
_handle_get_page_fail(link, "connection error: {}".format(exc))
except requests.Timeout:
_handle_get_page_fail(link, "timed out")
else:
return _make_html_page(resp,
cache_link_parsing=link.cache_link_parsing)
return None
def _remove_duplicate_links(links):
# type: (Iterable[Link]) -> List[Link]
"""
Return a list of links, with duplicates removed and ordering preserved.
"""
# We preserve the ordering when removing duplicates because we can.
return list(OrderedDict.fromkeys(links))
def group_locations(locations, expand_dir=False):
# type: (Sequence[str], bool) -> Tuple[List[str], List[str]]
"""
Divide a list of locations into two groups: "files" (archives) and "urls."
:return: A pair of lists (files, urls).
"""
files = []
urls = []
# puts the url for the given file path into the appropriate list
def sort_path(path):
# type: (str) -> None
url = path_to_url(path)
if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
urls.append(url)
else:
files.append(url)
for url in locations:
is_local_path = os.path.exists(url)
is_file_url = url.startswith('file:')
if is_local_path or is_file_url:
if is_local_path:
path = url
else:
path = url_to_path(url)
if os.path.isdir(path):
if expand_dir:
path = os.path.realpath(path)
for item in os.listdir(path):
sort_path(os.path.join(path, item))
elif is_file_url:
urls.append(url)
else:
logger.warning(
"Path '{0}' is ignored: "
"it is a directory.".format(path),
)
elif os.path.isfile(path):
sort_path(path)
else:
logger.warning(
"Url '%s' is ignored: it is neither a file "
"nor a directory.", url,
)
elif is_url(url):
# Only add url with clear scheme
urls.append(url)
else:
logger.warning(
"Url '%s' is ignored. It is either a non-existing "
"path or lacks a specific scheme.", url,
)
return files, urls
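# Illustrative split performed by group_locations(); the local wheel path is an
# assumption and must exist on disk for this result:
#   group_locations(['/tmp/wheels/pkg-1.0-py3-none-any.whl', 'https://pypi.org/simple/'])
#   -> files == ['file:///tmp/wheels/pkg-1.0-py3-none-any.whl']
#      urls == ['https://pypi.org/simple/']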
class CollectedLinks(object):
"""
Encapsulates the return value of a call to LinkCollector.collect_links().
The return value includes both URLs to project pages containing package
links, as well as individual package Link objects collected from other
sources.
This info is stored separately as:
(1) links from the configured file locations,
(2) links from the configured find_links, and
(3) urls to HTML project pages, as described by the PEP 503 simple
repository API.
"""
def __init__(
self,
files, # type: List[Link]
find_links, # type: List[Link]
project_urls, # type: List[Link]
):
# type: (...) -> None
"""
:param files: Links from file locations.
:param find_links: Links from find_links.
:param project_urls: URLs to HTML project pages, as described by
the PEP 503 simple repository API.
"""
self.files = files
self.find_links = find_links
self.project_urls = project_urls
class LinkCollector(object):
"""
Responsible for collecting Link objects from all configured locations,
making network requests as needed.
The class's main method is its collect_links() method.
"""
def __init__(
self,
session, # type: PipSession
search_scope, # type: SearchScope
):
# type: (...) -> None
self.search_scope = search_scope
self.session = session
@property
def find_links(self):
# type: () -> List[str]
return self.search_scope.find_links
def fetch_page(self, location):
# type: (Link) -> Optional[HTMLPage]
"""
Fetch an HTML page containing package links.
"""
return _get_html_page(location, session=self.session)
def collect_links(self, project_name):
# type: (str) -> CollectedLinks
"""Find all available links for the given project name.
:return: All the Link objects (unfiltered), as a CollectedLinks object.
"""
search_scope = self.search_scope
index_locations = search_scope.get_index_urls_locations(project_name)
index_file_loc, index_url_loc = group_locations(index_locations)
fl_file_loc, fl_url_loc = group_locations(
self.find_links, expand_dir=True,
)
file_links = [
Link(url) for url in itertools.chain(index_file_loc, fl_file_loc)
]
# We trust every directly linked archive in find_links
find_link_links = [Link(url, '-f') for url in self.find_links]
# We trust every url that the user has given us whether it was given
# via --index-url or --find-links.
# We want to filter out anything that does not have a secure origin.
url_locations = [
link for link in itertools.chain(
# Mark PyPI indices as "cache_link_parsing == False" -- this
# will avoid caching the result of parsing the page for links.
(Link(url, cache_link_parsing=False) for url in index_url_loc),
(Link(url) for url in fl_url_loc),
)
if self.session.is_secure_origin(link)
]
url_locations = _remove_duplicate_links(url_locations)
lines = [
'{} location(s) to search for versions of {}:'.format(
len(url_locations), project_name,
),
]
for link in url_locations:
lines.append('* {}'.format(link))
logger.debug('\n'.join(lines))
return CollectedLinks(
files=file_links,
find_links=find_link_links,
project_urls=url_locations,
)
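# Minimal usage sketch for LinkCollector; constructing the session and search scope
# is an assumption about the surrounding pip internals, not part of this module:
#
#   from pip._internal.models.search_scope import SearchScope
#   from pip._internal.network.session import PipSession
#
#   session = PipSession()
#   search_scope = SearchScope.create(find_links=[], index_urls=['https://pypi.org/simple'])
#   collector = LinkCollector(session=session, search_scope=search_scope)
#   collected = collector.collect_links('pip')
#   # collected.project_urls, collected.find_links and collected.files hold the Link objects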
| 32.59276 | 79 | 0.630941 | [
"MIT"
] | FFY00/pip | src/pip/_internal/index/collector.py | 21,609 | Python |
import tarfile
import re
import logging
from dipper.sources.Source import Source
from dipper.models.assoc.OrthologyAssoc import OrthologyAssoc
from dipper.models.Model import Model
from dipper.models.Dataset import Dataset
from dipper import config
from dipper import curie_map
__author__ = 'nicole'
logger = logging.getLogger(__name__)
class Panther(Source):
"""
The pairwise orthology calls from Panther DB:
http://pantherdb.org/ encompass 22 species,
from the RefGenome and HCOP projects.
Here, we map the orthology classes to RO homology relationships
This resource may be extended in the future with additional species.
This currently makes a graph of orthologous relationships between genes,
with the assumption that gene metadata (labels, equivalent ids) are
provided from other sources.
Gene families are nominally created from the orthology files,
though these are incomplete with no hierarchical (subfamily) information.
This will get updated from the HMM files in the future.
Note that there is a fair amount of identifier cleanup performed to align
with our standard CURIE prefixes.
The test graph of data is output based on configured
"protein" identifiers in conf.json.
By default, this will produce a file with ALL orthologous relationships.
IF YOU WANT ONLY A SUBSET, YOU NEED TO PROVIDE A FILTER UPON CALLING THIS
WITH THE TAXON IDS
"""
PNTHDL = 'ftp://ftp.pantherdb.org/ortholog/current_release'
files = {
'refgenome': {
'file': 'RefGenomeOrthologs.tar.gz',
'url': PNTHDL+'/RefGenomeOrthologs.tar.gz'},
'hcop': {
'file': 'Orthologs_HCOP.tar.gz',
'url': PNTHDL+'/Orthologs_HCOP.tar.gz'}
}
def __init__(self, graph_type, are_bnodes_skolemized, tax_ids=None):
super().__init__(graph_type, are_bnodes_skolemized, 'panther')
self.tax_ids = tax_ids
self.dataset = Dataset(
'panther', 'Protein ANalysis THrough Evolutionary Relationships',
'http://pantherdb.org/', None,
'http://www.pantherdb.org/terms/disclaimer.jsp')
# # Defaults
# if self.tax_ids is None:
# self.tax_ids = [9606, 10090, 7955]
if 'test_ids' not in config.get_config() \
or 'protein' not in config.get_config()['test_ids']:
logger.warning("not configured with gene test ids.")
else:
self.test_ids = config.get_config()['test_ids']['protein']
return
def fetch(self, is_dl_forced=False):
"""
:return: None
"""
self.get_files(is_dl_forced)
# TODO the version number is tricky to get
# we can't get it from redirects of the url
# TODO use the remote timestamp of the file?
return
def parse(self, limit=None):
"""
:return: None
"""
if self.testOnly:
self.testMode = True
if self.tax_ids is None:
logger.info(
"No taxon filter set; Dumping all orthologous associations.")
else:
logger.info(
"Only the following taxa will be dumped: %s",
str(self.tax_ids))
self._get_orthologs(limit)
return
def _get_orthologs(self, limit):
"""
This will process each of the specified pairwise orthology files,
creating orthology associations based on the specified orthology code.
this currently assumes that each of the orthology files is identically
formatted. Relationships are made between genes here.
There is also a nominal amount of identifier re-formatting:
MGI:MGI --> MGI
Ensembl --> ENSEMBL
we skip any genes where we don't know how to map the gene identifiers.
For example, Gene:Huwe1 for RAT is not an identifier, so we skip any
mappings to this identifier. Often, there are two entries for the
same gene (based on an equivalent UniProt id), and so we are not actually
losing any information.
We presently have a hard-coded filter to select only orthology
relationships where one of the pair is in our species of interest
(Mouse and Human, for the moment).
This will be added as a configurable parameter in the future.
Genes are also added to a grouping class defined with a PANTHER id.
Triples:
<gene1_id> RO:othologous <gene2_id>
<assoc_id> :hasSubject <gene1_id>
<assoc_id> :hasObject <gene2_id>
<assoc_id> :hasPredicate <RO:orthologous>
<assoc_id> dc:evidence ECO:phylogenetic_evidence
<panther_id> a DATA:gene_family
<panther_id> RO:has_member <gene1_id>
<panther_id> RO:has_member <gene2_id>
:param limit:
:return:
"""
logger.info("getting orthologs")
if self.testMode:
g = self.testgraph
else:
g = self.graph
model = Model(g)
unprocessed_gene_ids = set()
for k in self.files.keys():
f = '/'.join((self.rawdir, self.files[k]['file']))
matchcounter = 0
mytar = tarfile.open(f, 'r:gz')
# assume that the first entry is the item
fname = mytar.getmembers()[0]
logger.info("Parsing %s", fname.name)
line_counter = 0
with mytar.extractfile(fname) as csvfile:
for line in csvfile:
# skip comment lines
if re.match(r'^#', line.decode()):
logger.info("Skipping header line")
continue
line_counter += 1
# a little feedback to the user since there's so many
if line_counter % 1000000 == 0:
logger.info(
"Processed %d lines from %s",
line_counter, fname.name)
line = line.decode().strip()
# parse each row. ancestor_taxon is unused
# HUMAN|Ensembl=ENSG00000184730|UniProtKB=Q0VD83
# MOUSE|MGI=MGI=2176230|UniProtKB=Q8VBT6
# LDO Euarchontoglires PTHR15964
(a, b, orthology_class, ancestor_taxon,
panther_id) = line.split('\t')
(species_a, gene_a, protein_a) = a.split('|')
(species_b, gene_b, protein_b) = b.split('|')
# skip the entries that don't have homolog relationships
# with the test ids
if self.testMode and not (
re.sub(r'UniProtKB=', '',
protein_a) in self.test_ids or
re.sub(r'UniProtKB=', '', protein_b)
in self.test_ids):
continue
# map the taxon abbreviations to ncbi taxon ids
taxon_a = self._map_taxon_abbr_to_id(species_a)
taxon_b = self._map_taxon_abbr_to_id(species_b)
# ###uncomment the following code block
# if you want to filter based on taxid of favorite animals
# taxids = [9606,10090,10116,7227,7955,6239,8355]
# taxids = [9606] #human only
# retain only those orthologous relationships to genes
# in the specified taxids
# using AND will get you only those associations where
# gene1 AND gene2 are in the taxid list (most-filter)
# using OR will get you any associations where
# gene1 OR gene2 are in the taxid list (some-filter)
if (
self.tax_ids is not None and
(int(re.sub(r'NCBITaxon:', '', taxon_a.rstrip()))
not in self.tax_ids) and
(int(re.sub(
r'NCBITaxon:', '', taxon_b.rstrip())) not in
self.tax_ids)):
continue
else:
matchcounter += 1
if limit is not None and matchcounter > limit:
break
# ### end code block for filtering on taxon
# fix the gene identifiers
gene_a = re.sub(r'=', ':', gene_a)
gene_b = re.sub(r'=', ':', gene_b)
clean_gene = self._clean_up_gene_id(gene_a, species_a)
if clean_gene is None:
unprocessed_gene_ids.add(gene_a)
gene_a = clean_gene
clean_gene = self._clean_up_gene_id(gene_b, species_b)
if clean_gene is None:
unprocessed_gene_ids.add(gene_b)
gene_b = clean_gene
# a special case here; mostly some rat genes
# they use symbols instead of identifiers. will skip
if gene_a is None or gene_b is None:
continue
rel = self._map_orthology_code_to_RO(orthology_class)
evidence_id = 'ECO:0000080' # phylogenetic evidence
# add the association and relevant nodes to graph
assoc = OrthologyAssoc(g, self.name, gene_a, gene_b, rel)
assoc.add_evidence(evidence_id)
# add genes to graph;
# assume labels will be taken care of elsewhere
model.addClassToGraph(gene_a, None)
model.addClassToGraph(gene_b, None)
# might as well add the taxon info for completeness
g.addTriple(
gene_a, model.object_properties['in_taxon'], taxon_a)
g.addTriple(
gene_b, model.object_properties['in_taxon'], taxon_b)
assoc.add_association_to_graph()
# note this is incomplete...
# it won't construct the full family hierarchy,
# just the top-grouping
assoc.add_gene_family_to_graph(
':'.join(('PANTHER', panther_id)))
if not self.testMode \
and limit is not None and line_counter > limit:
break
logger.info("finished processing %s", f)
logger.warning(
"The following gene ids were unable to be processed: %s",
str(unprocessed_gene_ids))
return
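    # Illustrative walk-through of the row-processing loop above (not part of the
    # upstream source; assumes the resulting prefixes exist in the curie_map).
    # A raw PANTHER row such as
    #   HUMAN|Ensembl=ENSG00000184730|UniProtKB=Q0VD83 <tab>
    #   MOUSE|MGI=MGI=2176230|UniProtKB=Q8VBT6 <tab> LDO <tab> Euarchontoglires <tab> PTHR15964
    # would roughly produce:
    #   taxon_a = 'NCBITaxon:9606'              taxon_b = 'NCBITaxon:10090'
    #   gene_a  = 'ENSEMBL:ENSG00000184730'     gene_b  = 'MGI:2176230'
    #   rel     = ortho_rel['least_diverged_orthologous']  (code 'LDO')
    #   family  = 'PANTHER:PTHR15964'           evidence = 'ECO:0000080'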
@staticmethod
def _map_taxon_abbr_to_id(ptax):
"""
Will map the panther-specific taxon abbreviations to NCBI taxon numbers
:param ptax:
:return: NCBITaxon id
"""
taxid = None
ptax_to_taxid_map = {
'ANOCA': 28377, # green lizard
            'ARATH': 3702,  # arabidopsis
'BOVIN': 9913, # cow
'CAEEL': 6239, # worm
'CANFA': 9615, # dog
'CHICK': 9031, # chicken
'DANRE': 7955, # zebrafish
            'DICDI': 44689,  # Dictyostelium discoideum (slime mold)
'DROME': 7227, # drosophila melanogaster
'ECOLI': 562,
'HORSE': 9796, # horses
'HUMAN': 9606, # humans
'MACMU': 9544, # macaque
'MONDO': 13616, # opossum
'MOUSE': 10090, # mouse
            'ORNAN': 9258,  # platypus
'PANTR': 9598, # chimp
'PIG': 9823,
'RAT': 10116,
'SCHPO': 4896, # pombe yeast
'TAKRU': 31033, # pufferfish
'XENTR': 8364, # xenopus
'YEAST': 4932, # yeast
}
if ptax in ptax_to_taxid_map:
taxid = ':'.join(('NCBITaxon', str(ptax_to_taxid_map.get(ptax))))
else:
logger.error("unmapped taxon code %s", ptax)
return taxid
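    # Examples for _map_taxon_abbr_to_id() above (illustrative, not from the
    # original source):
    #   _map_taxon_abbr_to_id('HUMAN')  ->  'NCBITaxon:9606'
    #   _map_taxon_abbr_to_id('MOUSE')  ->  'NCBITaxon:10090'
    #   _map_taxon_abbr_to_id('FOO')    ->  None (an error is logged)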
@staticmethod
def _map_orthology_code_to_RO(ortho):
"""
Map the panther-specific orthology code (P,O,LDO,X,LDX)
to relationship-ontology
identifiers.
:param ortho: orthology code
:return: RO identifier
"""
ortho_rel = OrthologyAssoc.ortho_rel
ro_id = ortho_rel['orthologous'] # in orthology relationship with
ortho_to_ro_map = {
'P': ortho_rel['paralogous'],
'O': ortho_rel['orthologous'],
'LDO': ortho_rel['least_diverged_orthologous'],
'X': ortho_rel['xenologous'],
'LDX': ortho_rel['xenologous']
}
if ortho in ortho_to_ro_map:
ro_id = ortho_to_ro_map.get(ortho)
else:
logger.warning(
"unmapped orthology code %s. Defaulting to 'orthology'", ortho)
return ro_id
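    # Examples for _map_orthology_code_to_RO() above (illustrative, not from the
    # original source):
    #   _map_orthology_code_to_RO('LDO')  ->  ortho_rel['least_diverged_orthologous']
    #   _map_orthology_code_to_RO('???')  ->  ortho_rel['orthologous'], with a warning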
@staticmethod
def _clean_up_gene_id(geneid, sp):
"""
A series of identifier rewriting to conform with
standard gene identifiers.
:param geneid:
:param sp:
:return:
"""
# special case for MGI
geneid = re.sub(r'MGI:MGI:', 'MGI:', geneid)
# rewrite Ensembl --> ENSEMBL
geneid = re.sub(r'Ensembl', 'ENSEMBL', geneid)
# rewrite Gene:CELE --> WormBase
        # these are old-school cosmid identifiers
geneid = re.sub(r'Gene:CELE', 'WormBase:', geneid)
if sp == 'CAEEL':
if re.match(r'(Gene|ENSEMBLGenome):\w+\.\d+', geneid):
geneid = re.sub(
r'(?:Gene|ENSEMBLGenome):(\w+\.\d+)',
r'WormBase:\1', geneid)
if sp == 'DROME':
if re.match(r'(ENSEMBLGenome):\w+\.\d+', geneid):
geneid = re.sub(
r'(?:ENSEMBLGenome):(\w+\.\d+)', r'FlyBase:\1', geneid)
# rewrite GeneID --> NCBIGene
geneid = re.sub(r'GeneID', 'NCBIGene', geneid)
# rewrite Gene:Dmel --> FlyBase
geneid = re.sub(r'Gene:Dmel_', 'FlyBase:', geneid)
# rewrite Gene:CG --> FlyBase:CG
geneid = re.sub(r'Gene:CG', 'FlyBase:CG', geneid)
# rewrite ENSEMBLGenome:FBgn --> FlyBase:FBgn
geneid = re.sub(r'ENSEMBLGenome:FBgn', 'FlyBase:FBgn', geneid)
# rewrite Gene:<ensembl ids> --> ENSEMBL:<id>
geneid = re.sub(r'Gene:ENS', 'ENSEMBL:ENS', geneid)
# rewrite Gene:<Xenbase ids> --> Xenbase:<id>
geneid = re.sub(r'Gene:Xenbase:', 'Xenbase:', geneid)
# TODO this would be much better done as
# if foo not in curie_map:
# if re.match(r'(Gene|ENSEMBLGenome):', geneid) or \
# re.match(r'Gene_ORFName', geneid) or \
# re.match(r'Gene_Name', geneid):
# # logger.warning(
# #"Found an identifier I don't know how to fix (species %s): %s",
# # sp, geneid)
pfxlcl = re.split(r':', geneid)
pfx = pfxlcl[0]
if pfx is None or pfx not in curie_map.get():
logger.warning("No curie prefix for (species %s): %s", sp, geneid)
geneid = None
return geneid
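    # Examples for _clean_up_gene_id() above (illustrative, not from the original
    # source; assumes the rewritten prefix is present in the curie_map):
    #   _clean_up_gene_id('MGI:MGI:2176230', 'MOUSE')           ->  'MGI:2176230'
    #   _clean_up_gene_id('Ensembl:ENSG00000184730', 'HUMAN')   ->  'ENSEMBL:ENSG00000184730'
    # An identifier whose prefix is not in the curie_map is logged and None is returned.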
def getTestSuite(self):
import unittest
from tests.test_panther import PantherTestCase
test_suite = unittest.TestLoader().loadTestsFromTestCase(
PantherTestCase)
return test_suite
| 37.037825 | 79 | 0.544329 | [
"BSD-3-Clause"
] | putmantime/dipper | dipper/sources/Panther.py | 15,667 | Python |
# -*- coding: utf-8 -*-
#-------------------------------------------------------------------------
# Vulkan CTS
# ----------
#
# Copyright (c) 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------------------
import os
import re
import sys
import copy
from itertools import chain
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "..", "scripts"))
from build.common import DEQP_DIR
from khr_util.format import indentLines, writeInlFile
VULKAN_H = os.path.join(os.path.dirname(__file__), "src", "vulkan.h.in")
VULKAN_DIR = os.path.join(os.path.dirname(__file__), "..", "framework", "vulkan")
INL_HEADER = """\
/* WARNING: This is an auto-generated file. Do not modify, since changes will
* be lost! Modify the generating script instead.
*/\
"""
DEFINITIONS = [
("VK_API_VERSION_1_0", "deUint32"),
("VK_API_VERSION_1_1", "deUint32"),
("VK_MAX_PHYSICAL_DEVICE_NAME_SIZE", "size_t"),
("VK_MAX_EXTENSION_NAME_SIZE", "size_t"),
("VK_MAX_DRIVER_NAME_SIZE_KHR", "size_t"),
("VK_MAX_DRIVER_INFO_SIZE_KHR", "size_t"),
("VK_UUID_SIZE", "size_t"),
("VK_LUID_SIZE", "size_t"),
("VK_MAX_MEMORY_TYPES", "size_t"),
("VK_MAX_MEMORY_HEAPS", "size_t"),
("VK_MAX_DESCRIPTION_SIZE", "size_t"),
("VK_MAX_DEVICE_GROUP_SIZE", "size_t"),
("VK_ATTACHMENT_UNUSED", "deUint32"),
("VK_SUBPASS_EXTERNAL", "deUint32"),
("VK_QUEUE_FAMILY_IGNORED", "deUint32"),
("VK_QUEUE_FAMILY_EXTERNAL", "deUint32"),
("VK_REMAINING_MIP_LEVELS", "deUint32"),
("VK_REMAINING_ARRAY_LAYERS", "deUint32"),
("VK_WHOLE_SIZE", "vk::VkDeviceSize"),
("VK_TRUE", "vk::VkBool32"),
("VK_FALSE", "vk::VkBool32"),
]
PLATFORM_TYPES = [
# VK_KHR_xlib_surface
(["Display","*"], ["XlibDisplayPtr"], "void*"),
(["Window"], ["XlibWindow"], "deUintptr",),
(["VisualID"], ["XlibVisualID"], "deUint32"),
# VK_KHR_xcb_surface
(["xcb_connection_t", "*"], ["XcbConnectionPtr"], "void*"),
(["xcb_window_t"], ["XcbWindow"], "deUintptr"),
(["xcb_visualid_t"], ["XcbVisualid"], "deUint32"),
# VK_KHR_wayland_surface
(["struct", "wl_display","*"], ["WaylandDisplayPtr"], "void*"),
(["struct", "wl_surface", "*"], ["WaylandSurfacePtr"], "void*"),
# VK_KHR_mir_surface
(["MirConnection", "*"], ["MirConnectionPtr"], "void*"),
(["MirSurface", "*"], ["MirSurfacePtr"], "void*"),
# VK_KHR_android_surface
(["ANativeWindow", "*"], ["AndroidNativeWindowPtr"], "void*"),
# VK_KHR_win32_surface
(["HINSTANCE"], ["Win32InstanceHandle"], "void*"),
(["HWND"], ["Win32WindowHandle"], "void*"),
(["HANDLE"], ["Win32Handle"], "void*"),
(["const", "SECURITY_ATTRIBUTES", "*"], ["Win32SecurityAttributesPtr"], "const void*"),
(["AHardwareBuffer", "*"], ["AndroidHardwareBufferPtr"], "void*"),
# VK_EXT_acquire_xlib_display
(["RROutput"], ["RROutput"], "void*")
]
PLATFORM_TYPE_NAMESPACE = "pt"
TYPE_SUBSTITUTIONS = [
("uint8_t", "deUint8"),
("uint16_t", "deUint16"),
("uint32_t", "deUint32"),
("uint64_t", "deUint64"),
("int8_t", "deInt8"),
("int16_t", "deInt16"),
("int32_t", "deInt32"),
("int64_t", "deInt64"),
("bool32_t", "deUint32"),
("size_t", "deUintptr"),
# Platform-specific
("DWORD", "deUint32"),
("HANDLE*", PLATFORM_TYPE_NAMESPACE + "::" + "Win32Handle*"),
("LPCWSTR", "char*"),
]
EXTENSION_POSTFIXES = ["KHR", "EXT", "NV", "NVX", "KHX", "NN", "MVK"]
EXTENSION_POSTFIXES_STANDARD = ["KHR"]
def prefixName (prefix, name):
name = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', name[2:])
name = re.sub(r'([a-zA-Z])([0-9])', r'\1_\2', name)
name = name.upper()
name = name.replace("YCB_CR_", "YCBCR_")
name = name.replace("WIN_32_", "WIN32_")
name = name.replace("8_BIT_", "8BIT_")
name = name.replace("16_BIT_", "16BIT_")
name = name.replace("INT_64_", "INT64_")
name = name.replace("D_3_D_12_", "D3D12_")
name = name.replace("IOSSURFACE_", "IOS_SURFACE_")
name = name.replace("MAC_OS", "MACOS_")
name = name.replace("TEXTURE_LOD", "TEXTURE_LOD_")
name = name.replace("VIEWPORT_W", "VIEWPORT_W_")
name = name.replace("_IDPROPERTIES", "_ID_PROPERTIES")
name = name.replace("PHYSICAL_DEVICE_FLOAT_16_INT_8_FEATURES", "PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES")
return prefix + name
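# Illustrative examples for prefixName() above (not part of the upstream script):
#   prefixName("VK_STRUCTURE_TYPE_", "VkImageCreateInfo") -> "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO"
#   prefixName("HANDLE_TYPE_", "VkCommandBuffer")         -> "HANDLE_TYPE_COMMAND_BUFFER"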
class Version:
def __init__ (self, versionTuple):
self.major = versionTuple[0]
self.minor = versionTuple[1]
self.patch = versionTuple[2]
def getInHex (self):
if self.major == 1 and self.minor == 0 and self.patch == 0:
return "VK_API_VERSION_1_0"
elif self.major == 1 and self.minor == 1 and self.patch == 0:
return "VK_API_VERSION_1_1"
else:
hex = (self.major << 22) | (self.minor << 12) | self.patch
return '0x%Xu' % (hex)
def isStandardVersion (self):
if self.patch != 0:
return False
if self.major != 1:
return False
if self.minor != 1 and self.minor != 0:
return False
return True
def getBestRepresentation (self):
if self.isStandardVersion():
return self.getInHex()
return self.getDefineName()
def getDefineName (self):
return 'VERSION_%d_%d_%d' % (self.major, self.minor, self.patch)
def __hash__ (self):
return (self.major << 22) | (self.minor << 12) | self.patch
def __eq__ (self, other):
return self.major == other.major and self.minor == other.minor and self.patch == other.patch
def __str__ (self):
return self.getBestRepresentation()
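# Illustrative examples for the Version class above (not part of the upstream script):
#   str(Version((1, 0, 0)))                     -> "VK_API_VERSION_1_0"  (standard version)
#   Version((1, 1, 1)).getInHex()               -> "0x401001u"  ((1 << 22) | (1 << 12) | 1)
#   Version((1, 1, 1)).getBestRepresentation()  -> "VERSION_1_1_1"  (non-zero patch level)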
class Handle:
TYPE_DISP = 0
TYPE_NONDISP = 1
def __init__ (self, type, name):
self.type = type
self.name = name
self.alias = None
self.isAlias = False
def getHandleType (self):
return prefixName("HANDLE_TYPE_", self.name)
def checkAliasValidity (self):
pass
def __repr__ (self):
return '%s (%s, %s)' % (self.name, self.alias, self.isAlias)
class Definition:
def __init__ (self, type, name, value):
self.type = type
self.name = name
self.value = value
self.alias = None
self.isAlias = False
def __repr__ (self):
return '%s = %s (%s)' % (self.name, self.value, self.type)
class Enum:
def __init__ (self, name, values):
self.name = name
self.values = values
self.alias = None
self.isAlias = False
def checkAliasValidity (self):
if self.alias != None:
if len(self.values) != len(self.alias.values):
raise Exception("%s has different number of flags than its alias %s." % (self.name, self.alias.name))
for index, value in enumerate(self.values):
aliasVal = self.alias.values[index]
if value[1] != aliasVal[1] or not (value[0].startswith(aliasVal[0]) or aliasVal[0].startswith(value[0])):
raise Exception("Flag %s of %s has different value than %s of %s." % (self.alias.values[index], self.alias.name, value, self.name))
def __repr__ (self):
return '%s (%s) %s' % (self.name, self.alias, self.values)
class Bitfield:
def __init__ (self, name, values):
self.name = name
self.values = values
self.alias = None
self.isAlias = False
def checkAliasValidity (self):
if self.alias != None:
if len(self.values) != len(self.alias.values):
raise Exception("%s has different number of flags than its alias %s." % (self.name, self.alias.name))
for index, value in enumerate(self.values):
aliasVal = self.alias.values[index]
if value[1] != aliasVal[1] or not (value[0].startswith(aliasVal[0]) or aliasVal[0].startswith(value[0])):
raise Exception("Flag %s of %s has different value than %s of %s." % (self.alias.values[index], self.alias.name, value, self.name))
def __repr__ (self):
return '%s (%s)' % (self.name, self.alias)
class Variable:
def __init__ (self, type, name, arraySize):
type = type.replace('*',' *').replace('&',' &')
for src, dst in TYPE_SUBSTITUTIONS:
type = type.replace(src, dst)
self.type = type.split(' ')
for platformType, substitute, compat in PLATFORM_TYPES:
range = self.contains(self.type, platformType)
if range != None:
self.type = self.type[:range[0]]+[PLATFORM_TYPE_NAMESPACE + '::' + substitute[0]] + substitute[1:] + self.type[range[1]:]
break
self.name = name
self.arraySize = arraySize
def contains(self, big, small):
for i in xrange(len(big)-len(small)+1):
for j in xrange(len(small)):
if big[i+j] != small[j]:
break
else:
return i, i+len(small)
return None
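	# Illustrative example for contains() above (not part of the upstream script),
	# which is used to splice platform types into the token list:
	#   contains(['const', 'struct', 'wl_display', '*'], ['struct', 'wl_display', '*']) -> (1, 4)
	#   contains(['deUint32'], ['HWND'])                                                -> None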
def getType (self):
return ' '.join(self.type).replace(' *','*').replace(' &','&')
def getAsString (self, separator):
return '%s%s%s%s' % (self.getType(), separator, self.name, self.arraySize)
def __repr__ (self):
return '<%s> <%s> <%s>' % (self.type, self.name, self.arraySize)
def __eq__ (self, other):
if len(self.type) != len(other.type):
return False
for index, type in enumerate(self.type):
if "*" == type or "&" == type or "const" == type or "volatile" == type:
if type != other.type[index]:
return False
elif type != other.type[index] and \
type not in map(lambda ext: other.type[index] + ext, EXTENSION_POSTFIXES_STANDARD) and \
other.type[index] not in map(lambda ext: type + ext, EXTENSION_POSTFIXES_STANDARD):
return False
return self.arraySize == other.arraySize
def __ne__ (self, other):
return not self == other
class CompositeType:
CLASS_STRUCT = 0
CLASS_UNION = 1
def __init__ (self, typeClass, name, members):
self.typeClass = typeClass
self.name = name
self.members = members
self.alias = None
self.isAlias = False
def getClassName (self):
names = {CompositeType.CLASS_STRUCT: 'struct', CompositeType.CLASS_UNION: 'union'}
return names[self.typeClass]
def checkAliasValidity (self):
if self.alias != None:
if len(self.members) != len(self.alias.members):
raise Exception("%s has different number of members than its alias %s." % (self.name, self.alias.name))
for index, member in enumerate(self.members ):
break
#if member != self.alias.members[index]:
#raise Exception("Member %s of %s is different than core member %s in %s." % (self.alias.members[index], self.alias.name, member, self.name))
#raise Exception("Member ",str(self.alias.members[index])," of ", str(self.alias.name)," is different than core member ", str(member)," in ", str(self.name),".")
def __repr__ (self):
return '%s (%s)' % (self.name, self.alias)
class Function:
TYPE_PLATFORM = 0 # Not bound to anything
TYPE_INSTANCE = 1 # Bound to VkInstance
TYPE_DEVICE = 2 # Bound to VkDevice
def __init__ (self, name, returnType, arguments, apiVersion = None):
self.name = name
self.returnType = returnType
self.arguments = arguments
self.alias = None
self.isAlias = False
self.apiVersion = apiVersion
def getType (self):
# Special functions
if self.name == "vkGetInstanceProcAddr":
return Function.TYPE_PLATFORM
assert len(self.arguments) > 0
firstArgType = self.arguments[0].getType()
if firstArgType in ["VkInstance", "VkPhysicalDevice"]:
return Function.TYPE_INSTANCE
elif firstArgType in ["VkDevice", "VkCommandBuffer", "VkQueue"]:
return Function.TYPE_DEVICE
else:
return Function.TYPE_PLATFORM
def checkAliasValidity (self):
if self.alias != None:
if len(self.arguments) != len(self.alias.arguments):
raise Exception("%s has different number of arguments than its alias %s." % (self.name, self.alias.name))
if self.returnType != self.alias.returnType or not (self.returnType.startswith(self.alias.returnType) or self.alias.returnType.startswith(self.returnType)):
raise Exception("%s has different return value's type than its alias %s." % (self.name, self.alias.name))
for index, argument in enumerate(self.arguments):
if argument != self.alias.arguments[index]:
raise Exception("argument %s: \"%s\" of %s is different than \"%s\" of %s." % (index, self.alias.arguments[index].getAsString(' '), self.alias.name, argument.getAsString(' '), self.name))
def __repr__ (self):
return '%s (%s)' % (self.name, self.alias)
class Extension:
def __init__ (self, name, handles, enums, bitfields, compositeTypes, functions, definitions, additionalDefinitions, versionInCore):
self.name = name
self.definitions = definitions
self.additionalDefs = additionalDefinitions
self.handles = handles
self.enums = enums
self.bitfields = bitfields
self.compositeTypes = compositeTypes
self.functions = functions
self.versionInCore = versionInCore
def __repr__ (self):
return 'EXT:\n%s ->\nENUMS:\n%s\nCOMPOS:\n%s\nFUNCS:\n%s\nBITF:\n%s\nHAND:\n%s\nDEFS:\n%s\n' % (self.name, self.enums, self.compositeTypes, self.functions, self.bitfields, self.handles, self.definitions, self.versionInCore)
class API:
def __init__ (self, definitions, handles, enums, bitfields, compositeTypes, functions, extensions):
self.definitions = definitions
self.handles = handles
self.enums = enums
self.bitfields = bitfields
self.compositeTypes = compositeTypes
self.functions = functions # \note contains extension functions as well
self.extensions = extensions
def readFile (filename):
with open(filename, 'rb') as f:
return f.read()
IDENT_PTRN = r'[a-zA-Z_][a-zA-Z0-9_]*'
TYPE_PTRN = r'[a-zA-Z_][a-zA-Z0-9_ \t*&]*'
def fixupEnumValues (values):
fixed = []
for name, value in values:
if "_BEGIN_RANGE" in name or "_END_RANGE" in name:
continue
fixed.append((name, value))
return fixed
def getInterfaceName (function):
assert function.name[:2] == "vk"
return function.name[2].lower() + function.name[3:]
def getFunctionTypeName (function):
assert function.name[:2] == "vk"
return function.name[2:] + "Func"
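# Illustrative examples (not part of the upstream script): for a Function whose
# name is "vkCreateDevice",
#   getInterfaceName(function)     -> "createDevice"
#   getFunctionTypeName(function)  -> "CreateDeviceFunc"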
def endsWith (str, postfix):
return str[-len(postfix):] == postfix
def splitNameExtPostfix (name):
knownExtPostfixes = EXTENSION_POSTFIXES
for postfix in knownExtPostfixes:
if endsWith(name, postfix):
return (name[:-len(postfix)], postfix)
return (name, "")
def getBitEnumNameForBitfield (bitfieldName):
bitfieldName, postfix = splitNameExtPostfix(bitfieldName)
assert bitfieldName[-1] == "s"
return bitfieldName[:-1] + "Bits" + postfix
def getBitfieldNameForBitEnum (bitEnumName):
bitEnumName, postfix = splitNameExtPostfix(bitEnumName)
assert bitEnumName[-4:] == "Bits"
return bitEnumName[:-4] + "s" + postfix
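# Illustrative examples (not part of the upstream script):
#   getBitEnumNameForBitfield("VkQueueFlags")          -> "VkQueueFlagBits"
#   getBitEnumNameForBitfield("VkDebugReportFlagsEXT") -> "VkDebugReportFlagBitsEXT"
#   getBitfieldNameForBitEnum("VkQueueFlagBits")       -> "VkQueueFlags"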
def parsePreprocDefinedValue (src, name):
value = parsePreprocDefinedValueOptional(src, name)
if value is None:
raise Exception("No such definition: %s" % name)
return value
def parsePreprocDefinedValueOptional (src, name):
definition = re.search(r'#\s*define\s+' + name + r'\s+([^\n]+)\n', src)
if definition is None:
return None
value = definition.group(1).strip()
if value == "UINT32_MAX":
value = "(~0u)"
return value
def parseEnum (name, src):
keyValuePtrn = '(' + IDENT_PTRN + r')\s*=\s*([^\s,}]+)\s*[,}]'
matches = re.findall(keyValuePtrn, src)
return Enum(name, fixupEnumValues(matches))
# \note Parses raw enums, some are mapped to bitfields later
def parseEnums (src):
matches = re.findall(r'typedef enum(\s*' + IDENT_PTRN + r')?\s*{([^}]*)}\s*(' + IDENT_PTRN + r')\s*;', src)
enums = []
for enumname, contents, typename in matches:
enums.append(parseEnum(typename, contents))
return enums
def parseCompositeType (type, name, src):
typeNamePtrn = r'(' + TYPE_PTRN + r')(\s+' + IDENT_PTRN + r')((\[[^\]]+\])*)\s*;'
matches = re.findall(typeNamePtrn, src)
members = [Variable(t.strip(), n.strip(), a.strip()) for t, n, a, _ in matches]
return CompositeType(type, name, members)
def parseCompositeTypes (src):
typeMap = { 'struct': CompositeType.CLASS_STRUCT, 'union': CompositeType.CLASS_UNION }
matches = re.findall(r'typedef (struct|union)(\s*' + IDENT_PTRN + r')?\s*{([^}]*)}\s*(' + IDENT_PTRN + r')\s*;', src)
types = []
for type, structname, contents, typename in matches:
types.append(parseCompositeType(typeMap[type], typename, contents))
return types
def parseHandles (src):
matches = re.findall(r'VK_DEFINE(_NON_DISPATCHABLE|)_HANDLE\((' + IDENT_PTRN + r')\)[ \t]*[\n\r]', src)
handles = []
typeMap = {'': Handle.TYPE_DISP, '_NON_DISPATCHABLE': Handle.TYPE_NONDISP}
for type, name in matches:
handle = Handle(typeMap[type], name)
handles.append(handle)
return handles
def parseArgList (src):
typeNamePtrn = r'(' + TYPE_PTRN + r')(\s+' + IDENT_PTRN + r')((\[[^\]]+\])*)\s*'
args = []
for rawArg in src.split(','):
m = re.search(typeNamePtrn, rawArg)
args.append(Variable(m.group(1).strip(), m.group(2).strip(), m.group(3)))
return args
def removeTypeExtPostfix (name):
for extPostfix in EXTENSION_POSTFIXES_STANDARD:
if endsWith(name, extPostfix):
return name[0:-len(extPostfix)]
return None
def populateAliases (objects):
objectsByName = {}
for object in objects:
objectsByName[object.name] = object
for object in objects:
withoutPostfix = removeTypeExtPostfix(object.name)
if withoutPostfix != None and withoutPostfix in objectsByName:
objectsByName[withoutPostfix].alias = object
object.isAlias = True
for object in objects:
object.checkAliasValidity()
def populateAliasesWithTypedefs (objects, src):
objectsByName = {}
for object in objects:
objectsByName[object.name] = object
ptrn = r'\s*typedef\s+' + object.name + r'\s+([^;]+)'
stash = re.findall(ptrn, src)
if len(stash) == 1:
objExt = copy.deepcopy(object)
objExt.name = stash[0]
object.alias = objExt
objExt.isAlias = True
objects.append(objExt)
def removeAliasedValues (enum):
valueByName = {}
for name, value in enum.values:
valueByName[name] = value
def removeDefExtPostfix (name):
for extPostfix in EXTENSION_POSTFIXES:
if endsWith(name, "_" + extPostfix):
return name[0:-(len(extPostfix)+1)]
return None
newValues = []
for name, value in enum.values:
withoutPostfix = removeDefExtPostfix(name)
if withoutPostfix != None and withoutPostfix in valueByName and valueByName[withoutPostfix] == value:
continue
newValues.append((name, value))
enum.values = newValues
def parseFunctions (src):
ptrn = r'VKAPI_ATTR\s+(' + TYPE_PTRN + ')\s+VKAPI_CALL\s+(' + IDENT_PTRN + r')\s*\(([^)]*)\)\s*;'
matches = re.findall(ptrn, src)
functions = []
for returnType, name, argList in matches:
functions.append(Function(name.strip(), returnType.strip(), parseArgList(argList)))
return functions
def parseFunctionsByVersion (src):
ptrnVer10 = 'VK_VERSION_1_0 1'
ptrnVer11 = 'VK_VERSION_1_1 1'
matchVer10 = re.search(ptrnVer10, src)
matchVer11 = re.search(ptrnVer11, src)
ptrn = r'VKAPI_ATTR\s+(' + TYPE_PTRN + ')\s+VKAPI_CALL\s+(' + IDENT_PTRN + r')\s*\(([^)]*)\)\s*;'
regPtrn = re.compile(ptrn)
matches = regPtrn.findall(src, matchVer10.start(), matchVer11.start())
functions = []
for returnType, name, argList in matches:
functions.append(Function(name.strip(), returnType.strip(), parseArgList(argList), 'VK_VERSION_1_0'))
matches = regPtrn.findall(src, matchVer11.start())
for returnType, name, argList in matches:
functions.append(Function(name.strip(), returnType.strip(), parseArgList(argList), 'VK_VERSION_1_1'))
return functions
def splitByExtension (src):
ptrn = r'#define\s+[A-Z0-9_]+_EXTENSION_NAME\s+"([^"]+)"'
match = "#define\s+("
for part in re.finditer(ptrn, src):
match += part.group(1)+"|"
match = match[:-1] + ")\s+1"
parts = re.split(match, src)
# First part is core
byExtension = [(None, parts[0])]
for ndx in range(1, len(parts), 2):
byExtension.append((parts[ndx], parts[ndx+1]))
return byExtension
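# Illustrative sketch of splitByExtension() above (not part of the upstream
# script): in vulkan.h.in each extension block begins with a guard such as
# "#define VK_KHR_surface 1" and declares
# "#define VK_KHR_SURFACE_EXTENSION_NAME \"VK_KHR_surface\"", so the result is
# roughly [(None, <core section>), ("VK_KHR_surface", <VK_KHR_surface section>), ...].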
def parseDefinitions (extensionName, src):
def skipDefinition (extensionName, definition):
if extensionName == None:
return True
# SPEC_VERSION enums
if definition[0].startswith(extensionName.upper()) and definition[1].isdigit():
return False
if definition[0].startswith(extensionName.upper()):
return True
if definition[1].isdigit():
return True
return False
ptrn = r'#define\s+([^\s]+)\s+([^\r\n]+)'
matches = re.findall(ptrn, src)
return [Definition(None, match[0], match[1]) for match in matches if not skipDefinition(extensionName, match)]
def parseExtensions (src, allFunctions, allCompositeTypes, allEnums, allBitfields, allHandles, allDefinitions):
def getCoreVersion (extensionTuple):
if not extensionTuple[0]:
return None
ptrn = r'\/\/\s*' + extensionTuple[0] + r'\s+(DEVICE|INSTANCE)\s+([0-9_]+)'
coreVersion = re.search(ptrn, extensionTuple[1], re.I)
if coreVersion != None:
return [coreVersion.group(1)] + [int(number) for number in coreVersion.group(2).split('_')[:3]]
return None
splitSrc = splitByExtension(src)
extensions = []
functionsByName = {function.name: function for function in allFunctions}
compositeTypesByName = {compType.name: compType for compType in allCompositeTypes}
enumsByName = {enum.name: enum for enum in allEnums}
bitfieldsByName = {bitfield.name: bitfield for bitfield in allBitfields}
handlesByName = {handle.name: handle for handle in allHandles}
definitionsByName = {definition.name: definition for definition in allDefinitions}
for extensionName, extensionSrc in splitSrc:
definitions = [Definition(type, name, parsePreprocDefinedValueOptional(extensionSrc, name)) for name, type in DEFINITIONS]
definitions = [definition for definition in definitions if definition.value != None]
additionalDefinitions = parseDefinitions(extensionName, extensionSrc)
handles = parseHandles(extensionSrc)
functions = parseFunctions(extensionSrc)
compositeTypes = parseCompositeTypes(extensionSrc)
rawEnums = parseEnums(extensionSrc)
bitfieldNames = parseBitfieldNames(extensionSrc)
enumBitfieldNames = [getBitEnumNameForBitfield(name) for name in bitfieldNames]
enums = [enum for enum in rawEnums if enum.name not in enumBitfieldNames]
extCoreVersion = getCoreVersion((extensionName, extensionSrc))
extFunctions = [functionsByName[function.name] for function in functions]
extCompositeTypes = [compositeTypesByName[compositeType.name] for compositeType in compositeTypes]
extEnums = [enumsByName[enum.name] for enum in enums]
extBitfields = [bitfieldsByName[bitfieldName] for bitfieldName in bitfieldNames]
extHandles = [handlesByName[handle.name] for handle in handles]
extDefinitions = [definitionsByName[definition.name] for definition in definitions]
extensions.append(Extension(extensionName, extHandles, extEnums, extBitfields, extCompositeTypes, extFunctions, extDefinitions, additionalDefinitions, extCoreVersion))
return extensions
def parseBitfieldNames (src):
ptrn = r'typedef\s+VkFlags\s(' + IDENT_PTRN + r')\s*;'
matches = re.findall(ptrn, src)
return matches
def parseAPI (src):
definitions = [Definition(type, name, parsePreprocDefinedValue(src, name)) for name, type in DEFINITIONS]
handles = parseHandles(src)
rawEnums = parseEnums(src)
bitfieldNames = parseBitfieldNames(src)
enums = []
bitfields = []
bitfieldEnums = set([getBitEnumNameForBitfield(n) for n in bitfieldNames if getBitEnumNameForBitfield(n) in [enum.name for enum in rawEnums]])
compositeTypes = parseCompositeTypes(src)
allFunctions = parseFunctionsByVersion(src)
for enum in rawEnums:
if enum.name in bitfieldEnums:
bitfields.append(Bitfield(getBitfieldNameForBitEnum(enum.name), enum.values))
else:
enums.append(enum)
for bitfieldName in bitfieldNames:
if not bitfieldName in [bitfield.name for bitfield in bitfields]:
# Add empty bitfield
bitfields.append(Bitfield(bitfieldName, []))
# Populate alias fields
populateAliasesWithTypedefs(compositeTypes, src)
populateAliasesWithTypedefs(enums, src)
populateAliasesWithTypedefs(bitfields, src)
populateAliases(allFunctions)
populateAliases(handles)
populateAliases(enums)
populateAliases(bitfields)
populateAliases(compositeTypes)
for enum in enums:
removeAliasedValues(enum)
extensions = parseExtensions(src, allFunctions, compositeTypes, enums, bitfields, handles, definitions)
return API(
definitions = definitions,
handles = handles,
enums = enums,
bitfields = bitfields,
compositeTypes = compositeTypes,
functions = allFunctions,
extensions = extensions)
def splitUniqueAndDuplicatedEntries (handles):
listOfUniqueHandles = []
duplicates = OrderedDict()
for handle in handles:
if handle.alias != None:
duplicates[handle.alias] = handle
if not handle.isAlias:
listOfUniqueHandles.append(handle)
return listOfUniqueHandles, duplicates
def writeHandleType (api, filename):
uniqeHandles, duplicatedHandles = splitUniqueAndDuplicatedEntries(api.handles)
def genHandles ():
yield "\t%s\t= 0," % uniqeHandles[0].getHandleType()
for handle in uniqeHandles[1:]:
yield "\t%s," % handle.getHandleType()
for duplicate in duplicatedHandles:
yield "\t%s\t= %s," % (duplicate.getHandleType(), duplicatedHandles[duplicate].getHandleType())
yield "\tHANDLE_TYPE_LAST\t= %s + 1" % (uniqeHandles[-1].getHandleType())
def genHandlesBlock ():
yield "enum HandleType"
yield "{"
for line in indentLines(genHandles()):
yield line
yield "};"
yield ""
writeInlFile(filename, INL_HEADER, genHandlesBlock())
def getEnumValuePrefix (enum):
prefix = enum.name[0]
for i in range(1, len(enum.name)):
if enum.name[i].isupper() and not enum.name[i-1].isupper():
prefix += "_"
prefix += enum.name[i].upper()
return prefix
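# Illustrative example for getEnumValuePrefix() above (not part of the upstream
# script): for an enum named "VkImageType" it returns "VK_IMAGE_TYPE", which
# genEnumSrc() uses to emit the trailing "VK_IMAGE_TYPE_LAST" value.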
def parseInt (value):
if value[:2] == "0x":
return int(value, 16)
else:
return int(value, 10)
def areEnumValuesLinear (enum):
curIndex = 0
for name, value in enum.values:
if parseInt(value) != curIndex:
return False
curIndex += 1
return True
def genEnumSrc (enum):
yield "enum %s" % enum.name
yield "{"
for line in indentLines(["\t%s\t= %s," % v for v in enum.values]):
yield line
if areEnumValuesLinear(enum):
yield ""
yield "\t%s_LAST" % getEnumValuePrefix(enum)
yield "};"
def genBitfieldSrc (bitfield):
if len(bitfield.values) > 0:
yield "enum %s" % getBitEnumNameForBitfield(bitfield.name)
yield "{"
for line in indentLines(["\t%s\t= %s," % v for v in bitfield.values]):
yield line
yield "};"
yield "typedef deUint32 %s;" % bitfield.name
def genCompositeTypeSrc (type):
yield "%s %s" % (type.getClassName(), type.name)
yield "{"
for line in indentLines(['\t'+m.getAsString('\t')+';' for m in type.members]):
yield line
yield "};"
def genHandlesSrc (handles):
uniqeHandles, duplicatedHandles = splitUniqueAndDuplicatedEntries(handles)
def genLines (handles):
for handle in uniqeHandles:
if handle.type == Handle.TYPE_DISP:
yield "VK_DEFINE_HANDLE\t(%s,\t%s);" % (handle.name, handle.getHandleType())
elif handle.type == Handle.TYPE_NONDISP:
yield "VK_DEFINE_NON_DISPATCHABLE_HANDLE\t(%s,\t%s);" % (handle.name, handle.getHandleType())
for duplicate in duplicatedHandles:
if duplicate.type == Handle.TYPE_DISP:
yield "VK_DEFINE_HANDLE\t(%s,\t%s);" % (duplicate.name, duplicatedHandles[duplicate].getHandleType())
elif duplicate.type == Handle.TYPE_NONDISP:
yield "VK_DEFINE_NON_DISPATCHABLE_HANDLE\t(%s,\t%s);" % (duplicate.name, duplicatedHandles[duplicate].getHandleType())
for line in indentLines(genLines(handles)):
yield line
def genDefinitionsSrc (definitions):
for line in ["#define %s\t(static_cast<%s>\t(%s))" % (definition.name, definition.type, definition.value) for definition in definitions]:
yield line
def genDefinitionsAliasSrc (definitions):
for line in ["#define %s\t%s" % (definition.name, definitions[definition].name) for definition in definitions]:
if definition.value != definitions[definition].value and definition.value != definitions[definition].name:
raise Exception("Value of %s (%s) is different than core definition value %s (%s)." % (definition.name, definition.value, definitions[definition].name, definitions[definition].value))
yield line
def writeBasicTypes (api, filename):
def gen ():
definitionsCore, definitionDuplicates = splitUniqueAndDuplicatedEntries(api.definitions)
for line in indentLines(chain(genDefinitionsSrc(definitionsCore), genDefinitionsAliasSrc(definitionDuplicates))):
yield line
yield ""
for line in genHandlesSrc(api.handles):
yield line
yield ""
for enum in api.enums:
if not enum.isAlias:
for line in genEnumSrc(enum):
yield line
yield ""
for bitfield in api.bitfields:
if not bitfield.isAlias:
for line in genBitfieldSrc(bitfield):
yield line
yield ""
for line in indentLines(["VK_DEFINE_PLATFORM_TYPE(%s,\t%s);" % (s[0], c) for n, s, c in PLATFORM_TYPES]):
yield line
for ext in api.extensions:
if ext.additionalDefs != None:
for definition in ext.additionalDefs:
yield "#define " + definition.name + " " + definition.value
writeInlFile(filename, INL_HEADER, gen())
def writeCompositeTypes (api, filename):
def gen ():
for type in api.compositeTypes:
type.checkAliasValidity()
if not type.isAlias:
for line in genCompositeTypeSrc(type):
yield line
yield ""
writeInlFile(filename, INL_HEADER, gen())
def argListToStr (args):
return ", ".join(v.getAsString(' ') for v in args)
def writeInterfaceDecl (api, filename, functionTypes, concrete):
def genProtos ():
postfix = "" if concrete else " = 0"
for function in api.functions:
if not function.getType() in functionTypes:
continue
if not function.isAlias:
yield "virtual %s\t%s\t(%s) const%s;" % (function.returnType, getInterfaceName(function), argListToStr(function.arguments), postfix)
writeInlFile(filename, INL_HEADER, indentLines(genProtos()))
def writeFunctionPtrTypes (api, filename):
def genTypes ():
for function in api.functions:
yield "typedef VKAPI_ATTR %s\t(VKAPI_CALL* %s)\t(%s);" % (function.returnType, getFunctionTypeName(function), argListToStr(function.arguments))
writeInlFile(filename, INL_HEADER, indentLines(genTypes()))
def writeFunctionPointers (api, filename, functionTypes):
def FunctionsYielder ():
for function in api.functions:
if function.getType() in functionTypes:
if function.isAlias:
if function.getType() == Function.TYPE_INSTANCE and function.arguments[0].getType() == "VkPhysicalDevice":
yield "%s\t%s;" % (getFunctionTypeName(function), getInterfaceName(function))
else:
yield "%s\t%s;" % (getFunctionTypeName(function), getInterfaceName(function))
writeInlFile(filename, INL_HEADER, indentLines(FunctionsYielder()))
def writeInitFunctionPointers (api, filename, functionTypes, cond = None):
def makeInitFunctionPointers ():
for function in api.functions:
if function.getType() in functionTypes and (cond == None or cond(function)):
interfaceName = getInterfaceName(function)
if function.isAlias:
if function.getType() == Function.TYPE_INSTANCE and function.arguments[0].getType() == "VkPhysicalDevice":
yield "m_vk.%s\t= (%s)\tGET_PROC_ADDR(\"%s\");" % (getInterfaceName(function), getFunctionTypeName(function), function.name)
else:
yield "m_vk.%s\t= (%s)\tGET_PROC_ADDR(\"%s\");" % (getInterfaceName(function), getFunctionTypeName(function), function.name)
if function.alias != None:
yield "if (!m_vk.%s)" % (getInterfaceName(function))
yield " m_vk.%s\t= (%s)\tGET_PROC_ADDR(\"%s\");" % (getInterfaceName(function), getFunctionTypeName(function), function.alias.name)
lines = [line.replace(' ', '\t') for line in indentLines(makeInitFunctionPointers())]
writeInlFile(filename, INL_HEADER, lines)
def writeFuncPtrInterfaceImpl (api, filename, functionTypes, className):
def makeFuncPtrInterfaceImpl ():
for function in api.functions:
if function.getType() in functionTypes and not function.isAlias:
yield ""
yield "%s %s::%s (%s) const" % (function.returnType, className, getInterfaceName(function), argListToStr(function.arguments))
yield "{"
if function.name == "vkEnumerateInstanceVersion":
yield " if (m_vk.enumerateInstanceVersion)"
yield " return m_vk.enumerateInstanceVersion(pApiVersion);"
yield ""
yield " *pApiVersion = VK_API_VERSION_1_0;"
yield " return VK_SUCCESS;"
elif function.getType() == Function.TYPE_INSTANCE and function.arguments[0].getType() == "VkPhysicalDevice" and function.alias != None:
yield " vk::VkPhysicalDeviceProperties props;"
yield " m_vk.getPhysicalDeviceProperties(physicalDevice, &props);"
yield " if (props.apiVersion >= VK_API_VERSION_1_1)"
yield " %sm_vk.%s(%s);" % ("return " if function.returnType != "void" else "", getInterfaceName(function), ", ".join(a.name for a in function.arguments))
yield " else"
yield " %sm_vk.%s(%s);" % ("return " if function.returnType != "void" else "", getInterfaceName(function.alias), ", ".join(a.name for a in function.arguments))
else:
yield " %sm_vk.%s(%s);" % ("return " if function.returnType != "void" else "", getInterfaceName(function), ", ".join(a.name for a in function.arguments))
yield "}"
writeInlFile(filename, INL_HEADER, makeFuncPtrInterfaceImpl())
def writeStrUtilProto (api, filename):
def makeStrUtilProto ():
for line in indentLines(["const char*\tget%sName\t(%s value);" % (enum.name[2:], enum.name) for enum in api.enums if not enum.isAlias]):
yield line
yield ""
for line in indentLines(["inline tcu::Format::Enum<%s>\tget%sStr\t(%s value)\t{ return tcu::Format::Enum<%s>(get%sName, value);\t}" % (e.name, e.name[2:], e.name, e.name, e.name[2:]) for e in api.enums if not e.isAlias]):
yield line
yield ""
for line in indentLines(["inline std::ostream&\toperator<<\t(std::ostream& s, %s value)\t{ return s << get%sStr(value);\t}" % (e.name, e.name[2:]) for e in api.enums if not e.isAlias]):
yield line
yield ""
for line in indentLines(["tcu::Format::Bitfield<32>\tget%sStr\t(%s value);" % (bitfield.name[2:], bitfield.name) for bitfield in api.bitfields if not bitfield.isAlias]):
yield line
yield ""
for line in indentLines(["std::ostream&\toperator<<\t(std::ostream& s, const %s& value);" % (s.name) for s in api.compositeTypes if not s.isAlias]):
yield line
writeInlFile(filename, INL_HEADER, makeStrUtilProto())
def writeStrUtilImpl (api, filename):
def makeStrUtilImpl ():
for line in indentLines(["template<> const char*\tgetTypeName<%s>\t(void) { return \"%s\";\t}" % (handle.name, handle.name) for handle in api.handles if not handle.isAlias]):
yield line
yield ""
yield "namespace %s" % PLATFORM_TYPE_NAMESPACE
yield "{"
for line in indentLines("std::ostream& operator<< (std::ostream& s, %s\tv) { return s << tcu::toHex(v.internal); }" % ''.join(s) for n, s, c in PLATFORM_TYPES):
yield line
yield "}"
for enum in api.enums:
if enum.isAlias:
continue
yield ""
yield "const char* get%sName (%s value)" % (enum.name[2:], enum.name)
yield "{"
yield "\tswitch (value)"
yield "\t{"
for line in indentLines(["\t\tcase %s:\treturn \"%s\";" % (n, n) for n, v in enum.values] + ["\t\tdefault:\treturn DE_NULL;"]):
yield line
yield "\t}"
yield "}"
for bitfield in api.bitfields:
if bitfield.isAlias:
continue
yield ""
yield "tcu::Format::Bitfield<32> get%sStr (%s value)" % (bitfield.name[2:], bitfield.name)
yield "{"
if len(bitfield.values) > 0:
yield "\tstatic const tcu::Format::BitDesc s_desc[] ="
yield "\t{"
for line in indentLines(["\t\ttcu::Format::BitDesc(%s,\t\"%s\")," % (n, n) for n, v in bitfield.values]):
yield line
yield "\t};"
yield "\treturn tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));"
else:
yield "\treturn tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);"
yield "}"
bitfieldTypeNames = set([bitfield.name for bitfield in api.bitfields])
for type in api.compositeTypes:
if not type.isAlias:
yield ""
yield "std::ostream& operator<< (std::ostream& s, const %s& value)" % type.name
yield "{"
yield "\ts << \"%s = {\\n\";" % type.name
for member in type.members:
memberName = member.name
valFmt = None
newLine = ""
if member.getType() in bitfieldTypeNames:
valFmt = "get%sStr(value.%s)" % (member.getType()[2:], member.name)
elif member.getType() == "const char*" or member.getType() == "char*":
valFmt = "getCharPtrStr(value.%s)" % member.name
elif member.arraySize != '':
if member.name in ["extensionName", "deviceName", "layerName", "description"]:
valFmt = "(const char*)value.%s" % member.name
elif member.getType() == 'char' or member.getType() == 'deUint8':
newLine = "'\\n' << "
valFmt = "tcu::formatArray(tcu::Format::HexIterator<%s>(DE_ARRAY_BEGIN(value.%s)), tcu::Format::HexIterator<%s>(DE_ARRAY_END(value.%s)))" % (member.getType(), member.name, member.getType(), member.name)
else:
if member.name == "memoryTypes" or member.name == "memoryHeaps":
endIter = "DE_ARRAY_BEGIN(value.%s) + value.%sCount" % (member.name, member.name[:-1])
else:
endIter = "DE_ARRAY_END(value.%s)" % member.name
newLine = "'\\n' << "
valFmt = "tcu::formatArray(DE_ARRAY_BEGIN(value.%s), %s)" % (member.name, endIter)
memberName = member.name
else:
valFmt = "value.%s" % member.name
yield ("\ts << \"\\t%s = \" << " % memberName) + newLine + valFmt + " << '\\n';"
yield "\ts << '}';"
yield "\treturn s;"
yield "}"
writeInlFile(filename, INL_HEADER, makeStrUtilImpl())
class ConstructorFunction:
def __init__ (self, type, name, objectType, ifaceArgs, arguments):
self.type = type
self.name = name
self.objectType = objectType
self.ifaceArgs = ifaceArgs
self.arguments = arguments
def getConstructorFunctions (api):
funcs = []
ifacesDict = {
Function.TYPE_PLATFORM: [Variable("const PlatformInterface&", "vk", "")],
Function.TYPE_INSTANCE: [Variable("const InstanceInterface&", "vk", "")],
Function.TYPE_DEVICE: [Variable("const DeviceInterface&", "vk", "")]
}
for function in api.functions:
if function.isAlias:
continue
if (function.name[:8] == "vkCreate" or function.name == "vkAllocateMemory") and not "createInfoCount" in [a.name for a in function.arguments]:
if function.name == "vkCreateDisplayModeKHR":
continue # No way to delete display modes (bug?)
# \todo [pyry] Rather hacky
ifaceArgs = ifacesDict[function.getType()]
if function.name == "vkCreateDevice":
ifaceArgs = [Variable("const PlatformInterface&", "vkp", ""), Variable("VkInstance", "instance", "")] + ifaceArgs
assert (function.arguments[-2].type == ["const", "VkAllocationCallbacks", "*"])
objectType = function.arguments[-1].type[0] #not getType() but type[0] on purpose
arguments = function.arguments[:-1]
funcs.append(ConstructorFunction(function.getType(), getInterfaceName(function), objectType, ifaceArgs, arguments))
return funcs
def addVersionDefines(versionSpectrum):
output = ["#define " + ver.getDefineName() + " " + ver.getInHex() for ver in versionSpectrum if not ver.isStandardVersion()]
return output
def removeVersionDefines(versionSpectrum):
output = ["#undef " + ver.getDefineName() for ver in versionSpectrum if not ver.isStandardVersion()]
return output
def writeRefUtilProto (api, filename):
functions = getConstructorFunctions(api)
def makeRefUtilProto ():
unindented = []
for line in indentLines(["Move<%s>\t%s\t(%s = DE_NULL);" % (function.objectType, function.name, argListToStr(function.ifaceArgs + function.arguments)) for function in functions]):
yield line
writeInlFile(filename, INL_HEADER, makeRefUtilProto())
def writeRefUtilImpl (api, filename):
functions = getConstructorFunctions(api)
def makeRefUtilImpl ():
yield "namespace refdetails"
yield "{"
yield ""
for function in api.functions:
if function.getType() == Function.TYPE_DEVICE \
and (function.name[:9] == "vkDestroy" or function.name == "vkFreeMemory") \
and not function.name == "vkDestroyDevice" \
and not function.isAlias:
objectType = function.arguments[-2].getType()
yield "template<>"
yield "void Deleter<%s>::operator() (%s obj) const" % (objectType, objectType)
yield "{"
yield "\tm_deviceIface->%s(m_device, obj, m_allocator);" % (getInterfaceName(function))
yield "}"
yield ""
yield "} // refdetails"
yield ""
dtorDict = {
Function.TYPE_PLATFORM: "object",
Function.TYPE_INSTANCE: "instance",
Function.TYPE_DEVICE: "device"
}
for function in functions:
deleterArgsString = ''
if function.name == "createDevice":
# createDevice requires two additional parameters to setup VkDevice deleter
deleterArgsString = "vkp, instance, object, " + function.arguments[-1].name
else:
deleterArgsString = "vk, %s, %s" % (dtorDict[function.type], function.arguments[-1].name)
yield "Move<%s> %s (%s)" % (function.objectType, function.name, argListToStr(function.ifaceArgs + function.arguments))
yield "{"
yield "\t%s object = 0;" % function.objectType
yield "\tVK_CHECK(vk.%s(%s));" % (function.name, ", ".join([a.name for a in function.arguments] + ["&object"]))
yield "\treturn Move<%s>(check<%s>(object), Deleter<%s>(%s));" % (function.objectType, function.objectType, function.objectType, deleterArgsString)
yield "}"
yield ""
writeInlFile(filename, INL_HEADER, makeRefUtilImpl())
def writeStructTraitsImpl (api, filename):
def gen ():
for type in api.compositeTypes:
if type.getClassName() == "struct" and type.members[0].name == "sType" and not type.isAlias:
yield "template<> VkStructureType getStructureType<%s> (void)" % type.name
yield "{"
yield "\treturn %s;" % prefixName("VK_STRUCTURE_TYPE_", type.name)
yield "}"
yield ""
writeInlFile(filename, INL_HEADER, gen())
def writeNullDriverImpl (api, filename):
def genNullDriverImpl ():
specialFuncNames = [
"vkCreateGraphicsPipelines",
"vkCreateComputePipelines",
"vkGetInstanceProcAddr",
"vkGetDeviceProcAddr",
"vkEnumeratePhysicalDevices",
"vkEnumerateInstanceExtensionProperties",
"vkEnumerateDeviceExtensionProperties",
"vkGetPhysicalDeviceFeatures",
"vkGetPhysicalDeviceFeatures2KHR",
"vkGetPhysicalDeviceProperties",
"vkGetPhysicalDeviceProperties2KHR",
"vkGetPhysicalDeviceQueueFamilyProperties",
"vkGetPhysicalDeviceMemoryProperties",
"vkGetPhysicalDeviceFormatProperties",
"vkGetPhysicalDeviceImageFormatProperties",
"vkGetDeviceQueue",
"vkGetBufferMemoryRequirements",
"vkGetBufferMemoryRequirements2KHR",
"vkGetImageMemoryRequirements",
"vkGetImageMemoryRequirements2KHR",
"vkAllocateMemory",
"vkMapMemory",
"vkUnmapMemory",
"vkAllocateDescriptorSets",
"vkFreeDescriptorSets",
"vkResetDescriptorPool",
"vkAllocateCommandBuffers",
"vkFreeCommandBuffers",
"vkCreateDisplayModeKHR",
"vkCreateSharedSwapchainsKHR",
"vkGetPhysicalDeviceExternalBufferPropertiesKHR",
"vkGetPhysicalDeviceImageFormatProperties2KHR",
"vkGetMemoryAndroidHardwareBufferANDROID",
]
coreFunctions = [f for f in api.functions if not f.isAlias]
specialFuncs = [f for f in coreFunctions if f.name in specialFuncNames]
createFuncs = [f for f in coreFunctions if (f.name[:8] == "vkCreate" or f.name == "vkAllocateMemory") and not f in specialFuncs]
destroyFuncs = [f for f in coreFunctions if (f.name[:9] == "vkDestroy" or f.name == "vkFreeMemory") and not f in specialFuncs]
dummyFuncs = [f for f in coreFunctions if f not in specialFuncs + createFuncs + destroyFuncs]
def getHandle (name):
for handle in api.handles:
if handle.name == name[0]:
return handle
raise Exception("No such handle: %s" % name)
for function in createFuncs:
objectType = function.arguments[-1].type[:-1]
argsStr = ", ".join([a.name for a in function.arguments[:-1]])
yield "VKAPI_ATTR %s VKAPI_CALL %s (%s)" % (function.returnType, getInterfaceName(function), argListToStr(function.arguments))
yield "{"
yield "\tDE_UNREF(%s);" % function.arguments[-2].name
if getHandle(objectType).type == Handle.TYPE_NONDISP:
yield "\tVK_NULL_RETURN((*%s = allocateNonDispHandle<%s, %s>(%s)));" % (function.arguments[-1].name, objectType[0][2:], objectType[0], argsStr)
else:
yield "\tVK_NULL_RETURN((*%s = allocateHandle<%s, %s>(%s)));" % (function.arguments[-1].name, objectType[0][2:], objectType[0], argsStr)
yield "}"
yield ""
for function in destroyFuncs:
objectArg = function.arguments[-2]
yield "VKAPI_ATTR %s VKAPI_CALL %s (%s)" % (function.returnType, getInterfaceName(function), argListToStr(function.arguments))
yield "{"
for arg in function.arguments[:-2]:
yield "\tDE_UNREF(%s);" % arg.name
if getHandle(objectArg.type).type == Handle.TYPE_NONDISP:
yield "\tfreeNonDispHandle<%s, %s>(%s, %s);" % (objectArg.getType()[2:], objectArg.getType(), objectArg.name, function.arguments[-1].name)
else:
yield "\tfreeHandle<%s, %s>(%s, %s);" % (objectArg.getType()[2:], objectArg.getType(), objectArg.name, function.arguments[-1].name)
yield "}"
yield ""
for function in dummyFuncs:
yield "VKAPI_ATTR %s VKAPI_CALL %s (%s)" % (function.returnType, getInterfaceName(function), argListToStr(function.arguments))
yield "{"
for arg in function.arguments:
yield "\tDE_UNREF(%s);" % arg.name
if function.returnType != "void":
yield "\treturn VK_SUCCESS;"
yield "}"
yield ""
def genFuncEntryTable (type, name):
funcs = [f for f in api.functions if f.getType() == type]
refFuncs = {}
for f in api.functions:
if f.alias != None:
refFuncs[f.alias] = f
yield "static const tcu::StaticFunctionLibrary::Entry %s[] =" % name
yield "{"
for line in indentLines(["\tVK_NULL_FUNC_ENTRY(%s,\t%s)," % (function.name, getInterfaceName(function if not function.isAlias else refFuncs[function])) for function in funcs]):
yield line
yield "};"
yield ""
# Func tables
for line in genFuncEntryTable(Function.TYPE_PLATFORM, "s_platformFunctions"):
yield line
for line in genFuncEntryTable(Function.TYPE_INSTANCE, "s_instanceFunctions"):
yield line
for line in genFuncEntryTable(Function.TYPE_DEVICE, "s_deviceFunctions"):
yield line
writeInlFile(filename, INL_HEADER, genNullDriverImpl())
def writeTypeUtil (api, filename):
# Structs filled by API queries are not often used in test code
QUERY_RESULT_TYPES = set([
"VkPhysicalDeviceFeatures",
"VkPhysicalDeviceLimits",
"VkFormatProperties",
"VkImageFormatProperties",
"VkPhysicalDeviceSparseProperties",
"VkQueueFamilyProperties",
"VkMemoryType",
"VkMemoryHeap",
])
COMPOSITE_TYPES = set([t.name for t in api.compositeTypes if not t.isAlias])
def isSimpleStruct (type):
def hasArrayMember (type):
for member in type.members:
if member.arraySize != '':
return True
return False
def hasCompositeMember (type):
for member in type.members:
if member.getType() in COMPOSITE_TYPES:
return True
return False
return type.typeClass == CompositeType.CLASS_STRUCT and \
type.members[0].getType() != "VkStructureType" and \
not type.name in QUERY_RESULT_TYPES and \
not hasArrayMember(type) and \
not hasCompositeMember(type)
def gen ():
for type in api.compositeTypes:
if not isSimpleStruct(type) or type.isAlias:
continue
yield ""
yield "inline %s make%s (%s)" % (type.name, type.name[2:], argListToStr(type.members))
yield "{"
yield "\t%s res;" % type.name
for line in indentLines(["\tres.%s\t= %s;" % (m.name, m.name) for m in type.members]):
yield line
yield "\treturn res;"
yield "}"
writeInlFile(filename, INL_HEADER, gen())
def writeSupportedExtenions(api, filename):
def writeExtensionsForVersions(map):
result = []
for version in map:
result.append(" if (coreVersion >= " + str(version) + ")")
result.append(" {")
for extension in map[version]:
result.append(' dst.push_back("' + extension.name + '");')
result.append(" }")
return result
instanceMap = {}
deviceMap = {}
versionSet = set()
for ext in api.extensions:
if ext.versionInCore != None:
if ext.versionInCore[0] == 'INSTANCE':
list = instanceMap.get(Version(ext.versionInCore[1:]))
instanceMap[Version(ext.versionInCore[1:])] = list + [ext] if list else [ext]
else:
list = deviceMap.get(Version(ext.versionInCore[1:]))
deviceMap[Version(ext.versionInCore[1:])] = list + [ext] if list else [ext]
versionSet.add(Version(ext.versionInCore[1:]))
lines = addVersionDefines(versionSet) + [
"",
"void getCoreDeviceExtensionsImpl (deUint32 coreVersion, ::std::vector<const char*>&%s)" % (" dst" if len(deviceMap) != 0 else ""),
"{"] + writeExtensionsForVersions(deviceMap) + [
"}",
"",
"void getCoreInstanceExtensionsImpl (deUint32 coreVersion, ::std::vector<const char*>&%s)" % (" dst" if len(instanceMap) != 0 else ""),
"{"] + writeExtensionsForVersions(instanceMap) + [
"}",
""] + removeVersionDefines(versionSet)
writeInlFile(filename, INL_HEADER, lines)
def writeCoreFunctionalities(api, filename):
functionOriginValues = ["FUNCTIONORIGIN_PLATFORM", "FUNCTIONORIGIN_INSTANCE", "FUNCTIONORIGIN_DEVICE"]
lines = addVersionDefines([Version((1, 0, 0)), Version((1, 1, 0))]) + [
"",
'enum FunctionOrigin', '{'] + [line for line in indentLines([
'\t' + functionOriginValues[0] + '\t= 0,',
'\t' + functionOriginValues[1] + ',',
'\t' + functionOriginValues[2]])] + [
"};",
"",
"typedef ::std::pair<const char*, FunctionOrigin> FunctionInfo;",
"typedef ::std::vector<FunctionInfo> FunctionInfosList;",
"typedef ::std::map<deUint32, FunctionInfosList> ApisMap;",
"",
"void initApisMap (ApisMap& apis)",
"{",
" apis.clear();",
" apis.insert(::std::pair<deUint32, FunctionInfosList>(" + str(Version((1, 0, 0))) + ", FunctionInfosList()));",
" apis.insert(::std::pair<deUint32, FunctionInfosList>(" + str(Version((1, 1, 0))) + ", FunctionInfosList()));",
""]
def list10Funcs ():
for fun in api.functions:
if fun.apiVersion == 'VK_VERSION_1_0':
insert = ' apis[' + str(Version((1, 0, 0))) + '].push_back(FunctionInfo("' + fun.name + '",\t' + functionOriginValues[fun.getType()] + '));'
yield insert
def listAllFuncs ():
for fun in api.extensions[0].functions:
insert = ' apis[' + str(Version((1, 1, 0))) + '].push_back(FunctionInfo("' + fun.name + '",\t' + functionOriginValues[fun.getType()] + '));'
yield insert
lines = lines + [line for line in indentLines(list10Funcs())]
lines.append("")
lines = lines + [line for line in indentLines(listAllFuncs())]
lines.append("}")
lines.append("")
lines = lines + removeVersionDefines([Version((1, 0, 0)), Version((1, 1, 0))])
writeInlFile(filename, INL_HEADER, lines)
if __name__ == "__main__":
src = readFile(VULKAN_H)
api = parseAPI(src)
platformFuncs = [Function.TYPE_PLATFORM]
instanceFuncs = [Function.TYPE_INSTANCE]
deviceFuncs = [Function.TYPE_DEVICE]
writeHandleType (api, os.path.join(VULKAN_DIR, "vkHandleType.inl"))
writeBasicTypes (api, os.path.join(VULKAN_DIR, "vkBasicTypes.inl"))
writeCompositeTypes (api, os.path.join(VULKAN_DIR, "vkStructTypes.inl"))
writeInterfaceDecl (api, os.path.join(VULKAN_DIR, "vkVirtualPlatformInterface.inl"), platformFuncs, False)
writeInterfaceDecl (api, os.path.join(VULKAN_DIR, "vkVirtualInstanceInterface.inl"), instanceFuncs, False)
writeInterfaceDecl (api, os.path.join(VULKAN_DIR, "vkVirtualDeviceInterface.inl"), deviceFuncs, False)
writeInterfaceDecl (api, os.path.join(VULKAN_DIR, "vkConcretePlatformInterface.inl"), platformFuncs, True)
writeInterfaceDecl (api, os.path.join(VULKAN_DIR, "vkConcreteInstanceInterface.inl"), instanceFuncs, True)
writeInterfaceDecl (api, os.path.join(VULKAN_DIR, "vkConcreteDeviceInterface.inl"), deviceFuncs, True)
writeFunctionPtrTypes (api, os.path.join(VULKAN_DIR, "vkFunctionPointerTypes.inl"))
writeFunctionPointers (api, os.path.join(VULKAN_DIR, "vkPlatformFunctionPointers.inl"), platformFuncs)
writeFunctionPointers (api, os.path.join(VULKAN_DIR, "vkInstanceFunctionPointers.inl"), instanceFuncs)
writeFunctionPointers (api, os.path.join(VULKAN_DIR, "vkDeviceFunctionPointers.inl"), deviceFuncs)
writeInitFunctionPointers (api, os.path.join(VULKAN_DIR, "vkInitPlatformFunctionPointers.inl"), platformFuncs, lambda f: f.name != "vkGetInstanceProcAddr")
writeInitFunctionPointers (api, os.path.join(VULKAN_DIR, "vkInitInstanceFunctionPointers.inl"), instanceFuncs)
writeInitFunctionPointers (api, os.path.join(VULKAN_DIR, "vkInitDeviceFunctionPointers.inl"), deviceFuncs)
writeFuncPtrInterfaceImpl (api, os.path.join(VULKAN_DIR, "vkPlatformDriverImpl.inl"), platformFuncs, "PlatformDriver")
writeFuncPtrInterfaceImpl (api, os.path.join(VULKAN_DIR, "vkInstanceDriverImpl.inl"), instanceFuncs, "InstanceDriver")
writeFuncPtrInterfaceImpl (api, os.path.join(VULKAN_DIR, "vkDeviceDriverImpl.inl"), deviceFuncs, "DeviceDriver")
writeStrUtilProto (api, os.path.join(VULKAN_DIR, "vkStrUtil.inl"))
writeStrUtilImpl (api, os.path.join(VULKAN_DIR, "vkStrUtilImpl.inl"))
writeRefUtilProto (api, os.path.join(VULKAN_DIR, "vkRefUtil.inl"))
writeRefUtilImpl (api, os.path.join(VULKAN_DIR, "vkRefUtilImpl.inl"))
writeStructTraitsImpl (api, os.path.join(VULKAN_DIR, "vkGetStructureTypeImpl.inl"))
writeNullDriverImpl (api, os.path.join(VULKAN_DIR, "vkNullDriverImpl.inl"))
writeTypeUtil (api, os.path.join(VULKAN_DIR, "vkTypeUtil.inl"))
writeSupportedExtenions (api, os.path.join(VULKAN_DIR, "vkSupportedExtensions.inl"))
writeCoreFunctionalities (api, os.path.join(VULKAN_DIR, "vkCoreFunctionalities.inl"))
| 37.729239 | 225 | 0.694281 | [
"Apache-2.0"
] | LGWingEmulator/external-deqp | external/vulkancts/scripts/gen_framework.py | 54,066 | Python |
# -*- coding: utf-8 -*-
"""Title translate module."""
#
# (C) Rob W.W. Hooft, 2003
# (C) Yuri Astrakhan, 2005
# (C) Pywikibot team, 2003-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id: 790e17e8c0da2027ad0cb511c168c353056583aa $'
#
import re
import pywikibot
import pywikibot.date as date
from pywikibot import config
from pywikibot.tools import deprecated_args
@deprecated_args(family=None)
def translate(page=None, hints=None, auto=True, removebrackets=False,
site=None):
"""
Return a list of links to pages on other sites based on hints.
Entries for single page titles list those pages. Page titles for entries
such as "all:" or "xyz:" or "20:" are first built from the page title of
'page' and then listed. When 'removebrackets' is True, a trailing pair of
brackets and the text between them is removed from the page title.
If 'auto' is true, known year and date page titles are autotranslated
to all known target languages and inserted into the list.
"""
result = set()
assert page or site
if site is None and page:
site = page.site
if hints:
for h in hints:
if ':' not in h:
# argument given as -hint:xy where xy is a language code
codes = h
newname = ''
else:
codes, newname = h.split(':', 1)
if newname == '':
# if given as -hint:xy or -hint:xy:, assume that there should
# be a page in language xy with the same title as the page
# we're currently working on ...
if page is None:
continue
newname = page.title(withNamespace=False)
# ... unless we do want brackets
if removebrackets:
newname = re.sub(re.compile(r"\W*?\(.*?\)\W*?",
re.UNICODE), u" ", newname)
try:
number = int(codes)
codes = site.family.languages_by_size[:number]
except ValueError:
if codes == 'all':
codes = site.family.languages_by_size
elif codes in site.family.language_groups:
codes = site.family.language_groups[codes]
else:
codes = codes.split(',')
for newcode in codes:
if newcode in site.languages():
if newcode != site.code:
ns = page.namespace() if page else 0
x = pywikibot.Link(newname,
site.getSite(code=newcode),
defaultNamespace=ns)
result.add(x)
else:
if config.verbose_output:
pywikibot.output(u"Ignoring unknown language code %s"
% newcode)
# Autotranslate dates into all other languages, the rest will come from
# existing interwiki links.
if auto and page:
# search inside all dictionaries for this link
sitelang = page.site.code
dictName, value = date.getAutoFormat(sitelang, page.title())
        if dictName:
            pywikibot.output(
                u'TitleTranslate: %s was recognized as %s with value %d'
                % (page.title(), dictName, value))
            for entryLang, entry in date.formats[dictName].items():
                if entryLang not in site.languages():
                    continue
                if entryLang != sitelang:
                    newname = entry(value)
                    x = pywikibot.Link(
                        newname,
                        pywikibot.Site(code=entryLang,
                                       fam=site.family))
                    result.add(x)
    return list(result)
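# Illustrative usage (assumes a configured pywikibot installation and an
# existing page; the site, title and hints below are examples only):
#
#     site = pywikibot.Site('en', 'wikipedia')
#     page = pywikibot.Page(site, '2003')
#     links = translate(page, hints=['de,fr', '5:'], auto=True)
#     # 'de,fr' asks for the same title on dewiki and frwiki; '5:' targets
#     # the five largest languages of the family.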
| 37.504505 | 77 | 0.515974 | [
"MIT"
] | h4ck3rm1k3/pywikibot-core | pywikibot/titletranslate.py | 4,120 | Python |
# Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.components.pusher.executor."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from google.protobuf import json_format
from tfx.components.pusher import executor
from tfx.proto import pusher_pb2
from tfx.types import standard_artifacts
class ExecutorTest(tf.test.TestCase):
def setUp(self):
super(ExecutorTest, self).setUp()
self._source_data_dir = os.path.join(
os.path.dirname(os.path.dirname(__file__)), 'testdata')
self._output_data_dir = os.path.join(
os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR', self.get_temp_dir()),
self._testMethodName)
tf.io.gfile.makedirs(self._output_data_dir)
self._model_export = standard_artifacts.Model()
self._model_export.uri = os.path.join(self._source_data_dir,
'trainer/current/')
self._model_blessing = standard_artifacts.ModelBlessing()
self._input_dict = {
'model_export': [self._model_export],
'model_blessing': [self._model_blessing],
}
self._model_push = standard_artifacts.PushedModel()
self._model_push.uri = os.path.join(self._output_data_dir, 'model_push')
tf.io.gfile.makedirs(self._model_push.uri)
self._output_dict = {
'model_push': [self._model_push],
}
self._serving_model_dir = os.path.join(self._output_data_dir,
'serving_model_dir')
tf.io.gfile.makedirs(self._serving_model_dir)
self._exec_properties = {
'push_destination':
json_format.MessageToJson(
pusher_pb2.PushDestination(
filesystem=pusher_pb2.PushDestination.Filesystem(
base_directory=self._serving_model_dir)),
preserving_proto_field_name=True),
}
self._executor = executor.Executor()
def testDoBlessed(self):
self._model_blessing.uri = os.path.join(self._source_data_dir,
'model_validator/blessed/')
self._model_blessing.set_int_custom_property('blessed', 1)
self._executor.Do(self._input_dict, self._output_dict,
self._exec_properties)
self.assertNotEqual(0, len(tf.io.gfile.listdir(self._serving_model_dir)))
self.assertNotEqual(0, len(tf.io.gfile.listdir(self._model_push.uri)))
self.assertEqual(
1, self._model_push.artifact.custom_properties['pushed'].int_value)
def testDoNotBlessed(self):
self._model_blessing.uri = os.path.join(self._source_data_dir,
'model_validator/not_blessed/')
self._model_blessing.set_int_custom_property('blessed', 0)
self._executor.Do(self._input_dict, self._output_dict,
self._exec_properties)
self.assertEqual(0, len(tf.io.gfile.listdir(self._serving_model_dir)))
self.assertEqual(0, len(tf.io.gfile.listdir(self._model_push.uri)))
self.assertEqual(
0, self._model_push.artifact.custom_properties['pushed'].int_value)
if __name__ == '__main__':
tf.test.main()
| 40.967391 | 77 | 0.694879 | [
"Apache-2.0"
] | etarakci-hvl/tfx | tfx/components/pusher/executor_test.py | 3,769 | Python |
"""
Create a flat plate of length 1.0 with aspect ratio 2.0 and a 30-degree
inclination.
The plate is discretized with spacing 0.04 in the x-y plane and with spacing
0.04 along the z-direction.
"""
import math
import pathlib
import numpy
# Flat-plate's parameters.
L = 1.0 # chord length
AR = 2.0 # aspect ratio
xc, yc, zc = 0.0, 0.0, 0.0 # center's coordinates
aoa = 30.0 # angle of inclination in degrees
ds = 0.04 # mesh spacing
simu_dir = pathlib.Path(__file__).absolute().parents[1]
# Generate coordinates of the flat plate.
n = math.ceil(L / ds)
s = numpy.linspace(xc - L / 2, xc + L / 2, num=n + 1)
x = xc + numpy.cos(numpy.radians(-aoa)) * s
y = yc + numpy.sin(numpy.radians(-aoa)) * s
nz = math.ceil(L * AR / ds)
z = numpy.linspace(zc - L * AR / 2, zc + L * AR / 2, num=nz + 1)
# Write coordinates into file.
filepath = simu_dir / 'flatplate.body'
with open(filepath, 'w') as outfile:
outfile.write('{}\n'.format(x.size * z.size))
for zi in z:
with open(filepath, 'ab') as outfile:
numpy.savetxt(outfile, numpy.c_[x, y, zi * numpy.ones(x.size)])
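# Note on the output (descriptive, not executed): the generated flatplate.body
# file starts with the total number of Lagrangian points, followed by one
# "x y z" line per point written with numpy.savetxt's default formatting.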
| 27.794872 | 76 | 0.657749 | [
"BSD-3-Clause"
] | CFD-lab-ZJU/PetIBM | examples/decoupledibpm/flatplate3dRe100AoA30_GPU/scripts/createBody.py | 1,084 | Python |
# coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for utils.random_py_environment."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tf_agents.environments import random_py_environment
from tf_agents.specs import array_spec
from tf_agents.utils import test_utils
class RandomPyEnvironmentTest(parameterized.TestCase, test_utils.TestCase):
def testEnvResetAutomatically(self):
obs_spec = array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10)
env = random_py_environment.RandomPyEnvironment(obs_spec)
time_step = env.step([0])
self.assertTrue(np.all(time_step.observation >= -10))
self.assertTrue(np.all(time_step.observation <= 10))
self.assertTrue(time_step.is_first())
while not time_step.is_last():
time_step = env.step([0])
self.assertTrue(np.all(time_step.observation >= -10))
self.assertTrue(np.all(time_step.observation <= 10))
time_step = env.step([0])
self.assertTrue(np.all(time_step.observation >= -10))
self.assertTrue(np.all(time_step.observation <= 10))
self.assertTrue(time_step.is_first())
@parameterized.named_parameters([
('OneStep', 1),
('FiveSteps', 5),
])
def testEnvMinDuration(self, min_duration):
obs_spec = array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10)
env = random_py_environment.RandomPyEnvironment(
obs_spec, episode_end_probability=0.9, min_duration=min_duration)
num_episodes = 100
for _ in range(num_episodes):
time_step = env.step([0])
self.assertTrue(time_step.is_first())
num_steps = 0
while not time_step.is_last():
time_step = env.step([0])
num_steps += 1
self.assertGreaterEqual(num_steps, min_duration)
@parameterized.named_parameters([
('OneStep', 1),
('FiveSteps', 5),
])
def testEnvMaxDuration(self, max_duration):
obs_spec = array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10)
env = random_py_environment.RandomPyEnvironment(
obs_spec, episode_end_probability=0.1, max_duration=max_duration)
num_episodes = 100
for _ in range(num_episodes):
time_step = env.step([0])
self.assertTrue(time_step.is_first())
num_steps = 0
while not time_step.is_last():
time_step = env.step([0])
num_steps += 1
self.assertLessEqual(num_steps, max_duration)
def testEnvChecksActions(self):
obs_spec = array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10)
action_spec = array_spec.BoundedArraySpec((2, 2), np.int32, -10, 10)
env = random_py_environment.RandomPyEnvironment(
obs_spec, action_spec=action_spec)
env.step(np.array([[0, 0], [0, 0]]))
with self.assertRaises(ValueError):
env.step([0])
def testRewardFnCalled(self):
def reward_fn(unused_step_type, action, unused_observation):
return action
action_spec = array_spec.BoundedArraySpec((1,), np.int32, -10, 10)
observation_spec = array_spec.BoundedArraySpec((1,), np.int32, -10, 10)
env = random_py_environment.RandomPyEnvironment(
observation_spec, action_spec, reward_fn=reward_fn)
time_step = env.step(1) # No reward in first time_step
self.assertEqual(0.0, time_step.reward)
time_step = env.step(1)
self.assertEqual(1, time_step.reward)
def testRendersImage(self):
action_spec = array_spec.BoundedArraySpec((1,), np.int32, -10, 10)
observation_spec = array_spec.BoundedArraySpec((1,), np.int32, -10, 10)
env = random_py_environment.RandomPyEnvironment(
observation_spec, action_spec, render_size=(4, 4, 3))
env.reset()
img = env.render()
self.assertTrue(np.all(img < 256))
self.assertTrue(np.all(img >= 0))
self.assertEqual((4, 4, 3), img.shape)
self.assertEqual(np.uint8, img.dtype)
def testBatchSize(self):
batch_size = 3
obs_spec = array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10)
env = random_py_environment.RandomPyEnvironment(obs_spec,
batch_size=batch_size)
time_step = env.step([0])
self.assertEqual(time_step.observation.shape, (3, 2, 3))
self.assertEqual(time_step.reward.shape[0], batch_size)
self.assertEqual(time_step.discount.shape[0], batch_size)
def testCustomRewardFn(self):
obs_spec = array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10)
batch_size = 3
env = random_py_environment.RandomPyEnvironment(
obs_spec,
reward_fn=lambda *_: np.ones(batch_size),
batch_size=batch_size)
env._done = False
env.reset()
time_step = env.step([0])
self.assertSequenceAlmostEqual([1.0] * 3, time_step.reward)
def testRewardCheckerBatchSizeOne(self):
# Ensure batch size 1 with scalar reward works
obs_spec = array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10)
env = random_py_environment.RandomPyEnvironment(
obs_spec,
reward_fn=lambda *_: np.array([1.0]),
batch_size=1)
env._done = False
env.reset()
time_step = env.step([0])
self.assertEqual(time_step.reward, 1.0)
def testRewardCheckerSizeMismatch(self):
# Ensure custom scalar reward with batch_size greater than 1 raises
# ValueError
obs_spec = array_spec.BoundedArraySpec((2, 3), np.int32, -10, 10)
env = random_py_environment.RandomPyEnvironment(
obs_spec,
reward_fn=lambda *_: 1.0,
batch_size=5)
env.reset()
env._done = False
with self.assertRaises(ValueError):
env.step([0])
if __name__ == '__main__':
test_utils.main()
| 34.77095 | 75 | 0.696497 | [
"Apache-2.0"
] | ChengshuLi/agents | tf_agents/environments/random_py_environment_test.py | 6,224 | Python |
# Copyright 2017 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import service
from oslo_utils import timeutils
from sqlalchemy.orm import exc as orm_exc
from apmec.common import topics
from apmec import context as t_context
from apmec.db.common_services import common_services_db
from apmec.db.meo import meo_db
from apmec.extensions import meo
from apmec import manager
from apmec.plugins.common import constants
from apmec import service as apmec_service
from apmec import version
LOG = logging.getLogger(__name__)
class Conductor(manager.Manager):
def __init__(self, host, conf=None):
if conf:
self.conf = conf
else:
self.conf = cfg.CONF
super(Conductor, self).__init__(host=self.conf.host)
def update_vim(self, context, vim_id, status):
t_admin_context = t_context.get_admin_context()
update_time = timeutils.utcnow()
with t_admin_context.session.begin(subtransactions=True):
try:
query = t_admin_context.session.query(meo_db.Vim)
query.filter(
meo_db.Vim.id == vim_id).update(
{'status': status,
'updated_at': update_time})
except orm_exc.NoResultFound:
raise meo.VimNotFoundException(vim_id=vim_id)
event_db = common_services_db.Event(
resource_id=vim_id,
resource_type=constants.RES_TYPE_VIM,
resource_state=status,
event_details="",
event_type=constants.RES_EVT_MONITOR,
timestamp=update_time)
t_admin_context.session.add(event_db)
return status
def init(args, **kwargs):
cfg.CONF(args=args, project='apmec',
version='%%prog %s' % version.version_info.release_string(),
**kwargs)
# FIXME(ihrachys): if import is put in global, circular import
# failure occurs
from apmec.common import rpc as n_rpc
n_rpc.init(cfg.CONF)
def main(manager='apmec.conductor.conductor_server.Conductor'):
init(sys.argv[1:])
logging.setup(cfg.CONF, "apmec")
oslo_messaging.set_transport_defaults(control_exchange='apmec')
logging.setup(cfg.CONF, "apmec")
cfg.CONF.log_opt_values(LOG, logging.DEBUG)
server = apmec_service.Service.create(
binary='apmec-conductor',
topic=topics.TOPIC_CONDUCTOR,
manager=manager)
service.launch(cfg.CONF, server).wait()
| 34.597826 | 78 | 0.678291 | [
"Apache-2.0"
] | openMECPlatform/apmec | apmec/conductor/conductor_server.py | 3,183 | Python |
# -*- coding: utf-8 -*-
import argparse
from GoogleScraper.version import __version__
def get_command_line(only_print_help=False):
"""
Parse command line arguments when GoogleScraper is used as a CLI application.
Returns:
The configuration as a dictionary that determines the behaviour of the app.
"""
parser = argparse.ArgumentParser(prog='GoogleScraper',
description='Scrapes the Google, Yandex, Bing and many other search engines by '
'forging http requests that imitate browser searches or by using real '
'browsers controlled by the selenium framework. '
'Multithreading support.',
epilog='GoogleScraper {version}. This program might infringe the TOS of the '
'search engines. Please use it on your own risk. (c) by Nikolai Tschacher'
', 2012-2018. incolumitas.com'.format(version=__version__))
parser.add_argument('-m', '--scrape-method', type=str, default='http',
help='The scraping type. There are currently three types: "http", "selenium" and "http-async". '
'"Http" scrapes with raw http requests, whereas "selenium" uses the selenium framework to '
'remotely control browsers. "http-async" makes use of gevent and is well suited for '
'extremely fast and explosive scraping jobs. You may search more than 1000 requests per '
'second if you have the necessary number of proxies available. ',
choices=('http', 'selenium', 'http-async'))
parser.add_argument('--sel-browser', choices=['firefox', 'chrome'], default='chrome',
help='The browser frontend for selenium scraping mode. Takes only effect if --scrape-method is set to "selenium"')
parser.add_argument('--browser-mode', choices=['normal', 'headless'], default='normal',
help='In which mode the browser is started. Valid values = (normal, headless)')
keyword_group = parser.add_mutually_exclusive_group()
keyword_group.add_argument('-q', '--keyword', type=str, action='store', dest='keyword',
help='The search keyword to scrape for. If you need to scrape multiple keywords, use '
'the --keyword-file flag')
keyword_group.add_argument('--keyword-file', type=str, action='store', default='',
help='Keywords to search for. One keyword per line. Empty lines are ignored. '
'Alternatively, you may specify the path to an python module (must end with the '
'.py suffix) where the keywords must be held in a dictionary with the name "scrape_'
'jobs".')
parser.add_argument('-o-', '--output-filename', type=str, action='store', default='',
help='The name of the output file. If the file ending is "json", write a json file, if the '
'ending is "csv", write a csv file.')
parser.add_argument('--shell', action='store_true', default=False,
help='Fire up a shell with a loaded sqlalchemy session.')
parser.add_argument('-n', '--num-results-per-page', type=int,
action='store', default=10,
help='The number of results per page. Must be smaller than 100, by default 50 for raw mode and '
'10 for selenium mode. Some search engines ignore this setting.')
parser.add_argument('-p', '--num-pages-for-keyword', type=int, action='store',
default=1,
help='The number of pages to request for each keyword. Each page is requested by a unique '
'connection and if possible by a unique IP (at least in "http" mode).')
parser.add_argument('-z', '--num-workers', type=int, default=1,
action='store',
help='This arguments sets the number of browser instances for selenium mode or the number of '
'worker threads in http mode.')
parser.add_argument('-t', '--search-type', type=str, action='store', default='normal',
help='The searchtype to launch. May be normal web search, image search, news search or video '
'search.')
parser.add_argument('--proxy-file', type=str, dest='proxy_file', action='store',
required=False, help='A filename for a list of proxies (supported are HTTP PROXIES, SOCKS4/5) '
'with the following format: "Proxyprotocol (proxy_ip|proxy_host):Port\n"'
'Example file: socks4 127.0.0.1:99\nsocks5 33.23.193.22:1080\n')
parser.add_argument('--config-file', type=str, dest='config_file', action='store',
help='The path to the configuration file for GoogleScraper. Normally you won\'t need this, '
'because GoogleScrape comes shipped with a thoroughly commented configuration file named '
'"scrape_config.py"')
parser.add_argument('--check-detection', type=str, dest='check_detection', action='store',
                        help='Check if the given search engine blocked you from scraping. Often detection can be determined '
'if you have to solve a captcha.')
parser.add_argument('--simulate', action='store_true', default=False, required=False,
help='''If this flag is set, the scrape job and its estimated length will be printed.''')
loglevel_help = '''
Set the debug level of the application. Use the string representation
instead of the numbers. High numbers will output less, low numbers more.
CRITICAL = 50,
FATAL = CRITICAL,
ERROR = 40,
WARNING = 30,
WARN = WARNING,
INFO = 20,
DEBUG = 10,
NOTSET = 0
'''
parser.add_argument('-v', '--verbosity', '--loglevel',
                        dest='log_level', default='INFO', type=str.lower,
choices=['debug', 'info', 'warning', 'warn', 'error', 'critical', 'fatal'], help=loglevel_help)
parser.add_argument('--print-results', choices=['all', 'summarize'], default='all',
help='Whether to print all results ("all"), or only print a summary ("summarize")')
parser.add_argument('--view-config', action='store_true', default=False,
help="Print the current configuration to stdout. You may use it to create and tweak your own "
"config file from it.")
parser.add_argument('-V', '--v', '--version', action='store_true', default=False, dest='version',
help='Prints the version of GoogleScraper')
parser.add_argument('--clean', action='store_true', default=False,
help='Cleans all stored data. Please be very careful when you use this flag.')
parser.add_argument('--mysql-proxy-db', action='store',
help="A mysql connection string for proxies to use. Format: mysql://<username>:<password>@"
"<host>/<dbname>. Has precedence over proxy files.")
parser.add_argument('-s', '--search-engines', action='store', default=['google'],
help='What search engines to use (See GoogleScraper --config for the all supported). If you '
'want to use more than one at the same time, just separate with commatas: "google, bing, '
'yandex". If you want to use all search engines that are available, give \'*\' as '
'argument.')
    # custom arguments
parser.add_argument('--proxy_chain_ips', type=str, action='store', default="local",
help='proxy_chain_ips to forward requests')
parser.add_argument('--strict', action='store_true', default=False,
help='Defines strict google / bing search')
parser.add_argument('--no-cache', action='store_true', default=False,
help='Disable caching')
if only_print_help:
parser.print_help()
else:
args = parser.parse_args()
return vars(args)
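# Illustrative usage (not part of the CLI itself); the argv values below are
# made up for demonstration:
#
#     import sys
#     sys.argv = ['GoogleScraper', '-m', 'http', '-q', 'apple', '-v', 'info']
#     config = get_command_line()
#     # config is a plain dict, e.g. config['keyword'] == 'apple'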
| 58.302013 | 138 | 0.574191 | [
"Apache-2.0"
] | hnhnarek/GoogleScraper | GoogleScraper/commandline.py | 8,687 | Python |
# -*- coding: utf-8 -*-
"""
auto rule template
~~~~
:author: LoRexxar <[email protected]>
:homepage: https://github.com/LoRexxar/Kunlun-M
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 LoRexxar. All rights reserved
"""
from utils.api import *
class CVI_3003():
"""
rule class
"""
def __init__(self):
self.svid = 3003
self.language = "javascript"
self.author = "LoRexxar"
self.vulnerability = "RCE"
self.description = "remote? code exec"
# status
self.status = True
        # partial configuration
self.match_mode = "function-param-regex"
self.match = r"eval|setTimeout"
# for solidity
self.match_name = None
self.black_list = None
# for chrome ext
self.keyword = None
# for regex
self.unmatch = None
self.vul_function = None
def main(self, regex_string):
"""
regex string input
:regex_string: regex match string
:return:
"""
pass
| 20.185185 | 64 | 0.555046 | [
"MIT"
] | Afant1/Kunlun-M | rules/javascript/CVI_3003.py | 1,098 | Python |
'''
Exercise 5: Vectors
A vector of dimension n can be represented by a list in Python. For example, a
vector of dimension 3 could represent a point in space, and a vector of
dimension 4 could represent a point in space and time (the fourth dimension
being the time). In mathematical notation, a vector of dimension 3 is written
as a column of three components (a, b, c); it can be stored in a Python list
[a, b, c]. There are two simple operations that can be done on vectors, and
the result of each operation is also a vector:
Scalar product: lambda * (a, b, c) = (lambda*a, lambda*b, lambda*c)
Addition: (a, b, c) + (d, e, f) = (a+d, b+e, c+f)
Implement two functions:
1. scalar_product(scalar, vector) where scalar is a float and vector is a list
   of float. The function returns the scalar product of the two parameters.
2. vector_addition(vector1, vector2) where vector1 and vector2 are lists of
   float. The function returns the vector addition of the two parameters. If
   vector1 and vector2 don't have the same dimension, you should print an
   error message and return None.
'''
def scalar_product(scalar, vector):
    # Multiply every component by the scalar; the result is a new vector.
    return [scalar * component for component in vector]
def vector_addition(vector1, vector2):
    # Vectors can only be added when they have the same dimension.
    if len(vector1) != len(vector2):
        print('Error: vectors must have the same dimension')
        return None
    return [a + b for a, b in zip(vector1, vector2)]
scalar = float(input('Enter scalar value: '))
vector = [float(x) for x in input('Enter a vector separated by commas: ').split(',')]
print(scalar_product(scalar, vector))
vector1 = [float(x) for x in input('Enter first vector to add: ').split(',')]
vector2 = [float(x) for x in input('Enter second vector to add: ').split(',')]
print(vector_addition(vector1, vector2)) | 27.442623 | 124 | 0.717443 | [
"Apache-2.0"
] | dragoonfirestormar/SOFT1 | week4/week4_additionalexercice_5.py | 1,864 | Python |
from nose.tools import assert_equals
from framework.pages.loginPage import loginPage
from framework.pages.headerPage import headerPage
from framework.core.webdriverfactory import WebDriverFactory
from framework.core.configuration import webdriver_configuration
class testLogin():
baseUrl = "http://twiindan.pythonanywhere.com/admin"
@classmethod
def setup_class(self):
wdf = WebDriverFactory(webdriver_configuration)
self.driver = wdf.getWebDriverInstance()
self.login_page = loginPage(self.driver)
def setup(self):
self.login_page.navigate()
def test_correct_login(self):
self.login_page.locate_elements()
self.login_page.fillUsername('user1')
self.login_page.fillPassword('selenium')
self.login_page.submitClick()
@classmethod
def teardown_class(self):
self.driver.quit()
| 26.029412 | 64 | 0.732203 | [
"Apache-2.0"
] | twiindan/selenium_lessons | 04_Selenium/framework/tests/testLogin.py | 885 | Python |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pandas as pd
class Index(object):
def __init__(self, idx):
self.idx = idx
    @staticmethod
    def to_pandas(indices):
if isinstance(indices[0], pd.RangeIndex):
merged = indices[0]
for index in indices[1:]:
merged = merged.union(index)
return merged
else:
return indices[0].append(indices[1:])
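    # Illustrative usage (the partition indices below are examples only):
    #     parts = [pd.RangeIndex(0, 3), pd.RangeIndex(3, 6)]
    #     Index.to_pandas(parts)  # -> a single index covering rows 0..5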
| 23.045455 | 49 | 0.625247 | [
"Apache-2.0"
] | cathywu/ray | python/ray/dataframe/index.py | 507 | Python |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Stc(AutotoolsPackage):
"""STC: The Swift-Turbine Compiler"""
homepage = 'http://swift-lang.org/Swift-T'
url = 'http://swift-lang.github.io/swift-t-downloads/spack/stc-0.8.3.tar.gz'
git = "https://github.com/swift-lang/swift-t.git"
version('master', branch='master')
version('0.8.3', sha256='d61ca80137a955b12e84e41cb8a78ce1a58289241a2665076f12f835cf68d798')
version('0.8.2', sha256='13f0f03fdfcca3e63d2d58d7e7dbdddc113d5b9826c9357ab0713b63e8e42c5e')
depends_on('java', type=('build', 'run'))
depends_on('ant', type='build')
depends_on('turbine', type=('build', 'run'))
depends_on('turbine@master', type=('build', 'run'), when='@master')
depends_on('zsh', type=('build', 'run'))
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
@property
def configure_directory(self):
if self.version == Version('master'):
return 'stc/code'
else:
return '.'
def configure_args(self):
args = ['--with-turbine=' + self.spec['turbine'].prefix]
return args
| 34.175 | 95 | 0.659108 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 0t1s1/spack | var/spack/repos/builtin/packages/stc/package.py | 1,367 | Python |
from aws_cdk import (
aws_autoscaling as autoscaling,
aws_ec2 as ec2,
aws_ecs as ecs,
core,
)
class ECSCluster(core.Stack):
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
vpc = ec2.Vpc(
self, "MyVpc",
max_azs=2
)
asg = autoscaling.AutoScalingGroup(
self, "MyFleet",
instance_type=ec2.InstanceType("t2.xlarge"),
machine_image=ecs.EcsOptimizedAmi(),
associate_public_ip_address=True,
update_type=autoscaling.UpdateType.REPLACING_UPDATE,
desired_capacity=3,
vpc=vpc,
vpc_subnets={ 'subnet_type': ec2.SubnetType.PUBLIC },
)
cluster = ecs.Cluster(
self, 'EcsCluster',
vpc=vpc
)
cluster.add_auto_scaling_group(asg)
cluster.add_capacity("DefaultAutoScalingGroup",
instance_type=ec2.InstanceType("t2.micro"))
app = core.App()
ECSCluster(app, "MyFirstEcsCluster")
app.synth()
| 26.190476 | 73 | 0.583636 | [
"Apache-2.0"
] | 1digicoder/aws-cdk-examples | python/ecs/cluster/app.py | 1,100 | Python |
#!/usr/bin/env python
"""
Add docstring
"""
from datetime import datetime
from protorpc import messages
from google.appengine.ext import ndb
# Define game objects
class Game(ndb.Model):
"""Game object"""
deck = ndb.StringProperty(repeated=True)
disp_deck = ndb.StringProperty(repeated=True)
attempts_allowed = ndb.IntegerProperty(required=True)
attempts_remaining = ndb.IntegerProperty(required=True, default=30)
game_over = ndb.BooleanProperty(required=True, default=False)
attempts_made = ndb.IntegerProperty(required=True)
match_list = ndb.StringProperty(repeated=True)
match_list_int = ndb.IntegerProperty(repeated=True)
matches_found = ndb.IntegerProperty(required=True)
guess1_or_guess2 = ndb.IntegerProperty()
cancelled = ndb.BooleanProperty(required=True, default=False)
guess_history = ndb.StringProperty(repeated=True)
time_created = ndb.StringProperty(required=True)
user = ndb.KeyProperty(required=True, kind='User')
@classmethod
def new_game(cls, user, attempts, deck, disp_deck, attempts_made,
match_list, match_list_int, matches_found,
guess1_or_guess2, guess_history):
"""Create and return a new game"""
if attempts < 30 or attempts > 60:
raise ValueError(
'Number of attempts must be more than 29 and less than 61')
game = Game(
user=user,
deck=deck,
attempts_allowed=attempts,
attempts_remaining=attempts,
disp_deck=disp_deck,
attempts_made=attempts_made,
match_list=match_list,
match_list_int=match_list_int,
matches_found=matches_found,
guess1_or_guess2=guess1_or_guess2,
game_over=False,
cancelled=False,
guess_history=guess_history,
time_created=str(datetime.now()))
game.put()
return game
def to_form(self, message):
"""Return a GameForm representation of the game"""
form = GameForm()
form.urlsafe_key = self.key.urlsafe()
form.user_name = self.user.get().name
form.attempts_remaining = self.attempts_remaining
form.game_over = self.game_over
form.cancelled = self.cancelled
form.disp_deck = self.disp_deck
form.attempts_made = self.attempts_made
form.match_list = self.match_list
form.matches_found = self.matches_found
form.time_created = self.time_created
form.message = message
return form
def to_form_user_games(self):
"""Return a GameFormUserGame representation of the game;
this form displays a custom list of the game entities and is
used in the get_user_games endpoint"""
return GameFormUserGame(
urlsafe_key=self.key.urlsafe(),
user_name=self.user.get().name,
attempts_remaining=self.attempts_remaining,
game_over=self.game_over,
disp_deck=self.disp_deck,
attempts_made=self.attempts_made,
match_list=self.match_list,
matches_found=self.matches_found,
time_created=self.time_created)
def to_form_game_history(self, message):
"""Return a GameHistory representation of the game;
this form displays a custom list of the game entities and is
used in the get_game_history endpoint"""
return GameHistory(
user_name=self.user.get().name,
guess_history=self.guess_history,
attempts_made=self.attempts_made,
match_list=self.match_list,
matches_found=self.matches_found,
deck=self.deck,
time_created=self.time_created,
message=message)
def end_game(self, won=False):
"""End the game; if won is True, the player won;
if won is False, the player lost"""
self.game_over = True
self.put()
# Add the game to the score board
# (a score is only returned when a game ends)
points = self.points = (
500 - ((self.attempts_made - self.matches_found) * 10))
score = Score(
user=self.user,
time_completed=str(datetime.now()),
won=won,
attempts_made=self.attempts_made,
game_deck=self.deck,
matches_found=self.matches_found,
points=points)
score.put()
# Message definitions
class GameForm(messages.Message):
"""Used for outbound game information"""
urlsafe_key = messages.StringField(1, required=True)
attempts_remaining = messages.IntegerField(2, required=True)
game_over = messages.BooleanField(3, required=True)
message = messages.StringField(4, required=True)
user_name = messages.StringField(5, required=True)
disp_deck = messages.StringField(6, repeated=True)
attempts_made = messages.IntegerField(7, required=True)
match_list = messages.StringField(8, repeated=True)
matches_found = messages.IntegerField(9, required=True, default=0)
cancelled = messages.BooleanField(10, required=True)
time_created = messages.StringField(11, required=True)
class GameFormUserGame(messages.Message):
"""Used for outbound information on the state of a
user's active game"""
urlsafe_key = messages.StringField(1, required=True)
attempts_remaining = messages.IntegerField(2, required=True)
game_over = messages.BooleanField(3, required=True)
user_name = messages.StringField(4, required=True)
disp_deck = messages.StringField(5, repeated=True)
attempts_made = messages.IntegerField(6, required=True)
match_list = messages.StringField(7, repeated=True)
matches_found = messages.IntegerField(8, required=True)
time_created = messages.StringField(9, required=True)
class GameForms(messages.Message):
"""Outbound container for a list of GameFormUserGame forms"""
items = messages.MessageField(GameFormUserGame, 1, repeated=True)
class NewGameForm(messages.Message):
"""Inbound form used to create a new game"""
user_name = messages.StringField(1, required=True)
attempts = messages.IntegerField(2, required=True)
class MakeMoveForm(messages.Message):
"""Inbound form used to make a move"""
guess = messages.IntegerField(1, required=True)
class ScoreForm(messages.Message):
"""Used for outbound score information for finished games"""
user_name = messages.StringField(1, required=True)
time_completed = messages.StringField(2, required=True)
won = messages.BooleanField(3, required=True)
attempts_made = messages.IntegerField(4, required=True)
game_deck = messages.StringField(5, repeated=True)
matches_found = messages.IntegerField(6, required=True, default=0)
points = messages.IntegerField(7, required=True, default=0)
class ScoreForms(messages.Message):
"""Outbound container for a list of ScoreForm forms"""
items = messages.MessageField(ScoreForm, 1, repeated=True)
class GameHistory(messages.Message):
"""Used for outbound information on each guess made
and the outcome of a game"""
user_name = messages.StringField(1, required=True)
guess_history = messages.StringField(2, repeated=True)
attempts_made = messages.IntegerField(3, required=True)
match_list = messages.StringField(4, repeated=True)
matches_found = messages.IntegerField(5, required=True)
deck = messages.StringField(6, repeated=True)
time_created = messages.StringField(7, required=True)
message = messages.StringField(8)
class StringMessage(messages.Message):
"""A single outbound string message"""
message = messages.StringField(1, required=True)
| 38.019608 | 75 | 0.685405 | [
"MIT"
] | bencam/pelmanism | models/game.py | 7,756 | Python |
import argparse
argparser = argparse.ArgumentParser()
argparser.add_argument("-m", "--map", dest="map_id",
help="checks the leaderboard on the given beatmap id against each other")
argparser.add_argument("-u", "--user", dest="user_id",
help="checks only the given user against the other leaderboard replays. Must be set with -m")
argparser.add_argument("-l", "--local", help=("compare scores under the replays/ directory to a beatmap leaderboard (if set with -m), "
"a score set by a user on a beatmap (if set with -m and -u) or the other scores in the folder "
"(default behavior)"), action="store_true")
argparser.add_argument("-t", "--threshold", help="sets the similarity threshold to print results that score under it. Defaults to 20", type=int, default=18)
argparser.add_argument("-a", "--auto", help="Sets the threshold to a number of standard deviations below the average similarity", type=float, dest="stddevs")
argparser.add_argument("-n", "--number", help="how many replays to get from a beatmap. No effect if not set with -m. Must be between 2 and 100 inclusive,"
"defaults to 50. NOTE: THE TIME COMPLEXITY OF THE COMPARISONS WILL SCALE WITH O(n^2).", type=int, default=50)
argparser.add_argument("-c", "--cache", help="If set, locally caches replays so they don't have to be redownloaded when checking the same map multiple times.",
action="store_true")
argparser.add_argument("-s", "--silent", help="If set, you will not be prompted for a visualization of comparisons under the threshold",
action="store_true")
argparser.add_argument("-v", "--verify", help="Takes 3 positional arguments - map id, user1 id and user2 id. Verifies that the scores are steals of each other", nargs=3)
argparser.add_argument("--version", help="Prints the program version", action="store_true")
| 68.433333 | 169 | 0.649294 | [
"MIT"
] | wmpmiles/circleguard | circleguard/argparser.py | 2,053 | Python |
'''Tests about the gzippy top-level functions.'''
import unittest
from test import scratch_file
import gzippy
class GzippyTest(unittest.TestCase):
'''Tests about the gzippy top-level functions.'''
def test_open_with_plus(self):
'''Opening with r+ is not allowed.'''
with scratch_file('example.gz') as path:
with open(path, 'w+') as fout:
pass
with self.assertRaises(ValueError):
with gzippy.open(path, 'r+') as fin:
pass
def test_open_with_append(self):
'''Opening in append mode is not allowed.'''
with scratch_file('example.gz') as path:
with open(path, 'w+') as fout:
pass
with self.assertRaises(ValueError):
with gzippy.open(path, 'ab') as fout:
pass
| 26.454545 | 53 | 0.561283 | [
"MIT"
] | seomoz/gzippy | test/test_gzippy.py | 873 | Python |
import copy
import json
import logging
import warnings
import requests
from elastalert.alerter import Alerter
from elastalert.exceptions import EAException
from elastalert.utils.time import DateTimeEncoder
from elastalert.utils.util import lookup_es_key
from requests import RequestException
log = logging.getLogger(__name__)
class SlackAlerter(Alerter):
""" Creates a Slack room message for each alert """
required_options = frozenset(["slack_webhook_url"])
def __init__(self, rule):
super(SlackAlerter, self).__init__(rule)
self.slack_webhook_url = self.rule["slack_webhook_url"]
if isinstance(self.slack_webhook_url, str):
self.slack_webhook_url = [self.slack_webhook_url]
self.slack_proxy = self.rule.get("slack_proxy", None)
self.slack_username_override = self.rule.get(
"slack_username_override", "elastalert"
)
self.slack_channel_override = self.rule.get("slack_channel_override", "")
if isinstance(self.slack_channel_override, str):
self.slack_channel_override = [self.slack_channel_override]
self.slack_title_link = self.rule.get("slack_title_link", "")
self.slack_title = self.rule.get("slack_title", "")
self.slack_emoji_override = self.rule.get("slack_emoji_override", ":ghost:")
self.slack_icon_url_override = self.rule.get("slack_icon_url_override", "")
self.slack_msg_color = self.rule.get("slack_msg_color", "danger")
self.slack_parse_override = self.rule.get("slack_parse_override", "none")
self.slack_text_string = self.rule.get("slack_text_string", "")
self.slack_alert_fields = self.rule.get("slack_alert_fields", "")
self.slack_ignore_ssl_errors = self.rule.get("slack_ignore_ssl_errors", False)
self.slack_timeout = self.rule.get("slack_timeout", 10)
self.slack_ca_certs = self.rule.get("slack_ca_certs")
self.slack_attach_kibana_discover_url = self.rule.get(
"slack_attach_kibana_discover_url", False
)
self.slack_kibana_discover_color = self.rule.get(
"slack_kibana_discover_color", "#ec4b98"
)
self.slack_kibana_discover_title = self.rule.get(
"slack_kibana_discover_title", "Discover in Kibana"
)
def format_body(self, body):
# https://api.slack.com/docs/formatting
return body
def get_aggregation_summary_text__maximum_width(self):
width = super(SlackAlerter, self).get_aggregation_summary_text__maximum_width()
# Reduced maximum width for prettier Slack display.
return min(width, 75)
def get_aggregation_summary_text(self, matches):
text = super(SlackAlerter, self).get_aggregation_summary_text(matches)
if text:
text = "```\n{0}```\n".format(text)
return text
def populate_fields(self, matches):
alert_fields = []
for arg in self.slack_alert_fields:
arg = copy.copy(arg)
arg["value"] = lookup_es_key(matches[0], arg["value"])
alert_fields.append(arg)
return alert_fields
def alert(self, matches):
body = self.create_alert_body(matches)
body = self.format_body(body)
# post to slack
headers = {"content-type": "application/json"}
# set https proxy, if it was provided
proxies = {"https": self.slack_proxy} if self.slack_proxy else None
payload = {
"username": self.slack_username_override,
"parse": self.slack_parse_override,
"text": self.slack_text_string,
"attachments": [
{
"color": self.slack_msg_color,
"title": self.create_title(matches),
"text": body,
"mrkdwn_in": ["text", "pretext"],
"fields": [],
}
],
}
# if we have defined fields, populate noteable fields for the alert
if self.slack_alert_fields != "":
payload["attachments"][0]["fields"] = self.populate_fields(matches)
if self.slack_icon_url_override != "":
payload["icon_url"] = self.slack_icon_url_override
else:
payload["icon_emoji"] = self.slack_emoji_override
if self.slack_title != "":
payload["attachments"][0]["title"] = self.slack_title
if self.slack_title_link != "":
payload["attachments"][0]["title_link"] = self.slack_title_link
if self.slack_attach_kibana_discover_url:
kibana_discover_url = lookup_es_key(matches[0], "kibana_discover_url")
if kibana_discover_url:
payload["attachments"].append(
{
"color": self.slack_kibana_discover_color,
"title": self.slack_kibana_discover_title,
"title_link": kibana_discover_url,
}
)
for url in self.slack_webhook_url:
for channel_override in self.slack_channel_override:
try:
if self.slack_ca_certs:
verify = self.slack_ca_certs
else:
verify = not self.slack_ignore_ssl_errors
if self.slack_ignore_ssl_errors:
requests.packages.urllib3.disable_warnings()
payload["channel"] = channel_override
response = requests.post(
url,
data=json.dumps(payload, cls=DateTimeEncoder),
headers=headers,
verify=verify,
proxies=proxies,
timeout=self.slack_timeout,
)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to slack: %s" % e)
log.info("Alert '%s' sent to Slack" % self.rule["name"])
def get_info(self):
return {
"type": "slack",
"slack_username_override": self.slack_username_override,
}
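# Illustrative construction (the webhook URL is a placeholder and the base
# Alerter class may require additional rule keys not shown here):
#
#     rule = {
#         "name": "example-rule",
#         "type": "any",
#         "slack_webhook_url": "https://hooks.slack.com/services/T000/B000/XXX",
#     }
#     alerter = SlackAlerter(rule)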
| 40.606452 | 87 | 0.601525 | [
"Apache-2.0"
] | JasperJuergensen/elastalert | elastalert/alerter/slack_alerter.py | 6,294 | Python |
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 29 11:46:26 2022
@author: Pedro
"""
def search(lista, target) -> int:
for i in range(len(lista)):
if lista [i] == target:
return i
return -1
def search2(lista, target) -> int:
for i, element in enumerate(lista):
if element == target:
return i
return -1 | 19.888889 | 39 | 0.553073 | [
"MIT"
] | pgentil/PC | teoria/clase 29-03/algoritmos.py | 358 | Python |
from .agent import Figaro
from .handlers.arithmetichandler import ArithmeticHandler
from .handlers.elizastatementhandler import ElizaStatementHandler
| 30.2 | 65 | 0.880795 | [
"Apache-2.0"
] | rylans/figaro | figaro/__init__.py | 151 | Python |
"""Constants used to ensure topic consistantancy."""
STANDARD_RECEIVED = "standard_received"
EXTENDED_RECEIVED = "extended_received"
X10_RECEIVED = "x10_received"
ALL_LINKING_COMPLETED = "all_linking_completed"
BUTTON_EVENT_REPORT = "button_event_report"
USER_RESET_DETECTED = "user_reset_detected"
ALL_LINK_CLEANUP_FAILURE_REPORT = "all_link_cleanup_failure_report"
ALL_LINK_RECORD_RESPONSE = "all_link_record_response"
ALL_LINK_CLEANUP_STATUS_REPORT = "all_link_cleanup_status_report"
GET_IM_INFO = "get_im_info"
SEND_ALL_LINK_COMMAND = "send_all_link_command"
SEND_STANDARD = "send_standard"
SEND_EXTENDED = "send_extended"
X10_SEND = "x10_send"
START_ALL_LINKING = "start_all_linking"
CANCEL_ALL_LINKING = "cancel_all_linking"
SET_HOST_DEV_CAT = "set_host_dev_cat"
RESET_IM = "reset_im"
SET_ACK_MESSAGE_BYTE = "set_ack_message_byte"
GET_FIRST_ALL_LINK_RECORD = "get_first_all_link_record"
GET_NEXT_ALL_LINK_RECORD = "get_next_all_link_record"
SET_IM_CONFIGURATION = "set_im_configuration"
GET_ALL_LINK_RECORD_FOR_SENDER = "get_all_link_record_for_sender"
LED_ON = "led_on"
LED_OFF = "led_off"
MANAGE_ALL_LINK_RECORD = "manage_all_link_record"
SET_NAK_MESSAGE_BYTE = "set_nak_message_byte"
SET_ACK_MESSAGE_TWO_BYTES = "set_ack_message_two_bytes"
RF_SLEEP = "rf_sleep"
GET_IM_CONFIGURATION = "get_im_configuration"
# Command Topics
ASSIGN_TO_ALL_LINK_GROUP = "assign_to_all_link_group"
DELETE_FROM_ALL_LINK_GROUP = "delete_from_all_link_group"
PRODUCT_DATA_REQUEST = "product_data_request"
FX_USERNAME = "fx_username"
DEVICE_TEXT_STRING_REQUEST = "device_text_string_request"
SET_DEVICE_TEXT_STRING = "set_device_text_string"
SET_ALL_LINK_COMMAND_ALIAS = "set_all_link_command_alias"
SET_ALL_LINK = "set_all_link"
ENTER_LINKING_MODE = "enter_linking_mode"
ENTER_UNLINKING_MODE = "enter_unlinking_mode"
GET_INSTEON_ENGINE_VERSION = "get_insteon_engine_version"
PING = "ping"
ID_REQUEST = "id_request"
ID_REQUEST_RESPONSE = "id_request_response"
ON = "on"
ON_FAST = "on_fast"
OFF = "off"
OFF_FAST = "off_fast"
BRIGHTEN_ONE_STEP = "brighten_one_step"
DIM_ONE_STEP = "dim_one_step"
START_MANUAL_CHANGE_DOWN = "start_manual_change_down"
START_MANUAL_CHANGE_UP = "start_manual_change_up"
STOP_MANUAL_CHANGE = "stop_manual_change"
STATUS_REQUEST = "status_request"
GET_OPERATING_FLAGS = "get_operating_flags"
SET_OPERATING_FLAGS = "set_operating_flags"
INSTANT_CHANGE = "instant_change"
MANUALLY_TURNED_OFF = "manually_turned_off"
MANUALLY_TURNED_ON = "manually_turned_on"
REMOTE_SET_BUTTON_TAP1_TAP = "remote_set_button_tap1_tap"
REMOTE_SET_BUTTON_TAP2_TAP = "remote_set_button_tap2_tap"
SET_STATUS = "set_status"
SET_ADDRESS_MSB = "set_address_msb"
POKE_ONE_BYTE = "poke_one_byte"
PEEK_ONE_BYTE = "peek_one_byte"
PEEK_ONE_BYTE_INTERNAL = "peek_one_byte_internal"
POKE_ONE_BYTE_INTERNAL = "poke_one_byte_internal"
ON_AT_RAMP_RATE = "on_at_ramp_rate"
EXTENDED_GET_SET = "extended_get_set"
EXTENDED_GET_RESPONSE = "extended_get_response"
THERMOSTAT_SET_POINT_RESPONSE = "thermostat_set_point_response"
THERMOSTAT_STATUS_RESPONSE = "thermostat_status_response"
EXTENDED_GET_SET_2 = "extended_get_set_2"
OFF_AT_RAMP_RATE = "off_at_ramp_rate"
EXTENDED_READ_WRITE_ALDB = "extended_read_write_aldb"
EXTENDED_TRIGGER_ALL_LINK = "extended_trigger_all_link"
BEEP = "beep"
SPRINKLER_VALVE_ON = "sprinkler_valve_on"
SPRINKLER_VALVE_OFF = "sprinkler_valve_off"
SPRINKLER_PROGRAM_ON = "sprinkler_program_on"
SPRINKLER_PROGRAM_OFF = "sprinkler_program_off"
SPRINKLER_LOAD_INITIALIZATION_VALUES = "sprinkler_load_initialization_values"
SPRINKLER_LOAD_EEPROM_FROM_RAM = "sprinkler_load_eeprom_from_ram"
SPRINKLER_GET_VALVE_STATUS = "sprinkler_get_valve_status"
SPRINKLER_INHIBIT_COMMAND_ACCEPTANCE = "sprinkler_inhibit_command_acceptance"
SPRINKLER_RESUME_COMMAND_ACCEPTANCE = "sprinkler_resume_command_acceptance"
SPRINKLER_SKIP_FORWARD = "sprinkler_skip_forward"
SPRINKLER_SKIP_BACK = "sprinkler_skip_back"
SPRINKLER_ENABLE_PUMP_ON_V8 = "sprinkler_enable_pump_on_v8"
SPRINKLER_DISABLE_PUMP_ON_V8 = "sprinkler_disable_pump_on_v8"
SPRINKLER_BROADCAST_ON = "sprinkler_broadcast_on"
SPRINKLER_BROADCAST_OFF = "sprinkler_broadcast_off"
SPRINKLER_LOAD_RAM_FROM_EEPROM = "sprinkler_load_ram_from_eeprom"
SPRINKLER_SENSOR_ON = "sprinkler_sensor_on"
SPRINKLER_SENSOR_OFF = "sprinkler_sensor_off"
SPRINKLER_DIAGNOSTICS_ON = "sprinkler_diagnostics_on"
SPRINKLER_DIAGNOSTICS_OFF = "sprinkler_diagnostics_off"
SPRINKLER_GET_PROGRAM_REQUEST = "sprinkler_get_program_request"
IO_OUTPUT_ON = "io_output_on"
IO_OUTPUT_OFF = "io_output_off"
IO_ALARM_DATA_REQUEST = "io_alarm_data_request"
IO_WRITE_OUTPUT_PORT = "io_write_output_port"
IO_READ_INPUT_PORT = "io_read_input_port"
IO_GET_SENSOR_VALUE = "io_get_sensor_value"
IO_SET_SENSOR_1_NOMINAL_VALUE = "io_set_sensor_1_nominal_value"
IO_GET_SENSOR_ALARM_DELTA = "io_get_sensor_alarm_delta"
FAN_STATUS_REPORT = "fan_status_report"
IO_WRITE_CONFIGURATION_PORT = "io_write_configuration_port"
IO_READ_CONFIGURATION_PORT = "io_read_configuration_port"
IO_MODULE_LOAD_INITIALIZATION_VALUES = "io_module_load_initialization_values"
IO_MODULE_LOAD_EEPROM_FROM_RAM = "io_module_load_eeprom_from_ram"
IO_MODULE_STATUS_REQUEST = "io_module_status_request"
IO_MODULE_READ_ANALOG_ONCE = "io_module_read_analog_once"
IO_MODULE_READ_ANALOG_ALWAYS = "io_module_read_analog_always"
IO_MODULE_ENABLE_STATUS_CHANGE_MESSAGE = "io_module_enable_status_change_message"
IO_MODULE_DISABLE_STATUS_CHANGE_MESSAGE = "io_module_disable_status_change_message"
IO_MODULE_LOAD_RAM_FROM_EEPROM = "io_module_load_ram_from_eeprom"
IO_MODULE_SENSOR_ON = "io_module_sensor_on"
IO_MODULE_SENSOR_OFF = "io_module_sensor_off"
IO_MODULE_DIAGNOSTICS_ON = "io_module_diagnostics_on"
IO_MODULE_DIAGNOSTICS_OFF = "io_module_diagnostics_off"
POOL_DEVICE_ON = "pool_device_on"
POOL_DEVICE_OFF = "pool_device_off"
POOL_TEMPERATURE_UP = "pool_temperature_up"
POOL_TEMPERATURE_DOWN = "pool_temperature_down"
POOL_LOAD_INITIALIZATION_VALUES = "pool_load_initialization_values"
POOL_LOAD_EEPROM_FROM_RAM = "pool_load_eeprom_from_ram"
POOL_GET_POOL_MODE = "pool_get_pool_mode"
POOL_GET_AMBIENT_TEMPERATURE = "pool_get_ambient_temperature"
POOL_GET_WATER_TEMPERATURE = "pool_get_water_temperature"
POOL_GET_PH = "pool_get_ph"
DOOR_MOVE_RAISE_DOOR = "door_move_raise_door"
DOOR_MOVE_LOWER_DOOR = "door_move_lower_door"
DOOR_MOVE_OPEN_DOOR = "door_move_open_door"
DOOR_MOVE_CLOSE_DOOR = "door_move_close_door"
DOOR_MOVE_STOP_DOOR = "door_move_stop_door"
DOOR_MOVE_SINGLE_DOOR_OPEN = "door_move_single_door_open"
DOOR_MOVE_SINGLE_DOOR_CLOSE = "door_move_single_door_close"
DOOR_STATUS_REPORT_RAISE_DOOR = "door_status_report_raise_door"
DOOR_STATUS_REPORT_LOWER_DOOR = "door_status_report_lower_door"
DOOR_STATUS_REPORT_OPEN_DOOR = "door_status_report_open_door"
DOOR_STATUS_REPORT_CLOSE_DOOR = "door_status_report_close_door"
DOOR_STATUS_REPORT_STOP_DOOR = "door_status_report_stop_door"
DOOR_STATUS_REPORT_SINGLE_DOOR_OPEN = "door_status_report_single_door_open"
DOOR_STATUS_REPORT_SINGLE_DOOR_CLOSE = "door_status_report_single_door_close"
WINDOW_COVERING_OPEN = "window_covering_open"
WINDOW_COVERING_CLOSE = "window_covering_close"
WINDOW_COVERING_STOP = "window_covering_stop"
WINDOW_COVERING_PROGRAM = "window_covering_program"
WINDOW_COVERING_POSITION = "window_covering_position"
THERMOSTAT_TEMPERATURE_UP = "thermostat_temperature_up"
THERMOSTAT_TEMPERATURE_DOWN = "thermostat_temperature_down"
THERMOSTAT_GET_ZONE_INFORMATION = "thermostat_get_zone_information"
THERMOSTAT_CONTROL = "thermostat_control"
THERMOSTAT_SET_COOL_SETPOINT = "thermostat_set_cool_setpoint"
THERMOSTAT_SET_HEAT_SETPOINT = "thermostat_set_heat_setpoint"
THERMOSTAT_EXTENDED_STATUS = "thermostat_extended_status"
THERMOSTAT_TEMPERATURE_STATUS = "thermostat_temperature_status"
THERMOSTAT_HUMIDITY_STATUS = "thermostat_humidity_status"
THERMOSTAT_MODE_STATUS = "thermostat_mode_status"
THERMOSTAT_COOL_SET_POINT_STATUS = "thermostat_cool_set_point_status"
THERMOSTAT_HEAT_SET_POINT_STATUS = "thermostat_heat_set_point_status"
LEAK_DETECTOR_ANNOUNCE = "leak_detector_announce"
ASSIGN_TO_COMPANION_GROUP = "assign_to_companion_group"
SET_SPRINKLER_PROGRAM = "set_sprinkler_program"
SPRINKLER_GET_PROGRAM_RESPONSE = "sprinkler_get_program_response"
IO_SET_SENSOR_NOMINAL_VALUE = "io_set_sensor_nominal_value"
IO_ALARM_DATA_RESPONSE = "io_alarm_data_response"
POOL_SET_DEVICE_TEMPERATURE = "pool_set_device_temperature"
POOL_SET_DEVICE_HYSTERESIS = "pool_set_device_hysteresis"
THERMOSTAT_ZONE_TEMPERATURE_UP = "thermostat_zone_temperature_up"
THERMOSTAT_ZONE_TEMPERATURE_DOWN = "thermostat_zone_temperature_down"
THERMOSTAT_SET_ZONE_HEAT_SETPOINT = "thermostat_set_zone_heat_setpoint"
THERMOSTAT_SET_ZONE_COOL_SETPOINT = "thermostat_set_zone_cool_setpoint"
DEVICE_LINK_CONTROLLER_CREATED = "device_link_controller_created"
DEVICE_LINK_CONTROLLER_REMOVED = "device_link_controller_removed"
DEVICE_LINK_RESPONDER_CREATED = "device_link_responder_created"
DEVICE_LINK_RESPONDER_REMOVED = "device_link_responder_removed"
ALDB_VERSION = "aldb_version"
ALDB_STATUS_CHANGED = "aldb_status_changed"
| 48.564516 | 83 | 0.878224 | [
"MIT"
] | bshep/pyinsteon | pyinsteon/topics.py | 9,033 | Python |
import numpy as np
import dem as d
from numpy.fft import fft2, ifft2, ifftshift
def calc_cdf(ks_grids, area_grids, vmax=400, R2_cutoff = 0.0, area_cutoff = 2E6, density_weighting_distance = False):
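    """Pool steepness (ks) values from several grids into a weighted CDF.
    Cells are kept when ks is non-negative, the corresponding area value is at
    least ``area_cutoff`` and the regression R^2 is at least ``R2_cutoff``.
    Each value is weighted by 1/n (its pixel count) and, when
    ``density_weighting_distance`` is given, by an FFT-based density correction
    computed over a circular window of that radius.  Returns ``(bin_centers,
    cdf)`` with integer bin centers from 0 to ``vmax``.
    """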
ks_vals = np.array([])
n_vals = np.array([])
R2_vals = np.array([])
density_vals = np.array([])
for (ks_grid, area_grid) in zip(ks_grids, area_grids):
i = np.where(~np.isnan(ks_grid._griddata) & (ks_grid._griddata >= 0) & (area_grid._griddata >= area_cutoff))
ks_vals = np.concatenate((ks_vals, ks_grid._griddata[i]))
n_vals = np.concatenate((n_vals, ks_grid._n[i]))
R2_vals = np.concatenate((R2_vals, ks_grid._r2[i]))
if density_weighting_distance is not False:
template_grid = np.zeros_like(ks_grid._griddata)
(ny, nx) = template_grid.shape
(cy, cx) = (ny/2.0, nx/2.0)
dy, dx = np.meshgrid(np.arange(0,ny)-cy, np.arange(0,nx)-cx, indexing = 'ij')
d = np.sqrt(np.power(dx,2) + np.power(dy,2))
j = np.where(d <= density_weighting_distance)
template_grid[j] = 1.0
de = area_grid._area_per_pixel()
ks_bin = (~np.isnan(ks_grid._griddata) & (area_grid >= area_cutoff)).astype(float)*de
template_F = fft2(template_grid)
density_weight = np.real(ifftshift(ifft2(template_F*fft2(de))) / ifftshift(ifft2(template_F*fft2(ks_bin))))
density_vals = np.concatenate((density_vals, density_weight[i]))
i = np.where(R2_vals >= R2_cutoff)
ks_vals = ks_vals[i]
n_vals = n_vals[i]
if density_weighting_distance is not False:
density_vals = density_vals[i]
i = np.argsort(ks_vals)
ks_vals = ks_vals[i]
n_vals = n_vals[i]
weights = 1 / n_vals
if density_weighting_distance is not False:
density_vals = density_vals[i]
weights *= density_vals
bins = np.concatenate((np.array([-0.5]), np.arange(0.5, vmax, 1),np.array([vmax])+0.5, np.array([np.max(ks_vals[:])])))
hist, _ = np.histogram(ks_vals, bins = bins, weights = weights)
bin_centers = np.concatenate((np.arange(0,vmax,1),np.array([vmax])))
cdf = np.cumsum(hist)
cdf /= cdf[-1]
cdf = cdf[0:-1]
return bin_centers, cdf | 44.076923 | 123 | 0.617801 | [
"MIT"
] | gehilley/GlobalSteepness | GlobalDataset/bin/steepness_cdf.py | 2,292 | Python |
"""Interfaces for interactively entering guesses."""
import curses
import time
import click
from wordle_cheater.interface_base import WordleCheaterUI
class CursesInterface(WordleCheaterUI):
"""Interface for using the curses library to enter guesses and display solutions.
Attributes
----------
guesses : list of WordleLetter objects
The currently entered guesses.
entering_letters : bool
Whether or not we are currently entering guesses.
"""
@classmethod
def init_and_run(cls, *args, **kwargs):
"""Instantiate and run `self.main()` using `curses.wrapper`.
Parameters
----------
*args : tuple
Positional arguments to be passed to the CursesInterface constructor.
**kwargs : dict, optional
Keyword arguments to be passed to the CursesInterface constructor.
Returns
-------
CursesInterface object
An instance of the CursesInterface class.
"""
ui = cls(*args, **kwargs)
curses.wrapper(ui.main)
return ui
def main(self, stdscr):
"""Run the interface.
Should typically be called using `curses.wrapper`.
Parameters
----------
stdscr : curses.Window object
The curses screen which the user interacts with.
"""
self.stdscr = stdscr
curses.use_default_colors()
curses.init_pair(1, curses.COLOR_WHITE, curses.COLOR_BLACK) # White on black
curses.init_pair(2, curses.COLOR_BLACK, curses.COLOR_YELLOW) # Black on yellow
curses.init_pair(3, curses.COLOR_BLACK, curses.COLOR_GREEN) # Black on green
curses.init_pair(4, curses.COLOR_BLACK, curses.COLOR_RED) # Black on red
height, width = stdscr.getmaxyx()
self.results_window = curses.newwin(
height - 12, width, 12, 0
) # window for printing results
x0 = width // 2 - 3
y0 = 5
self.print_title()
self.enter_letters(x0=x0, y0=y0)
self.print_results()
self.set_cursor_visibility(False)
self.get_key()
def center_print(self, y, string, *args, **kwargs):
"""Print in the center of the screen.
Parameters
----------
y : int
The vertical location at which to print.
string : str
The string to print.
*args : tuple
Additional arguments to be passed to `stdscr.addstr`.
**kwargs : dict, optional
Keyword arguments to be passed to `stdscr.addstr`.
"""
height, width = self.stdscr.getmaxyx()
str_length = len(string)
x_mid = width // 2
self.stdscr.addstr(y, x_mid - str_length // 2, string, *args, **kwargs)
def print_title(self):
"""Print title and instructions."""
self.center_print(1, "Wordle Cheater :(", curses.A_BOLD)
self.center_print(2, "Enter guesses below.")
self.center_print(3, "spacebar: change color", curses.A_DIM)
def print_results(self, sep=" "):
"""Print possible solutions given guesses.
Parameters
----------
sep : str, optional
The string to display between each possible solution.
"""
height, width = self.results_window.getmaxyx()
max_rows = height - 1 # -1 to account for "Possible solutions" header
cols = width // (5 + len(sep))
out_str = self.get_results_string(max_rows=max_rows, max_cols=cols, sep=sep)
self.results_window.clear()
self.results_window.addstr(0, 0, "Possible solutions:", curses.A_UNDERLINE)
self.results_window.addstr(1, 0, out_str)
self.results_window.refresh()
def print(self, x, y, string, c=None):
"""Print `string` at coordinates `x`, `y`.
Parameters
----------
x : int
Horizontal position at which to print the string.
y : int
Height at which to print the string.
string : str
The string to print.
c : str, {None, 'black', 'yellow', 'green', 'red'}
The color in which to print. Must be one of
['black', 'yellow', 'green', 'red'] or None. If `c` is None, it should
print in the default color pair.
"""
if c is None:
self.stdscr.addstr(y, x, string)
elif c == "black":
self.stdscr.addstr(y, x, string, curses.color_pair(1))
elif c == "yellow":
self.stdscr.addstr(y, x, string, curses.color_pair(2))
elif c == "green":
self.stdscr.addstr(y, x, string, curses.color_pair(3))
elif c == "red":
self.stdscr.addstr(y, x, string, curses.color_pair(4))
else:
raise ValueError(
"`c` must be one of ['black', 'yellow', 'green', 'red'] or none."
)
def sleep(self, ms):
"""Temporarily suspend execution.
Parameters
----------
ms : int
            Number of milliseconds before execution resumes.
"""
curses.napms(ms)
self.stdscr.refresh()
def move_cursor(self, x, y):
"""Move cursor to position `x`, `y`.
Parameters
----------
x : int
Desired horizontal position of cursor.
y : int
Desired vertical position of cursor.
"""
self.stdscr.move(y, x)
def set_cursor_visibility(self, visible):
"""Set cursor visibility.
Parameters
----------
visible : bool
Whether or not the cursor is visible.
"""
curses.curs_set(visible)
def get_key(self):
"""Get a key press.
Returns
-------
key : str
The key that was pressed.
"""
return self.stdscr.getkey()
def is_enter(self, key):
"""Check if `key` is the enter/return key.
Parameters
----------
key : str
The key to check.
Returns
-------
is_enter : bool
True if `key` is the enter or return key, False otherwise.
"""
if key == curses.KEY_ENTER or key == "\n" or key == "\r":
return True
else:
return False
def is_backspace(self, key):
"""Check if `key` is the backspace/delete key.
Parameters
----------
key : str
The key to check.
Returns
-------
is_backspace : bool
True if `key` is the backspace or delete key, False otherwise.
"""
if key == curses.KEY_BACKSPACE or key == "\b" or key == "\x7f":
return True
else:
return False
class ClickInterface(WordleCheaterUI):
"""Interface for using Click alone to enter letters and see solutions.
Parameters
----------
max_rows : int, optional
The maximum rows of possible solutions to print.
max_cols : int, optional
The maximum columns of possible solutions to print.
x0 : int, optional
The leftmost position where guesses will be entered.
y0 : int, optional
The topmost position where guesses will be entered.
esc : str, optional
The ANSI escape code for the terminal.
Attributes
----------
guesses : list of WordleLetter
The currently entered guesses.
entering_letters : bool
Whether or not we are currently entering guesses.
max_rows : int, optional
The maximum rows of possible solutions to print.
max_cols : int, optional
The maximum columns of possible solutions to print.
x0 : int, optional
The leftmost position where guesses will be entered.
y0 : int, optional
The topmost position where guesses will be entered.
esc : str, optional
The ANSI escape code for the terminal.
line_lengths : list of int
The highest x value we've printed to per line. For example, if we've printed
two lines, the first one up to x=5 and the second up to x=3, then
`line_lengths = [5, 3]`.
    curs_xy : tuple of int
        The current cursor position as (x, y).
"""
def __init__(self, max_rows=10, max_cols=8, x0=4, y0=4, esc="\033"):
self.max_rows = max_rows # Maximum rows of results to print
self.max_cols = max_cols # Maximum columns of results to print
self.x0 = x0 # Initial x position of guesses
self.y0 = y0 # Initial y position of guesses
self.esc = esc # ANSI escape code
self._curs_xy = (0, 0) # cursor position
self.line_lengths = [0] # Highest x values we've hit per line
super().__init__()
@property
def curs_xy(self):
"""Location of cursor."""
return self._curs_xy
@curs_xy.setter
def curs_xy(self, xy):
"""Update max line lengths when we update cursor position."""
x, y = xy
if y > len(self.line_lengths) - 1:
self.line_lengths += [0 for i in range(y - len(self.line_lengths) + 1)]
if x > self.line_lengths[y]:
self.line_lengths[y] = x
self._curs_xy = xy
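    # Illustrative note (not in the original source): if the cursor has so far
    # reached at most x=5 on line 0 and x=3 on line 1, then
    # line_lengths == [5, 3]; assigning curs_xy = (7, 1) records the new
    # maximum and line_lengths becomes [5, 7].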
def main(self):
"""Run the interface."""
try:
self.print_title()
self.enter_letters(x0=self.x0, y0=self.y0)
self.print_results()
finally:
self.set_cursor_visibility(True)
def print_title(self):
"""Print title and instructions."""
self.print(0, 0, "Wordle Cheater :(", bold=True)
self.print(0, 1, "Enter guesses below.")
self.print(0, 2, "spacebar: change color", dim=True)
def print_results(self):
"""Print possible solutions given guesses."""
# If we're still entering letters, don't do anything
if self.entering_letters:
return
out_str = self.get_results_string(
max_rows=self.max_rows, max_cols=self.max_cols, sep=" "
)
self.move_cursor(0, self.curs_xy[1] + 1)
click.secho("Possible solutions:", underline=True)
click.echo(out_str)
def print(self, x, y, string, c=None, *args, **kwargs):
"""Print `string` at coordinates `x`, `y`.
Parameters
----------
x : int
Horizontal position at which to print the string.
y : int
Height at which to print the string.
string : str
The string to print.
c : str, {None, 'black', 'yellow', 'green', 'red'}
The color in which to print. Must be one of
['black', 'yellow', 'green', 'red'] or None. If `c` is None, it should
print in the default color pair.
*args : tuple
Additional arguments to be passed to `click.secho`.
**kwargs : dict, optional
Keyword arguments to be passed to `click.secho`.
"""
# Move cursor to x, y so we can print there
self.move_cursor(x, y)
if c is None:
click.secho(string, nl=False, *args, **kwargs)
elif c == "black":
click.secho(string, fg="white", bg="black", nl=False)
elif c == "yellow":
click.secho(string, fg="black", bg="yellow", nl=False)
elif c == "green":
click.secho(string, fg="black", bg="green", nl=False)
elif c == "red":
click.secho(string, fg="black", bg="red", nl=False)
else:
raise ValueError(
"`c` must be one of ['black', 'yellow', 'green', 'red'] or none."
)
self.curs_xy = (self.curs_xy[0] + len(string), self.curs_xy[1])
def sleep(self, ms):
"""Temporarily suspend execution.
Parameters
----------
ms : int
            Number of milliseconds before execution resumes.
"""
time.sleep(ms / 1000)
def move_cursor(self, x, y):
"""Move cursor to position `x`, `y`.
Parameters
----------
x : int
Desired horizontal position of cursor.
y : int
Desired vertical position of cursor.
"""
# Check if we want to move cursor up (decreasing y)
if self.curs_xy[1] > y:
click.echo(f"{self.esc}[{self.curs_xy[1] - y}A", nl=False)
# Check if we want to move cursor down (increasing y)
elif self.curs_xy[1] < y:
# Check if we need to add new lines to screen
if len(self.line_lengths) - 1 < y:
# First arrow down as far as possible
click.echo(
f"{self.esc}[{(len(self.line_lengths) - 1) - self.curs_xy[1]}B",
nl=False,
)
# Now add blank lines
click.echo("\n" * (y - (len(self.line_lengths) - 1)), nl=False)
# New line, so definitely need to print spaces to move x
click.echo(" " * x, nl=False)
self.curs_xy = (x, y)
return
else:
# Should just arrow down to not overwrite stuff
click.echo(f"{self.esc}[{y - self.curs_xy[1]}B", nl=False)
# Check if we want to move cursor left (decreasing x)
if self.curs_xy[0] > x:
click.echo(f"{self.esc}[{self.curs_xy[0] - x}D", nl=False)
# Check if we want to move cursor right (increasing x)
elif self.curs_xy[0] < x:
# Check if we need to add space to right of cursor
if self.line_lengths[y] > x:
# First arrow to the right as far as possible
click.echo(
f"{self.esc}[{self.line_lengths[y] - self.curs_xy[0]}C", nl=False
)
# Now add blank spaces
click.echo(" " * (x - self.line_lengths[y]), nl=False)
else:
# Should just arrow to right to not overwrite stuff
click.echo(f"{self.esc}[{x - self.curs_xy[0]}C", nl=False)
self.curs_xy = (x, y)
def set_cursor_visibility(self, visible):
"""Set cursor visibility.
Parameters
----------
visible : bool
Whether or not the cursor is visible.
"""
if visible:
click.echo(f"{self.esc}[?25h", nl=False)
else:
click.echo(f"{self.esc}[?25l", nl=False)
def get_key(self):
"""Get a key press.
Returns
-------
key : str
The key that was pressed.
"""
return click.getchar()
def is_enter(self, key):
"""Check if `key` is the enter/return key.
Parameters
----------
key : str
The key to check.
Returns
-------
is_enter : bool
True if `key` is the enter or return key, False otherwise.
"""
if key == "\r" or key == "\n":
return True
else:
return False
def is_backspace(self, key):
"""Check if `key` is the backspace/delete key.
Parameters
----------
key : str
The key to check.
Returns
-------
is_backspace : bool
True if `key` is the backspace or delete key, False otherwise.
"""
if key == "\b" or key == "\x7f":
return True
else:
return False
if __name__ == "__main__":
# curses_ui = CursesInterface()
# curses.wrapper(curses_ui.main)
click_ui = ClickInterface()
click_ui.main()
| 30.447266 | 87 | 0.545641 | [
"MIT"
] | edsq/wordle-cheater | src/wordle_cheater/interface.py | 15,589 | Python |
import asyncio
import functools
import operator
from typing import (
cast,
Iterable,
NamedTuple,
Sequence,
Type,
Tuple,
)
from cached_property import cached_property
from eth_utils import (
ExtendedDebugLogger,
to_tuple,
)
from eth_utils.toolz import groupby, valmap
from eth_keys import keys
from p2p._utils import duplicates, get_logger
from p2p.abc import (
ConnectionAPI,
HandshakerAPI,
HandshakeReceiptAPI,
MultiplexerAPI,
NodeAPI,
TransportAPI,
TProtocol,
ProtocolAPI,
)
from p2p.connection import Connection
from p2p.constants import DEVP2P_V5
from p2p.disconnect import DisconnectReason
from p2p.exceptions import (
HandshakeFailure,
HandshakeFailureTooManyPeers,
NoMatchingPeerCapabilities,
)
from p2p.multiplexer import (
stream_transport_messages,
Multiplexer,
)
from p2p.p2p_proto import (
DevP2PReceipt,
Disconnect,
Hello,
HelloPayload,
BaseP2PProtocol,
P2PProtocolV4,
P2PProtocolV5,
)
from p2p.protocol import get_cmd_offsets
from p2p.transport import Transport
from p2p.typing import (
Capabilities,
Capability,
)
class Handshaker(HandshakerAPI[TProtocol]):
"""
Base class that handles the handshake for a given protocol. The primary
justification for this class's existence is to house parameters that are
needed for the protocol handshake.
"""
@cached_property
def logger(self) -> ExtendedDebugLogger:
return get_logger('p2p.handshake.Handshaker')
class DevP2PHandshakeParams(NamedTuple):
client_version_string: str
listen_port: int
version: int
def get_base_protocol_class(self) -> Type[BaseP2PProtocol]:
if self.version == 5:
return P2PProtocolV5
elif self.version == 4:
return P2PProtocolV4
else:
raise Exception(
f"Unknown protocol version: {self.version}. Expected one of "
f"`4` or `5`"
)
@to_tuple
def _select_capabilities(remote_capabilities: Capabilities,
local_capabilities: Capabilities) -> Iterable[Capability]:
"""
Select the appropriate shared capabilities between local and remote.
https://github.com/ethereum/devp2p/blob/master/rlpx.md#capability-messaging
"""
# Determine the remote capabilities that intersect with our own.
matching_capabilities = tuple(sorted(
set(local_capabilities).intersection(remote_capabilities),
key=operator.itemgetter(0),
))
# generate a dictionary of each capability grouped by name and sorted by
# version in descending order.
sort_by_version = functools.partial(sorted, key=operator.itemgetter(1), reverse=True)
capabilities_by_name = valmap(
tuple,
valmap(
sort_by_version,
groupby(operator.itemgetter(0), matching_capabilities),
),
)
# now we loop over the names that have a matching capability and return the
# *highest* version one.
for name in sorted(capabilities_by_name.keys()):
yield capabilities_by_name[name][0]
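# Illustrative sketch (not part of the original module): with local
# capabilities (('eth', 64), ('eth', 65)) and remote capabilities
# (('eth', 63), ('eth', 65), ('les', 2)), only ('eth', 65) appears on both
# sides, so _select_capabilities(remote, local) returns (('eth', 65),) --
# the highest mutually supported version of each shared protocol name.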
async def _do_p2p_handshake(transport: TransportAPI,
capabilities: Capabilities,
p2p_handshake_params: DevP2PHandshakeParams,
base_protocol: BaseP2PProtocol,
) -> Tuple[DevP2PReceipt, BaseP2PProtocol]:
client_version_string, listen_port, p2p_version = p2p_handshake_params
base_protocol.send(Hello(HelloPayload(
client_version_string=client_version_string,
capabilities=capabilities,
listen_port=listen_port,
version=p2p_version,
remote_public_key=transport.public_key.to_bytes(),
)))
# The base `p2p` protocol handshake directly streams the messages as it has
# strict requirements about receiving the `Hello` message first.
async for _, cmd in stream_transport_messages(transport, base_protocol):
if isinstance(cmd, Disconnect):
if cmd.payload == DisconnectReason.TOO_MANY_PEERS:
raise HandshakeFailureTooManyPeers(f"Peer disconnected because it is already full")
if not isinstance(cmd, Hello):
raise HandshakeFailure(
f"First message across the DevP2P connection must be a Hello "
f"msg, got {cmd}, disconnecting"
)
protocol: BaseP2PProtocol
if base_protocol.version >= DEVP2P_V5:
# Check whether to support Snappy Compression or not
# based on other peer's p2p protocol version
snappy_support = cmd.payload.version >= DEVP2P_V5
if snappy_support:
# Now update the base protocol to support snappy compression
# This is needed so that Trinity is compatible with parity since
# parity sends Ping immediately after handshake
protocol = P2PProtocolV5(
transport,
command_id_offset=0,
snappy_support=True,
)
else:
protocol = base_protocol
else:
protocol = base_protocol
devp2p_receipt = DevP2PReceipt(
protocol=protocol,
version=cmd.payload.version,
client_version_string=cmd.payload.client_version_string,
capabilities=cmd.payload.capabilities,
remote_public_key=cmd.payload.remote_public_key,
listen_port=cmd.payload.listen_port,
)
break
else:
raise HandshakeFailure("DevP2P message stream exited before finishing handshake")
return devp2p_receipt, protocol
async def negotiate_protocol_handshakes(transport: TransportAPI,
p2p_handshake_params: DevP2PHandshakeParams,
protocol_handshakers: Sequence[HandshakerAPI[ProtocolAPI]],
) -> Tuple[MultiplexerAPI, DevP2PReceipt, Tuple[HandshakeReceiptAPI, ...]]: # noqa: E501
"""
Negotiate the handshakes for both the base `p2p` protocol and the
    appropriate sub protocols. The basic logic proceeds in the following steps.
* perform the base `p2p` handshake.
* using the capabilities exchanged during the `p2p` handshake, select the
appropriate sub protocols.
* allow each sub-protocol to perform its own handshake.
* return the established `Multiplexer` as well as the `HandshakeReceipt`
objects from each handshake.
"""
# The `p2p` Protocol class that will be used.
p2p_protocol_class = p2p_handshake_params.get_base_protocol_class()
# Collect our local capabilities, the set of (name, version) pairs for all
# of the protocols that we support.
local_capabilities = tuple(
handshaker.protocol_class.as_capability()
for handshaker
in protocol_handshakers
)
# Verify that there are no duplicated local or remote capabilities
duplicate_capabilities = duplicates(local_capabilities)
if duplicate_capabilities:
raise Exception(f"Duplicate local capabilities: {duplicate_capabilities}")
# We create an *ephemeral* version of the base `p2p` protocol with snappy
# compression disabled for the handshake. As part of the handshake, a new
# instance of this protocol will be created with snappy compression enabled
# if it is supported by the protocol version.
ephemeral_base_protocol = p2p_protocol_class(
transport,
command_id_offset=0,
snappy_support=False,
)
# Perform the actual `p2p` protocol handshake. We need the remote
# capabilities data from the receipt to select the appropriate sub
# protocols.
devp2p_receipt, base_protocol = await _do_p2p_handshake(
transport,
local_capabilities,
p2p_handshake_params,
ephemeral_base_protocol,
)
# This data structure is simply for easy retrieval of the proper
# `Handshaker` for each selected protocol.
protocol_handshakers_by_capability = dict(zip(local_capabilities, protocol_handshakers))
# Using our local capabilities and the ones transmitted by the remote
# select the highest shared version of each shared protocol.
selected_capabilities = _select_capabilities(
devp2p_receipt.capabilities,
local_capabilities,
)
# If there are no capability matches throw an exception.
if len(selected_capabilities) < 1:
raise NoMatchingPeerCapabilities(
"Found no matching capabilities between self and peer:\n"
f" - local : {tuple(sorted(local_capabilities))}\n"
f" - remote: {devp2p_receipt.capabilities}"
)
# Retrieve the handshakers which correspond to the selected protocols.
# These are needed to perform the actual handshake logic for each protocol.
selected_handshakers = tuple(
protocol_handshakers_by_capability[capability]
for capability in selected_capabilities
)
# Grab the `Protocol` class for each of the selected protocols. We need
# this to compute the offsets for each protocol's command ids, as well as
# for instantiation of the protocol instances.
selected_protocol_types = tuple(
handshaker.protocol_class
for handshaker
in selected_handshakers
)
# Compute the offsets for each protocol's command ids
protocol_cmd_offsets = get_cmd_offsets(selected_protocol_types)
# Now instantiate instances of each of the protocol classes.
selected_protocols = tuple(
protocol_class(transport, command_id_offset, base_protocol.snappy_support)
for protocol_class, command_id_offset
in zip(selected_protocol_types, protocol_cmd_offsets)
)
# Create `Multiplexer` to abstract all of the protocols into a single
# interface to stream only messages relevant to the given protocol.
multiplexer = Multiplexer(transport, base_protocol, selected_protocols)
# This context manager runs a background task which reads messages off of
# the `Transport` and feeds them into protocol specific queues. Each
# protocol is responsible for reading its own messages from that queue via
# the `Multiplexer.stream_protocol_messages` API.
await multiplexer.stream_in_background()
# Concurrently perform the handshakes for each protocol, gathering up
# the returned receipts.
try:
protocol_receipts = cast(Tuple[HandshakeReceiptAPI, ...], await asyncio.gather(*(
handshaker.do_handshake(multiplexer, protocol)
for handshaker, protocol
in zip(selected_handshakers, selected_protocols)
)))
except BaseException as handshake_err:
# If the multiplexer has a streaming error, that will certainly be the cause of
# whatever handshake error we got, so raise that instead.
multiplexer.raise_if_streaming_error()
# Ok, no streaming error from the multiplexer, so stop it and raise the handshake error.
await multiplexer.stop_streaming()
raise handshake_err
else:
# The handshake was successful, but there's a chance the multiplexer's streaming stopped
# after that, so we may raise that here to prevent an attempt to use a stopped multiplexer
# further.
multiplexer.raise_if_streaming_error()
# Return the `Multiplexer` object as well as the handshake receipts. The
# `Multiplexer` object acts as a container for the individual protocol
# instances.
return multiplexer, devp2p_receipt, protocol_receipts
async def dial_out(remote: NodeAPI,
private_key: keys.PrivateKey,
p2p_handshake_params: DevP2PHandshakeParams,
protocol_handshakers: Sequence[HandshakerAPI[ProtocolAPI]],
) -> ConnectionAPI:
"""
Perform the auth and P2P handshakes with the given remote.
Return a `Connection` object housing all of the negotiated sub protocols.
Raises UnreachablePeer if we cannot connect to the peer or
HandshakeFailure if the remote disconnects before completing the
handshake or if none of the sub-protocols supported by us is also
supported by the remote.
"""
transport = await Transport.connect(
remote,
private_key,
)
transport.logger.debug2("Initiating p2p handshake with %s", remote)
try:
multiplexer, devp2p_receipt, protocol_receipts = await negotiate_protocol_handshakes(
transport=transport,
p2p_handshake_params=p2p_handshake_params,
protocol_handshakers=protocol_handshakers,
)
except BaseException:
# Note: This is one of two places where we manually handle closing the
# reader/writer connection pair in the event of an error during the
# peer connection and handshake process.
# See `p2p.auth.handshake` for the other.
try:
await transport.close()
except ConnectionResetError:
transport.logger.debug("Could not wait for transport to close")
raise
transport.logger.debug2("Completed p2p handshake with %s", remote)
connection = Connection(
multiplexer=multiplexer,
devp2p_receipt=devp2p_receipt,
protocol_receipts=protocol_receipts,
is_dial_out=True,
)
return connection
async def receive_dial_in(reader: asyncio.StreamReader,
writer: asyncio.StreamWriter,
private_key: keys.PrivateKey,
p2p_handshake_params: DevP2PHandshakeParams,
protocol_handshakers: Sequence[HandshakerAPI[ProtocolAPI]],
) -> Connection:
transport = await Transport.receive_connection(
reader=reader,
writer=writer,
private_key=private_key,
)
try:
multiplexer, devp2p_receipt, protocol_receipts = await negotiate_protocol_handshakes(
transport=transport,
p2p_handshake_params=p2p_handshake_params,
protocol_handshakers=protocol_handshakers,
)
except BaseException:
# Note: This is one of two places where we manually handle closing the
# reader/writer connection pair in the event of an error during the
# peer connection and handshake process.
# See `p2p.auth.handshake` for the other.
try:
await transport.close()
except ConnectionResetError:
transport.logger.debug("Could not wait for transport to close")
raise
connection = Connection(
multiplexer=multiplexer,
devp2p_receipt=devp2p_receipt,
protocol_receipts=protocol_receipts,
is_dial_out=False,
)
return connection
| 37.890863 | 129 | 0.681224 | [
"MIT"
] | g-r-a-n-t/trinity | p2p/handshake.py | 14,929 | Python |
"""
A module that contains utility functions to load the 'classical' workspace configuration.
This configuration may have three meaningful files:
.remote (required) - information about the connection options
.remoteindex (optional) - information about which connection from options above to use
.remoteignore (optional) - information about files that should be ignored when syncing files
"""
import os
import re
from collections import defaultdict
from dataclasses import asdict
from pathlib import Path
from typing import Dict, List, Tuple
from remote.exceptions import ConfigurationError
from . import ConfigurationMedium, RemoteConfig, SyncRules, WorkspaceConfig
from .shared import DEFAULT_REMOTE_ROOT, hash_path
CONFIG_FILE_NAME = ".remote"
INDEX_FILE_NAME = ".remoteindex"
IGNORE_FILE_NAME = ".remoteignore"
IGNORE_SECTION_REGEX = re.compile(r"^(push|pull|both)\s*:$")
BASE_IGNORES = (CONFIG_FILE_NAME, INDEX_FILE_NAME, IGNORE_FILE_NAME)
DEFAULT_SHELL = "sh"
DEFAULT_SHELL_OPTIONS = ""
def _extract_shell_info(line: str, env_vars: List[str]) -> Tuple[str, str]:
if not env_vars:
return DEFAULT_SHELL, DEFAULT_SHELL_OPTIONS
vars_string = env_vars[0]
env = {}
items = vars_string.split()
index = 0
while index < len(items):
key, value = items[index].split("=")
if value.startswith("'") or value.startswith('"'):
control_character = value[0]
while index < len(items) - 1:
if value[-1] == control_character:
break
index += 1
value += " " + items[index]
if not value[-1] == control_character:
raise ConfigurationError(f"Config line {line} is corrupted. Cannot parse end {key}={value}")
env[key] = value.strip("\"'")
index += 1
# TODO: these shell types are not used in new implementation, need to remove them
shell = env.pop("RSHELL", DEFAULT_SHELL)
shell_options = env.pop("RSHELL_OPTS", DEFAULT_SHELL_OPTIONS)
if env:
raise ConfigurationError(
f"Config line {line} contains unexpected env variables: {env}. Only RSHELL and RSHELL_OPTS can be used"
)
return shell, shell_options
def parse_config_line(line: str) -> RemoteConfig:
# The line should look like this:
# sdas-ld2:.remotes/814f27f15f4e7a0842cada353dfc765a RSHELL=zsh
entry, *env_items = line.split(maxsplit=1)
shell, shell_options = _extract_shell_info(line, env_items)
parts = entry.split(":")
if len(parts) != 2:
raise ConfigurationError(
f"The configuration string is malformed: {parts}. Please use host-name:remote_dir format"
)
host, directory = parts
return RemoteConfig(host=host, directory=Path(directory), shell=shell, shell_options=shell_options)
def load_configurations(workspace_root: Path) -> List[RemoteConfig]:
config_file = workspace_root / CONFIG_FILE_NAME
configurations = []
for line in config_file.read_text().splitlines():
line = line.strip()
if not line or line.startswith("#"):
continue
configurations.append(parse_config_line(line))
return configurations
def load_default_configuration_num(workspace_root: Path) -> int:
# If REMOTE_HOST_INDEX is set, that overrides settings in .remoteindex
env_index = os.environ.get("REMOTE_HOST_INDEX")
if env_index:
try:
return int(env_index)
except ValueError:
raise ConfigurationError(
f"REMOTE_HOST_INDEX env variable contains symbols other than numbers: '{env_index}'. "
"Please set the coorect index value to continue"
)
index_file = workspace_root / INDEX_FILE_NAME
if not index_file.exists():
return 0
    # Configuration uses a 1-based index and we need a 0-based one
text = index_file.read_text().strip()
try:
return int(text) - 1
except ValueError:
raise ConfigurationError(
f"File {index_file} contains symbols other than numbers: '{text}'. "
"Please remove it or replace the value to continue"
)
def _postprocess(ignores):
pull = ignores.pop("pull", [])
push = ignores.pop("push", [])
both = ignores.pop("both", [])
if ignores:
raise ConfigurationError(
f"{IGNORE_FILE_NAME} file has unexpected sections: {', '.join(ignores.keys())}. Please remove them"
)
return SyncRules(pull=pull, push=push, both=both)
def load_ignores(workspace_root: Path) -> SyncRules:
ignores: Dict[str, List[str]] = defaultdict(list)
ignores["both"].extend(BASE_IGNORES)
ignore_file = workspace_root / IGNORE_FILE_NAME
if not ignore_file.exists():
return _postprocess(ignores)
active_section = "both"
is_new_format = None
for line in ignore_file.read_text().splitlines():
line = line.strip()
if not line or line.startswith("#"):
continue
matcher = IGNORE_SECTION_REGEX.match(line)
if matcher is None:
if is_new_format is None:
is_new_format = False
ignores[active_section].append(line)
else:
if is_new_format is None:
is_new_format = True
elif not is_new_format:
raise ConfigurationError(
f"Few ignore patters were listed in {IGNORE_FILE_NAME} before the first section {matcher.group(1)} appeared. "
"Please list all ignored files after a section declaration if you use new ignore format"
)
active_section = matcher.group(1)
return _postprocess(ignores)
def save_general_config(config_file: Path, configurations: List[RemoteConfig]):
with config_file.open("w") as f:
for item in configurations:
f.write(f"{item.host}:{item.directory}")
if item.shell != "sh":
f.write(f" RSHELL={item.shell}")
if item.shell_options:
f.write(f" RSHELL_OPTS='{item.shell_options}'")
f.write("\n")
def save_ignores(config_file: Path, ignores: SyncRules):
ignores.both.extend(BASE_IGNORES)
ignores.trim()
if ignores.is_empty():
if config_file.exists():
config_file.unlink()
return
with config_file.open("w") as f:
for key, value in asdict(ignores).items():
f.write(f"{key}:\n")
for item in value:
f.write(f"{item}\n")
def save_index(config_file: Path, index: int):
if index == 0:
# We delete file when index is default
if config_file.exists():
config_file.unlink()
else:
config_file.write_text(f"{index + 1}\n")
class ClassicConfigurationMedium(ConfigurationMedium):
"""A medium class that knows how to load and save the 'classical' workspace configuration.
This configuration may have three meaningful files:
.remote (required) - information about the connection options
.remoteindex (optional) - information about which connection from options above to use
    .remoteignore (optional) - information about files that should be ignored when syncing files
"""
def load_config(self, workspace_root: Path) -> WorkspaceConfig:
configurations = load_configurations(workspace_root)
configuration_index = load_default_configuration_num(workspace_root)
if configuration_index > len(configurations) - 1:
raise ConfigurationError(
f"Configuration #{configuration_index + 1} requested but there are only {len(configurations)} declared"
)
ignores = load_ignores(workspace_root)
return WorkspaceConfig(
root=workspace_root,
configurations=configurations,
default_configuration=configuration_index,
ignores=ignores,
includes=SyncRules.new(),
)
def save_config(self, config: WorkspaceConfig) -> None:
save_general_config(config.root / CONFIG_FILE_NAME, config.configurations)
save_ignores(config.root / IGNORE_FILE_NAME, config.ignores)
save_index(config.root / INDEX_FILE_NAME, config.default_configuration)
def is_workspace_root(self, path: Path) -> bool:
return (path / CONFIG_FILE_NAME).exists()
def generate_remote_directory(self, config: WorkspaceConfig) -> Path:
md5 = hash_path(config.root)
return Path(f"{DEFAULT_REMOTE_ROOT}/{config.root.name}_{md5}")
| 36.02521 | 130 | 0.660718 | [
"BSD-2-Clause"
] | cdoronc/remote | src/remote/configuration/classic.py | 8,574 | Python |
import csv
import logging
import os
import copy
import re
import random
from functools import reduce
import ast
import pandas
import pexpect
import avi.migrationtools.netscaler_converter.ns_constants as ns_constants
from pkg_resources import parse_version
from xlsxwriter import Workbook
from openpyxl import load_workbook
from urllib.parse import urlparse
from OpenSSL import crypto
from socket import gethostname
from avi.migrationtools.netscaler_converter.ns_constants \
import (STATUS_SKIPPED, STATUS_SUCCESSFUL, STATUS_INDIRECT,
STATUS_NOT_APPLICABLE, STATUS_PARTIAL, STATUS_DATASCRIPT,
STATUS_INCOMPLETE_CONFIGURATION, STATUS_COMMAND_NOT_SUPPORTED,
OBJECT_TYPE_POOL_GROUP, OBJECT_TYPE_POOL, STATUS_NOT_IN_USE,
OBJECT_TYPE_HTTP_POLICY_SET, STATUS_LIST, COMPLEXITY_ADVANCED,
COMPLEXITY_BASIC, OBJECT_TYPE_APPLICATION_PERSISTENCE_PROFILE,
OBJECT_TYPE_APPLICATION_PROFILE)
from avi.migrationtools.avi_migration_utils import MigrationUtil, update_count
LOG = logging.getLogger(__name__)
csv_writer_dict_list = []
skipped_setting = {
# 'virtual_service': '',
# 'ssl key and cert': {},
# 'ssl profile': {},
# 'pool group': {},
# 'health monitor': {},
# 'Httppolicy': {}
}
# Added variable for checking progress and get overall object.
progressbar_count = 0
total_count = 0
class NsUtil(MigrationUtil):
def add_conv_status(self, line_no, cmd, object_type, full_command, conv_status,
avi_object=None):
"""
        Adds a status row in the conversion status csv
:param line_no: line number of command
:param object_type:
:param full_command: netscaler command
:param conv_status: dict of conversion status
        :param avi_object: Converted avi object
"""
row = {
'Line Number': line_no if line_no else '',
'Netscaler Command': cmd if cmd else '',
'Object Name': object_type if object_type else '',
'Full Command': full_command if full_command else '',
'Status': conv_status.get('status', ''),
'Skipped settings': str(conv_status.get('skipped', '')),
'Indirect mapping': str(conv_status.get('indirect', '')),
'Not Applicable': str(conv_status.get('na_list', '')),
'User Ignored': str(conv_status.get('user_ignore', '')),
'AVI Object': str(avi_object) if avi_object else ''
}
csv_writer_dict_list.append(row)
def add_complete_conv_status(self, ns_config, output_dir, avi_config,
report_name, vs_level_status):
"""
        Adds a status row in the conversion status csv
:param ns_config: NS config dict
:param output_dir: output directory
:param avi_config: AVI config dict
:param report_name: name of report
:param vs_level_status: add vs level details in XL sheet
"""
global csv_writer_dict_list
global progressbar_count
global total_count
print("Generating Report For Converted Configuration...")
ptotal = len(ns_config)
ppcount = 0
for config_key in ns_config:
# increment progressbar count
ppcount += 1
config_object = ns_config[config_key]
msg = "Generating report"
self.print_progress_bar(ppcount, ptotal, msg, prefix='Progress',
suffix='')
for element_key in config_object:
element_object_list = config_object[element_key]
if isinstance(element_object_list, dict):
element_object_list = [element_object_list]
for element_object in element_object_list:
match = [match for match in csv_writer_dict_list if
match['Line Number'] == element_object['line_no']]
if not match:
ns_complete_command = self.get_netscalar_full_command(
config_key, element_object)
# Add status incomplete configuration
self.add_status_row(
element_object['line_no'], config_key,
element_object['attrs'][0], ns_complete_command,
STATUS_INCOMPLETE_CONFIGURATION)
unique_line_number_list = set()
row_list = []
for dict_row in csv_writer_dict_list:
if dict_row['Line Number'] not in unique_line_number_list:
unique_line_number_list.add(dict_row['Line Number'])
row_list.append(dict_row)
else:
row = [row for row in row_list
if row['Line Number'] == dict_row['Line Number']]
if str(dict_row['AVI Object']).startswith('Skipped'):
continue
if dict_row.get('AVI Object', None):
# Added condition to check unique status.
if str(row[0]['AVI Object']) != str(dict_row['AVI Object']):
row[0]['AVI Object'] += '__/__%s' % dict_row[
'AVI Object']
for status in STATUS_LIST:
status_list = [row for row in row_list if
row['Status'] == status]
print('%s: %s' % (status, len(status_list)))
# add skipped list of each object at vs level
print("Writing Excel Sheet For Converted Configuration...")
total_count = total_count + len(row_list)
if vs_level_status:
self.vs_per_skipped_setting_for_references(avi_config)
self.correct_vs_ref(avi_config)
else:
# Call to calculate vs complexity
self.vs_complexity_level()
# Write status report and pivot table in xlsx report
self.write_status_report_and_pivot_table_in_xlsx(
row_list, output_dir, report_name, vs_level_status)
def add_status_row(self, line_no, cmd, object_type, full_command, status,
avi_object=None):
"""
        Adds a status row in the conversion status csv
:param line_no:
:param cmd: netscaler command
:param object_type:
:param full_command:
:param status: conversion status
:param avi_object:
"""
global csv_writer_dict_list
row = {
'Line Number': line_no if line_no else '',
'Netscaler Command': cmd,
'Object Name': object_type,
'Full Command': full_command,
'Status': status,
'AVI Object': str(avi_object) if avi_object else ''
}
csv_writer_dict_list.append(row)
def add_csv_headers(self, csv_file):
"""
Adds header line in conversion status file
:param csv_file: File to which header is to be added
"""
global csv_writer
fieldnames = ['Line Number', 'Netscaler Command', 'Object Name',
'Full Command', 'Status', 'Skipped settings',
'Indirect mapping', 'Not Applicable', 'User Ignored',
'AVI Object']
csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames,
lineterminator='\n', )
csv_writer.writeheader()
def get_avi_lb_algorithm(self, ns_algorithm):
"""
Converts NS LB algorithm to equivalent avi LB algorithm
:param ns_algorithm: NS algorithm name
:return: Avi LB algorithm enum value
"""
avi_algorithm = 'LB_ALGORITHM_LEAST_CONNECTIONS'
if ns_algorithm == 'LEASTCONNECTIONS':
avi_algorithm = 'LB_ALGORITHM_LEAST_CONNECTIONS'
elif ns_algorithm == 'ROUNDROBIN':
avi_algorithm = 'LB_ALGORITHM_ROUND_ROBIN'
elif ns_algorithm in ['LEASTRESPONSETIME', 'LRTM']:
avi_algorithm = 'LB_ALGORITHM_FASTEST_RESPONSE'
elif ns_algorithm == 'SOURCEIPHASH':
avi_algorithm = 'LB_ALGORITHM_CONSISTENT_HASH'
elif ns_algorithm == 'URLHASH':
avi_algorithm = 'LB_ALGORITHM_CONSISTENT_HASH_URI'
return avi_algorithm
def update_algo_for_pools(self, algo, pg_name, avi_config):
pool_group = [pg for pg in avi_config['PoolGroup'] if
pg['name'] == pg_name][0]
for member in pool_group['members']:
pool_name = self.get_name(member['pool_ref'])
pool = [pool for pool in avi_config['Pool'] if
pool['name'] == pool_name][0]
pool['lb_algorithm'] = algo
def get_avi_resp_code(self, respCode):
"""
        This function is used for getting the appropriate response codes for avi.
:param respCode: response code
:return: returns list of unique responses.
"""
avi_resp_codes = []
codes = []
for res_code in respCode.split(' '):
if '-' in res_code:
codes.extend(res_code.split('-'))
else:
codes.append(res_code)
for code in codes:
if code and code.strip().isdigit():
# Converted to int.
code = int(code.strip())
if code < 200:
avi_resp_codes.append("HTTP_1XX")
elif code < 300:
avi_resp_codes.append("HTTP_2XX")
elif code < 400:
avi_resp_codes.append("HTTP_3XX")
elif code < 500:
avi_resp_codes.append("HTTP_4XX")
elif code < 600:
avi_resp_codes.append("HTTP_5XX")
        # Get the unique values from the list.
avi_resp_codes = list(set(avi_resp_codes))
if not avi_resp_codes:
avi_resp_codes = ["HTTP_ANY"]
return avi_resp_codes
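    # Illustrative sketch (not from the original source): get_avi_resp_code("200 301-404")
    # splits '301-404' into its endpoints 301 and 404, then returns the unique
    # buckets ['HTTP_2XX', 'HTTP_3XX', 'HTTP_4XX'] (order may vary because a
    # set is used), while an empty string maps to ["HTTP_ANY"].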
def get_conv_status(self, ns_object, skipped_list, na_list, indirect_list,
ignore_for_val=None, indirect_commands=None,
user_ignore_val=[]):
"""
This function used for getting status detail for command like
skipped or indirect.
:param ns_object: Netscaler parsed config
:param skipped_list: list of skipped commands list.
:param na_list: not applicable commands list.
:param indirect_list: indirect command list
:param ignore_for_val: optional field
:param indirect_commands: indirect commands
:param user_ignore_val: List of user ignore attributes
        :return: returns dict of conversion status.
"""
skipped = [attr for attr in ns_object.keys() if attr in skipped_list]
na = [attr for attr in ns_object.keys() if attr in na_list]
indirect = [attr for attr in ns_object.keys() if attr in indirect_list]
# List of ignore attributes which are present in skipped
user_ignore = [val for val in skipped if val in user_ignore_val]
# Removed the attributes from skipped which are in user ignore list
skipped = [attr for attr in skipped if attr not in user_ignore_val]
if ignore_for_val:
for key in ignore_for_val.keys():
if key not in ns_object:
continue
ns_val = ns_object.get(key)
ignore_val = ignore_for_val.get(key)
if key in skipped and str(ns_val) == str(ignore_val):
skipped.remove(key)
if skipped:
status = STATUS_PARTIAL
else:
status = STATUS_SUCCESSFUL
conv_status = {
'skipped': skipped,
'indirect': indirect,
'na_list': na,
'status': status,
'user_ignore': user_ignore
}
return conv_status
def get_key_cert_obj(self, name, key_file_name, cert_file_name, input_dir):
"""
        :param name: name of ssl cert.
:param key_file_name: key file (ie.pem)
:param cert_file_name: certificate file name
:param input_dir: input directory for certificate file name
:return: returns dict of ssl object
"""
folder_path = input_dir + os.path.sep
key = self.upload_file(folder_path + key_file_name)
cert = self.upload_file(folder_path + cert_file_name)
ssl_kc_obj = None
if key and cert:
cert = {"certificate": cert}
ssl_kc_obj = {
'name': name,
'key': key,
'certificate': cert,
'key_passphrase': ''
}
return ssl_kc_obj
def get_command_from_line(self, line):
"""
This function is used for getting command and line number from conf file.
:param line: line
:return: returns command name and line
"""
cmd = ''
line_no = 0
for member in line:
if 'line_no' in member:
line_no = member[1]
continue
if isinstance(member, str):
cmd += ' %s' % member
else:
cmd += ' -%s' % ' '.join(member)
return cmd, line_no
def update_status_for_skipped(self, skipped_cmds):
"""
        :param skipped_cmds: separation of non-converted commands
        into NA, Indirect, DataScript, NotSupported
:return: None
"""
na_cmds = ns_constants.netscalar_command_status['NotApplicableCommands']
indirect_cmds = ns_constants.netscalar_command_status[
'IndirectCommands']
datascript_cmds = \
ns_constants.netscalar_command_status['DatascriptCommands']
not_supported = ns_constants.netscalar_command_status['NotSupported']
if not skipped_cmds:
return
for cmd in skipped_cmds:
line_no = cmd['line_no']
cmd = cmd['cmd']
cmd = cmd.strip()
for na_cmd in na_cmds:
if cmd.startswith(na_cmd):
# Add status not applicable in csv/report
self.add_status_row(line_no, na_cmd, None, cmd,
STATUS_NOT_APPLICABLE)
break
for id_cmd in indirect_cmds:
if cmd.startswith(id_cmd):
# Add status indirect in csv/report
self.add_status_row(line_no, id_cmd, None, cmd, STATUS_INDIRECT)
break
for datascript_cmd in datascript_cmds:
if cmd.startswith(datascript_cmd):
# Add status datascript in csv/report
self.add_status_row(line_no, datascript_cmd, None, cmd,
STATUS_DATASCRIPT)
break
for not_commands in not_supported:
if cmd.startswith(not_commands):
# Add status not not supported in csv/report
self.add_status_row(line_no, not_commands, None, cmd,
STATUS_COMMAND_NOT_SUPPORTED)
break
def remove_duplicate_objects(self, obj_type, obj_list):
"""
Remove duplicate objects from list
:param obj_type: Object type
:param obj_list: list of all objects
:return: return list which has no duplicates objects
"""
if len(obj_list) == 1:
return obj_list
for source_obj in obj_list:
for index, tmp_obj in enumerate(obj_list):
if tmp_obj["name"] == source_obj["name"]:
continue
src_cp = copy.deepcopy(source_obj)
tmp_cp = copy.deepcopy(tmp_obj)
del src_cp["name"]
if "description" in src_cp:
del src_cp["description"]
del tmp_cp["name"]
if "description" in tmp_cp:
del tmp_cp["description"]
if src_cp.items() == tmp_cp.items():
                    LOG.warning('Remove duplicate %s object : %s'
                                % (obj_type, tmp_obj["name"]))
del obj_list[index]
self.remove_duplicate_objects(obj_type, obj_list)
return obj_list
def cleanup_config(self, config):
"""
This function is used for deleting temp variables created for conversion
:param config: dict type
:return: None
"""
del config
def clone_pool(self, pool_name, cloned_for, avi_config, userprefix=None):
"""
        This function is used for cloning shared pools in netscaler.
:param pool_name: name of pool
:param cloned_for: cloned for
:param avi_config: avi config dict
:param userprefix: prefix for objects
:return: None
"""
pools = [pool for pool in avi_config['Pool'] if
pool['name'] == pool_name]
if pools:
pool_obj = copy.deepcopy(pools[0])
pname = pool_obj['name']
pool_name = re.sub('[:]', '-', '%s-%s' % (pname, cloned_for))
pool_obj['name'] = pool_name
avi_config['Pool'].append(pool_obj)
LOG.info(
"Same pool reference to other object. Clone Pool %s for %s" %
(pool_name, cloned_for))
return pool_obj['name']
return None
def get_vs_if_shared_vip(self, avi_config, controller_version):
"""
        This function checks if the same vip is used by another vs
:param avi_config: avi config dict
:param controller_version:
:return: None
"""
vs_list = [v for v in avi_config['VirtualService'] if
'port_range_end' in
v['services'][0]]
for vs in vs_list:
# Get the list of vs which shared the same vip
if parse_version(controller_version) >= parse_version('17.1'):
vs_port_list = [int(v['services'][0]['port']) for v in
avi_config['VirtualService']
if v['vsvip_ref'].split('name=')[1].split('-')[0] ==
vs['vsvip_ref'].split('name=')[1].split('-')[0]
and 'port_range_end' not in v['services'][0]]
else:
vs_port_list = [int(v['services'][0]['port']) for v in
avi_config['VirtualService'] if v['ip_address'][
'addr'] == vs['ip_address']['addr'] and
'port_range_end' not in v['services'][0]]
if vs_port_list:
min_port = min(vs_port_list)
max_port = max(vs_port_list)
vs['services'][0]['port_range_end'] = str(min_port - 1)
service = {
'enable_ssl': False,
'port': str(max_port + 1),
'port_range_end': '65535'
}
vs['services'].append(service)
def add_prop_for_http_profile(self, profile_name, avi_config, sysdict,
prop_dict):
"""
This method adds the additional attribute to application profile
:param profile_name: name of application profile
:param avi_config: avi config dict
:param sysdict: system/baseline config dict
:param prop_dict: property dict
:return:
"""
profile = [p for p in (avi_config['ApplicationProfile'] + sysdict[
'ApplicationProfile']) if p['name'] == profile_name]
if profile:
if prop_dict.get('clttimeout'):
profile[0]['client_header_timeout'] = int(prop_dict[
'clttimeout'])
profile[0]['client_body_timeout'] = int(prop_dict['clttimeout'])
if prop_dict.get('xff_enabled'):
if profile[0].get('http_profile'):
profile[0]['http_profile'].update(
{
'xff_enabled': True,
'xff_alternate_name': 'X-Forwarded-For'
}
)
else:
profile[0].update({'http_profile':
{
'xff_enabled': True,
'xff_alternate_name': 'X-Forwarded-For'
}
})
if profile[0].get('http_profile'):
profile[0]['http_profile'].update(
{
'x_forwarded_proto_enabled': True,
'hsts_enabled': True,
'http_to_https': True,
'httponly_enabled': True,
'hsts_max_age': 365,
'server_side_redirect_to_https': True,
'secure_cookie_enabled': True
}
)
else:
profile[0].update({'http_profile':
{
'x_forwarded_proto_enabled': True,
'hsts_enabled': True,
'http_to_https': True,
'httponly_enabled': True,
'hsts_max_age': 365,
'server_side_redirect_to_https': True,
'secure_cookie_enabled': True
}
})
def object_exist(self, object_type, name, avi_config):
'''
This method returns true if object exists in avi config dict else false
:param object_type:
:param name:
:param avi_config:
:return:
        """
data = avi_config[object_type]
obj_list = [obj for obj in data if obj['name'] == name]
if obj_list:
return True
return False
def is_shared_same_vip(self, vs, cs_vs_list, avi_config, tenant_name,
cloud_name, tenant_ref, cloud_ref,
controller_version, prefix, input_vrf=None):
"""
        This function checks for vs sharing the same vip
:param vs: Name of vs
:param cs_vs_list: List of vs
:param avi_config: avi config dict
:param tenant_name: Name of tenant
:param cloud_name: Name of cloud
:param tenant_ref: Reference of tenant
:param cloud_ref: Reference of cloud
:param controller_version: controller version
:param prefix: prefix for objects
:param input_vrf: VRF name input
:return: None
"""
if parse_version(controller_version) >= parse_version('17.1'):
# Get the list of vs which shared the same vip
shared_vip = [v for v in cs_vs_list if v['vsvip_ref'
].split('name=')[1].split('-')[0] == vs['vsvip_ref'
].split('name=')[1].split('-')[0] and
v['services'][0][
'port'] == vs['services'][0]['port']]
else:
shared_vip = [v for v in cs_vs_list if v['ip_address']['addr'] ==
vs['ip_address']['addr'] and v['services'][0][
'port'] ==
vs['services'][0]['port']]
if input_vrf:
vrf_ref = self.get_object_ref(input_vrf, 'vrfcontext',
cloud_name=cloud_name)
else:
vrf_ref = self.get_object_ref('global', 'vrfcontext',
cloud_name=cloud_name)
if shared_vip:
return True
elif parse_version(controller_version) >= parse_version('17.1'):
vsvip = vs['vsvip_ref'].split('name=')[1].split('-')[0]
self.create_update_vsvip(vsvip, avi_config['VsVip'], tenant_ref,
cloud_ref, prefix=prefix, vrf_ref=vrf_ref)
name = vsvip + '-vsvip'
# Added prefix for objects
if prefix:
name = prefix + '-' + vsvip + '-vsvip'
updated_vsvip_ref = self.get_object_ref(
name, 'vsvip', tenant_name, cloud_name)
vs['vsvip_ref'] = updated_vsvip_ref
def clone_http_policy_set(self, policy, prefix, avi_config, tenant_name,
cloud_name, used_poolgrp_ref, userprefix=None):
"""
        This function clones a pool reused in a content switching rule
:param policy: name of policy
:param prefix: clone for
:param avi_config: avi config dict
:param tenant_name:
:param cloud_name:
:param used_poolgrp_ref:
:param userprefix: prefix for objects
:return:None
"""
policy_name = policy['name']
clone_policy = copy.deepcopy(policy)
for rule in clone_policy['http_request_policy']['rules']:
if rule.get('switching_action', None) and \
rule['switching_action'].get('pool_group_ref'):
pool_group_ref = \
rule['switching_action']['pool_group_ref'].split('&')[
1].split(
'=')[1]
if pool_group_ref in used_poolgrp_ref:
LOG.debug('Cloned the pool group for policy %s',
policy_name)
pool_group_ref = self.clone_pool_group(
pool_group_ref, policy_name, avi_config, tenant_name,
cloud_name, userprefix=userprefix)
if pool_group_ref:
updated_pool_group_ref = self.get_object_ref(
pool_group_ref, OBJECT_TYPE_POOL_GROUP, tenant_name,
cloud_name)
rule['switching_action']['pool_group_ref'] = \
updated_pool_group_ref
clone_policy['name'] += '-%s-clone' % prefix
return clone_policy
def set_rules_index_for_http_policy_set(self, avi_config):
"""
Update index as per avi protobuf requirements
:param avi_config: avi config dict
:return: None
"""
http_policy_sets = avi_config['HTTPPolicySet']
for http_policy_set in http_policy_sets:
rules = http_policy_set['http_request_policy']['rules']
rules = sorted(rules, key=lambda d: int(d['index']))
for index, rule in enumerate(rules):
rule['index'] = index
def get_netscalar_full_command(self, netscalar_command, obj):
"""
        Generate netscaler command from the parsed dict
:param netscalar_command: name of command
:param obj: object with attributes
:return: Full command
"""
for attr in obj['attrs']:
netscalar_command += ' %s' % attr
for key in obj:
if isinstance(obj[key], list):
continue
if key == 'line_no':
continue
netscalar_command += ' -%s %s' % (key, obj[key])
return netscalar_command
def clone_pool_group(self, pg_name, cloned_for, avi_config, tenant_name,
cloud_name, userprefix=None):
"""
Used for cloning shared pool group.
:param pg_name: pool group name
:param cloned_for: clone for
:param avi_config: avi config dict
:param tenant_name:
:param cloud_name:
:param userprefix: prefix for objects
:return: None
"""
pool_groups = [pg for pg in avi_config['PoolGroup']
if pg['name'] == pg_name]
if pool_groups:
pool_group = copy.deepcopy(pool_groups[0])
pool_group_name = re.sub('[:]', '-',
'%s-%s' % (pg_name, cloned_for))
pool_group['name'] = pool_group_name
for member in pool_group.get('members', []):
pool_ref = self.get_name(member['pool_ref'])
pool_ref = self.clone_pool(pool_ref, cloned_for, avi_config,
userprefix=userprefix)
if pool_ref:
updated_pool_ref = self.get_object_ref(
pool_ref, OBJECT_TYPE_POOL, tenant_name, cloud_name)
member['pool_ref'] = updated_pool_ref
avi_config['PoolGroup'].append(pool_group)
LOG.info(
"Same pool group reference to other object. Clone Pool group "
"%s for %s" % (pg_name, cloned_for))
return pool_group['name']
return None
def remove_http_mon_from_pool(self, avi_config, pool, sysdict):
"""
This function is used for removing http type health monitor from https
vs.
:param avi_config: avi config dict
:param pool: name of pool
:param sysdict: baseline/system config dict
:return: None
"""
if pool:
hm_refs = copy.deepcopy(pool['health_monitor_refs'])
for hm_ref in hm_refs:
hm = [h for h in (sysdict['HealthMonitor'] + avi_config[
'HealthMonitor']) if h['name'] == hm_ref]
if hm and hm[0]['type'] == 'HEALTH_MONITOR_HTTP':
pool['health_monitor_refs'].remove(hm_ref)
LOG.warning(
'Skipping %s this reference from %s pool because '
'of health monitor type is HTTP and VS has ssl '
'profile.' % (hm_ref, pool['name']))
def remove_https_mon_from_pool(self, avi_config, pool, sysdict):
"""
This function is used for removing https type health monitor from http
vs.
:param avi_config: avi config dict
:param pool: name of pool
:param sysdict: baseline/system config dict
:return: None
"""
if pool:
hm_refs = copy.deepcopy(pool['health_monitor_refs'])
for hm_ref in hm_refs:
hm = [h for h in (sysdict['HealthMonitor'] + avi_config[
'HealthMonitor']) if h['name'] == hm_ref]
if hm and hm[0]['type'] == 'HEALTH_MONITOR_HTTPS':
pool['health_monitor_refs'].remove(hm_ref)
LOG.warning(
'Skipping %s this reference from %s pool because '
'of health monitor type is HTTPS and VS has no ssl '
'profile.' % (hm_ref, pool['name']))
def update_application_profile(self, profile_name, pki_profile_ref,
tenant_ref, name, avi_config, sysdict):
"""
        This function updates the application profile with the pki profile if
        the application profile exists; otherwise it creates a new http profile with the pki profile
:param profile_name: name of Http profile
:param pki_profile_ref: ref of PKI profile
:param tenant_ref: tenant ref
:param name: name of virtual service
:param avi_config: Dict of AVi config
:param sysdict: baseline/system config
:return: Http profile
"""
try:
if profile_name:
app_profile = [p for p in (sysdict['ApplicationProfile'] +
avi_config['ApplicationProfile']) if
p['name'] ==
profile_name]
if app_profile:
app_profile[0]["http_profile"]['pki_profile_ref'] = \
pki_profile_ref
                    LOG.debug('Added PKI profile %s to application profile '
                              '%s successfully' % (
                                  pki_profile_ref, profile_name))
else:
app_profile = dict()
app_profile['name'] = name + '-%s-%s' % (
random.randrange(0, 1000),
ns_constants.PLACE_HOLDER_STR)
app_profile['tenant_ref'] = tenant_ref
app_profile['type'] = 'APPLICATION_PROFILE_TYPE_HTTP'
http_profile = dict()
http_profile['connection_multiplexing_enabled'] = False
http_profile['xff_enabled'] = False
# TODO: clientIpHdrExpr conversion to xff_alternate_name
http_profile['websockets_enabled'] = False
http_profile['pki_profile_ref'] = pki_profile_ref
app_profile["http_profile"] = http_profile
avi_config['ApplicationProfile'].append(app_profile)
LOG.debug(
"Conversion completed successfully for httpProfile: %s" %
app_profile['name'])
return app_profile['name']
except:
update_count('error')
LOG.error("Error in convertion of httpProfile", exc_info=True)
def convert_persistance_prof(self, vs, name, tenant_ref):
"""
        This function converts the persistence profile and
        returns that profile
:param vs: object of lb vs or pool
        :param name: name of application persistence profile
:param tenant_ref: reference of tenant
:return: application persistent profile
"""
profile = None
persistenceType = vs.get('persistenceType', '')
if persistenceType == 'COOKIEINSERT':
timeout = vs.get('timeout', 2)
profile = {
"http_cookie_persistence_profile": {
"always_send_cookie": False
},
"persistence_type": "PERSISTENCE_TYPE_HTTP_COOKIE",
"server_hm_down_recovery": "HM_DOWN_PICK_NEW_SERVER",
"name": name,
}
# Added time if greater than zero
if int(timeout) > 0:
profile['http_cookie_persistence_profile']["timeout"] = timeout
elif persistenceType == 'SOURCEIP':
            # Set timeout equal to 120 if not provided.
timeout = vs.get('timeout', 120)
timeout = int(timeout) / 60
if timeout < 1:
timeout = 1
profile = {
"server_hm_down_recovery": "HM_DOWN_PICK_NEW_SERVER",
"persistence_type": "PERSISTENCE_TYPE_CLIENT_IP_ADDRESS",
"ip_persistence_profile": {
"ip_persistent_timeout": timeout
},
"name": name
}
elif persistenceType == 'SSLSESSION':
profile = {
"server_hm_down_recovery": "HM_DOWN_PICK_NEW_SERVER",
"persistence_type": "PERSISTENCE_TYPE_TLS",
"name": name
}
profile['tenant_ref'] = tenant_ref
return profile
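    # Illustrative sketch (not from the original source): for a NetScaler vserver
    # with persistenceType 'SOURCEIP' and timeout 300 (seconds), this returns an
    # Avi profile of type PERSISTENCE_TYPE_CLIENT_IP_ADDRESS with
    # ip_persistent_timeout 5 (minutes), clamped to a minimum of 1.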
def update_status_target_lb_vs_to_indirect(self, larget_lb_vs):
"""
        This function updates the status of the target lb vserver to
        Indirect
:param larget_lb_vs: name of target lb vserver
:return: None
"""
global csv_writer_dict_list
row = [row for row in csv_writer_dict_list
if row['Object Name'] == larget_lb_vs
and row['Netscaler Command'] == 'add lb vserver']
if row:
row[0]['Status'] = STATUS_INDIRECT
def create_http_policy_set_for_redirect_url(self, vs_obj, redirect_uri,
avi_config, tenant_name, tenant_ref, enable_ssl):
"""
        This function creates an http policy for the redirect url
:param vs_obj: object of VS
:param redirect_uri: redirect uri
:param avi_config: dict of AVi
:param tenant_name: name of tenant
:param tenant_ref: tenant ref
:param enable_ssl: flag for enabling ssl
:return: None
"""
redirect_uri = str(redirect_uri).replace('"', '')
action = self.build_redirect_action_dict(redirect_uri, enable_ssl)
policy_obj = {
'name': vs_obj['name'] + '-redirect-policy',
'tenant_ref': tenant_ref,
'http_request_policy': {
'rules': [
{
'index': 0,
'name': vs_obj['name'] + '-redirect-policy-rule-0',
'match': {
'path': {
'match_case': 'INSENSITIVE',
'match_str': [
'/'
],
'match_criteria': 'EQUALS'
}
},
'redirect_action': action
}
]
}
}
updated_http_policy_ref = self.get_object_ref(policy_obj['name'],
OBJECT_TYPE_HTTP_POLICY_SET,
tenant_name)
http_policies = {
'index': 11,
'http_policy_set_ref': updated_http_policy_ref
}
if not vs_obj.get('http_policies'):
vs_obj['http_policies'] = []
else:
ind = max([policies['index'] for policies in vs_obj[
'http_policies']])
http_policies['index'] = ind + 1
vs_obj['http_policies'].append(http_policies)
avi_config['HTTPPolicySet'].append(policy_obj)
def clean_virtual_service_from_avi_config(self, avi_config,
controller_version):
"""
        This function cleans up the vs which have vip 0.0.0.0
:param avi_config: dict of AVI
:param controller_version:
:return: None
"""
vs_list = copy.deepcopy(avi_config['VirtualService'])
avi_config['VirtualService'] = []
if parse_version(controller_version) >= parse_version('17.1'):
avi_config['VirtualService'] = \
[vs for vs in vs_list
if vs['vsvip_ref'].split('name=')[1].split('-')[0] != '0.0.0.0']
else:
avi_config['VirtualService'] = \
[vs for vs in vs_list
if vs['ip_address']['addr'] != '0.0.0.0']
def parse_url(self, url):
"""
This method returns the parsed url
:param url: url that need to be parsed
:return:
"""
parsed = urlparse(url)
return parsed
def format_string_to_json(self, avi_string):
"""
        This function converts a string into json format so that it can be
        converted into a dict
:param avi_string: string to be converted
:return: Return converted string
"""
avi_string = avi_string.split('__/__')[0]
return ast.literal_eval(avi_string)
def get_csv_object_list(self, csv_writer_dict_list, command_list):
"""
This method is used for getting csv object
:param csv_writer_dict_list: CSV row of object from xlsx report
:param command_list: List of netscaler commands
:return: List of CSV rows
"""
csv_object = [row for row in
csv_writer_dict_list
if row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]
and row['Netscaler Command'] in
command_list]
return csv_object
def get_csv_skipped_list(self, csv_object, name_of_object, vs_ref):
"""
This method is used for getting skipped list from vs.
:param csv_object: CSV row of object from xlsx report
:param name_of_object: Name of Object
:param vs_ref: Reference of VS
:return: List of skipped settings
"""
skipped_list = []
for each_partial in csv_object:
avi_object_json = \
self.format_string_to_json(each_partial['AVI Object'])
if avi_object_json.get('name') and \
avi_object_json['name'] == name_of_object:
# Set the VS reference for Netscaler status row
each_partial['VS Reference'] = vs_ref
repls = ('[', ''), (']', '')
skipped_setting_csv = reduce(lambda a, kv: a.replace(*kv),
repls,
each_partial['Skipped settings'])
if skipped_setting_csv:
skipped_list.append(skipped_setting_csv)
return skipped_list
def get_ssl_key_and_cert_refs_skipped(self, csv_writer_dict_list,
object_name, vs_ref):
"""
        This function gets the skipped settings for the ssl key and certificate refs
:param csv_writer_dict_list: CSV row of object from xlsx report
:param object_name: like virtual service or pool name
:param vs_ref: Reference of VS
:return: List of skipped settings
"""
ssl_key_cert = \
self.get_name(object_name['ssl_key_and_certificate_refs'][0])
csv_object = self.get_csv_object_list(
csv_writer_dict_list, ['bind ssl vserver', 'bind ssl service',
'bind ssl serviceGroup'])
skipped_list = self.get_csv_skipped_list(csv_object, ssl_key_cert,
vs_ref)
return ssl_key_cert, skipped_list
def get_ssl_profile_skipped(self, csv_writer_dict_list, ssl_profile_ref,
vs_ref):
"""
        This function gets the skipped settings for the ssl profile
:param csv_writer_dict_list: CSV row of object from xlsx report
:param ssl_profile_ref: reference of ssl profile object
:param vs_ref: virtual service obj reference.
:return: List of skipped settings
"""
ssl_profile_name = self.get_name(ssl_profile_ref)
csv_object = \
self.get_csv_object_list(csv_writer_dict_list,
['set ssl vserver', 'set ssl service',
'set ssl serviceGroup'])
skipped_list = self.get_csv_skipped_list(csv_object, ssl_profile_name,
vs_ref)
return ssl_profile_name, skipped_list
def get_application_profile_skipped(self, csv_writer_dict_list,
name_of_object, vs_ref):
"""
        This function gets the skipped settings for the application profile
:param csv_writer_dict_list: CSV row of object from xlsx report
:param name_of_object: object name like pool name, etc
:param vs_ref: virtual service obj reference.
:return: List of skipped settings
"""
ssl_profile_name = self.get_name(
name_of_object['application_profile_ref'])
csv_object = self.get_csv_object_list(
csv_writer_dict_list, ['add ns httpProfile'])
skipped_list = self.get_csv_skipped_list(csv_object, ssl_profile_name,
vs_ref)
return ssl_profile_name, skipped_list
def get_network_profile_skipped(self, csv_writer_dict_list, name_of_object,
vs_ref):
"""
        This function gets the skipped settings for the network profile
:param csv_writer_dict_list:List of add ns tcpProfile netscaler command rows
:param name_of_object: object name like pool name, etc
:param vs_ref: virtual service obj reference.
:return: List of skipped settings
"""
ssl_profile_name = self.get_name(name_of_object['network_profile_ref'])
csv_object = self.get_csv_object_list(
csv_writer_dict_list, ['add ns tcpProfile'])
skipped_list = self.get_csv_skipped_list(csv_object, ssl_profile_name,
vs_ref)
return ssl_profile_name, skipped_list
def get_app_persistence_profile_skipped(self, csv_writer_dict_list,
name_of_object, vs_ref):
"""
        This function gets the skipped settings for the application persistence profile
:param csv_writer_dict_list: List of set lb group netscaler command rows
:param name_of_object: object name like pool name, etc
:param vs_ref: virtual service obj reference.
:return: List of skipped settings
"""
# Changed ssl profile name to ssl profile ref.
app_persistence_profile_name = self.get_name(
name_of_object['ssl_profile_ref'])
csv_object = self.get_csv_object_list(csv_writer_dict_list, ['set lb group'])
skipped_list = self.get_csv_skipped_list(
csv_object, app_persistence_profile_name, vs_ref)
return app_persistence_profile_name, skipped_list
def get_pool_skipped_list(self, avi_config, pool_group_name,
skipped_setting, csv_object, obj_name,
csv_writer_dict_list, vs_ref):
"""
This method is used for getting pool skipped list.
:param avi_config: AVI dict
:param pool_group_name: Name of Pool group
:param skipped_setting: List of skipped settings
:param csv_object: CSV row
:param obj_name: Name of Object
:param csv_writer_dict_list: List of bind lb vserver netscaler command
rows
:param vs_ref: vs object reference
:return: List of skipped settings
"""
pool_group_object_ref = [pool_group_object_ref for pool_group_object_ref
in avi_config['PoolGroup'] if
pool_group_object_ref[
'name'] == pool_group_name]
for pool_group in pool_group_object_ref:
if 'members' in pool_group:
for each_pool_ref in pool_group['members']:
pool_name = self.get_name(each_pool_ref['pool_ref'])
skipped_list = self.get_csv_skipped_list(csv_object, pool_name,
vs_ref)
if len(skipped_list) > 0:
skipped_setting[obj_name] = {}
skipped_setting[obj_name]['pool'] = {}
skipped_setting[obj_name]['pool'][
'pool_name'] = pool_name
skipped_setting[obj_name]['pool']['pool_skipped_list'] \
= skipped_list
for pool_partial in csv_object:
avi_object_json = self.format_string_to_json(
pool_partial['AVI Object'])
if avi_object_json['name'] == pool_name:
if 'health_monitor_refs' in avi_object_json and \
avi_object_json['health_monitor_refs']:
monitor_refs = \
avi_object_json['health_monitor_refs']
for monitor_ref in monitor_refs:
monitor_ref = self.get_name(monitor_ref)
csv_object = self.get_csv_object_list(
csv_writer_dict_list,
['add lb monitor'])
skipped_list = self.get_csv_skipped_list(
csv_object, monitor_ref, vs_ref)
if skipped_list:
skipped_setting[obj_name] = {}
skipped_setting[obj_name]['pool'] = {}
skipped_setting[obj_name]['pool'][
'pool_name'] = pool_name
skipped_setting[obj_name]['pool'][
'health monitor'] = {}
skipped_setting[obj_name]['pool'][
'health monitor'][
'name'] = monitor_ref
skipped_setting[obj_name]['pool'][
'health monitor']['skipped_list'] =\
skipped_list
if 'ssl_key_and_certificate_refs' in avi_object_json:
name, skipped = \
self.get_ssl_key_and_cert_refs_skipped(
csv_writer_dict_list, avi_object_json,
vs_ref)
if skipped:
skipped_setting[obj_name] = {}
skipped_setting[obj_name]['pool'] = {}
skipped_setting[obj_name]['pool'][
'pool_name'] = pool_name
skipped_setting[
obj_name]['pool'][
'ssl key and cert'] = {}
skipped_setting[
obj_name]['pool']['ssl key and cert'][
'name'] = name
skipped_setting[
obj_name]['pool']['ssl key and cert'][
'skipped_list'] = skipped
if 'ssl_profile_ref' in avi_object_json:
name, skipped = \
self.get_ssl_profile_skipped(
csv_writer_dict_list, avi_object_json[
'ssl_profile_ref'], vs_ref)
if skipped:
skipped_setting[obj_name] = {}
skipped_setting[obj_name]['pool'] = {}
skipped_setting[obj_name]['pool'][
'pool_name'] = pool_name
skipped_setting[obj_name]['pool'][
'ssl profile'] = {}
skipped_setting[obj_name]['pool'][
'ssl profile']['name'] = name
skipped_setting[obj_name]['pool'][
'ssl profile']['skipped_list'] = skipped
# Get the skipped settings of application
# persistence profile ref.
if 'application_persistence_profile_ref' in \
avi_object_json:
name, skipped = \
self.get_app_persistence_profile_skipped(
csv_writer_dict_list, avi_object_json,
vs_ref)
if skipped:
skipped_setting[obj_name] = {}
skipped_setting[obj_name]['pool'] = {}
skipped_setting[obj_name]['pool'][
'pool_name'] = pool_name
skipped_setting[obj_name]['pool'][
'Application Persistence profile'] = {}
skipped_setting[obj_name]['pool'][
'Application Persistence profile'][
'name'] = name
skipped_setting[obj_name]['pool'][
'Application Persistence profile'][
'skipped_list'] = skipped
def vs_complexity_level(self):
"""
        This method calculates the complexity level of each vs.
:return:
"""
vs_csv_objects = [row for row in csv_writer_dict_list
if
row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]
and row['Netscaler Command'] in [
'add cs vserver', 'add lb vserver']]
for vs_csv_object in vs_csv_objects:
virtual_service = self.format_string_to_json(
vs_csv_object['AVI Object'])
# Update the complexity level of VS as Basic or Advanced
self.update_vs_complexity_level(vs_csv_object, virtual_service)
def vs_per_skipped_setting_for_references(self, avi_config):
"""
        This function adds the skipped settings to each VS CSV row
        :param avi_config: Avi configuration used to check vs skipped settings
:return: None
"""
        # Get the count of vs successfully migrated
global fully_migrated
global total_count
global progressbar_count
fully_migrated = 0
# Get the VS object list which is having status successful and partial.
vs_csv_objects = [row for row in csv_writer_dict_list
if
row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]
and row['Netscaler Command'] in [
'add cs vserver', 'add lb vserver']]
# calculate total count
total_count = total_count + len(vs_csv_objects)
for vs_csv_object in vs_csv_objects:
progressbar_count += 1
skipped_setting = {}
virtual_service = self.format_string_to_json(
vs_csv_object['AVI Object'])
# Update the complexity level of VS as Basic or Advanced
self.update_vs_complexity_level(vs_csv_object, virtual_service)
vs_ref = virtual_service['name']
repls = ('[', ''), (']', '')
# Get list of skipped setting attributes
skipped_setting_csv = reduce(lambda a, kv: a.replace(*kv), repls,
vs_csv_object['Skipped settings'])
if skipped_setting_csv:
skipped_setting['virtual_service'] = [skipped_setting_csv]
# Get the skipped list for ssl key and cert
if 'ssl_key_and_certificate_refs' in virtual_service:
name, skipped = self.get_ssl_key_and_cert_refs_skipped(
csv_writer_dict_list, virtual_service, vs_ref)
if skipped:
skipped_setting['ssl key and cert'] = {}
skipped_setting['ssl key and cert']['name'] = name
skipped_setting['ssl key and cert'][
'skipped_list'] = skipped
# Get the skipped list for ssl profile name.
# Changed ssl profile name to ssl profile ref.
if 'ssl_profile_ref' in virtual_service:
name, skipped = self.get_ssl_profile_skipped(
csv_writer_dict_list, virtual_service['ssl_profile_ref'],
vs_ref)
if skipped:
skipped_setting['ssl profile'] = {}
skipped_setting['ssl profile']['name'] = name
skipped_setting['ssl profile']['skipped_list'] = skipped
# Get the skipped list for pool group.
if 'pool_group_ref' in virtual_service:
pool_group_name = self.get_name(
virtual_service['pool_group_ref'])
csv_object = self.get_csv_object_list(
csv_writer_dict_list, ['bind lb vserver'])
self.get_pool_skipped_list(
avi_config, pool_group_name, skipped_setting, csv_object,
'pool group', csv_writer_dict_list, vs_ref)
            # Get the skipped list for http policy.
if 'http_policies' in virtual_service:
csv_object = self.get_csv_object_list(
csv_writer_dict_list,
['add cs policy', 'add responder policy',
'add rewrite policy'])
for http_ref in virtual_service['http_policies']:
http_name = self.get_name(http_ref['http_policy_set_ref'])
skipped_list = self.get_csv_skipped_list(csv_object,
http_name,
vs_ref)
if skipped_list:
skipped_setting['Httppolicy'] = {}
skipped_setting['Httppolicy']['name'] = http_name
skipped_setting['Httppolicy'][
'skipped_list'] = skipped_list
# Get the http policy name
for each_http_policy in avi_config['HTTPPolicySet']:
if each_http_policy['name'] == http_name:
for http_req in \
each_http_policy['http_request_policy'][
'rules']:
if http_req.get('switching_action', None) and \
http_req['switching_action'].get(
'pool_group_ref', None):
pool_group_name = self.get_name(
http_req['switching_action']
['pool_group_ref'])
self.get_pool_skipped_list(
avi_config, pool_group_name,
skipped_setting, csv_object,
'Httppolicy',
csv_writer_dict_list, vs_ref)
# Get the skipped list for application_profile_ref.
if 'application_profile_ref' in virtual_service and \
'admin:System' not in \
virtual_service['application_profile_ref']:
name, skipped = self.get_application_profile_skipped(
csv_writer_dict_list, virtual_service, vs_ref)
if skipped:
skipped_setting['Application profile'] = {}
skipped_setting['Application profile'][
'name'] = name
skipped_setting['Application profile'][
'skipped_list'] = skipped
# Get the skipped list for network profile ref.
if 'network_profile_ref' in virtual_service and \
'admin:System' not in \
virtual_service['network_profile_ref']:
name, skipped = self.get_network_profile_skipped(
csv_writer_dict_list, virtual_service, vs_ref)
if skipped:
skipped_setting['Network profile'] = {}
skipped_setting['Network profile'][
'name'] = name
skipped_setting['Network profile'][
'skipped_list'] = skipped
# Update overall skipped setting of VS csv row
if skipped_setting:
vs_csv_object.update(
{'Overall skipped settings': str(skipped_setting)})
else:
vs_csv_object.update(
{'Overall skipped settings': "FULLY MIGRATION"})
fully_migrated += 1
msg = "Writing excel sheet started..."
self.print_progress_bar(progressbar_count, total_count, msg,
prefix='Progress', suffix='')
csv_objects = [row for row in csv_writer_dict_list
if row['Status'] in [STATUS_PARTIAL, STATUS_SUCCESSFUL]
and row['Netscaler Command'] not in ['add cs vserver',
'add lb vserver']
and (
'VS Reference' not in row or not row[
'VS Reference'])]
# Update the vs reference not in used if objects are not attached to
# VS directly or indirectly
for csv_object in csv_objects:
csv_object['VS Reference'] = STATUS_NOT_IN_USE
def write_status_report_and_pivot_table_in_xlsx(self, row_list, output_dir,
report_name, vs_level_status):
"""
        This method writes the status and makes a pivot table in an excel sheet
:param row_list:
:param output_dir:
:param report_name:
:param vs_level_status:
:return:
"""
global total_count
global progressbar_count
# List of fieldnames for headers
if vs_level_status:
fieldnames = ['Line Number', 'Netscaler Command', 'Object Name',
'Full Command', 'Status', 'Skipped settings',
'Indirect mapping', 'Not Applicable', 'User Ignored',
'Overall skipped settings', 'Complexity Level',
'VS Reference', 'AVI Object']
else:
fieldnames = ['Line Number', 'Netscaler Command', 'Object Name',
'Full Command', 'Status', 'Skipped settings',
'Indirect mapping', 'Not Applicable', 'User Ignored',
                          'Complexity Level', 'AVI Object']
xlsx_report = output_dir + os.path.sep + ("%s-ConversionStatus.xlsx" %
report_name)
# xlsx workbook
status_wb = Workbook(xlsx_report)
# xlsx worksheet
status_ws = status_wb.add_worksheet("Status Sheet")
# Lock the first row of xls report.
status_ws.freeze_panes(1, 0)
first_row = 0
for header in fieldnames:
col = fieldnames.index(header)
status_ws.write(first_row, col, header)
row = 1
for row_data in row_list:
progressbar_count += 1
for _key, _value in row_data.items():
if _key in fieldnames:
col = fieldnames.index(_key)
status_ws.write(row, col, _value)
msg = "Writing excel sheet started..."
self.print_progress_bar(progressbar_count, total_count, msg,
prefix='Progress', suffix='')
row += 1
status_wb.close()
# create dataframe for row list
df = pandas.DataFrame(row_list, columns=fieldnames)
# create pivot table using pandas
pivot_table = pandas.pivot_table(df,
index=["Status", "Netscaler Command"],
values=[], aggfunc=[len], fill_value=0)
# create dataframe for pivot table using pandas
pivot_df = pandas.DataFrame(pivot_table)
master_book = load_workbook(xlsx_report)
master_writer = pandas.ExcelWriter(xlsx_report, engine='openpyxl')
master_writer.book = master_book
# Add pivot table in Pivot sheet
pivot_df.to_excel(master_writer, 'Pivot Sheet')
master_writer.save()
def update_skip_duplicates(self, obj, obj_list, obj_type,
merge_object_mapping, name, ent_type, prefix,
syslist):
"""
This method merge duplicate objects
:param obj: Source object to find duplicates for
:param obj_list: List of object to search duplicates in
:param obj_type: Type of object to add in converted_objs status
:param converted_objs: Converted avi object or merged object name
:param name: Name of the object
:param default_profile_name : Name of root parent default profile
:return:
"""
dup_of = None
merge_object_mapping[obj_type].update({name: name})
dup_of, old_name = self.check_for_duplicates(obj, obj_list, obj_type,
merge_object_mapping, ent_type,
prefix,
syslist)
if dup_of:
LOG.info(
"Duplicate profiles: %s merged in %s" % (obj['name'], dup_of))
# Update value of ssl profile with merged profile
if old_name in merge_object_mapping[obj_type].keys():
merge_object_mapping[obj_type].update({old_name: dup_of})
merge_object_mapping[obj_type].update({name: dup_of})
return True
return False
def create_update_vsvip(self, vip, vsvip_config, tenant_ref, cloud_ref,
prefix=None, vrf_ref=None):
"""
        This function creates or updates a VSVIP object.
:param vip: vip of VS
:param vsvip_config: List of vs object
:param tenant_ref: tenant reference
:param cloud_ref: cloud reference
:param prefix: prefix for objects
:param vrf_ref: VRF ref to be added in VIP object
:return: None
"""
        # Get the existing vsvip object list if present
name = vip + '-vsvip'
# Added prefix for objects
if prefix:
name = prefix + '-' + name
vsvip = [vip_obj for vip_obj in vsvip_config
if vip_obj['name'] == name]
if vsvip:
diff_ten = [vips for vips in vsvip if vips['tenant_ref'] !=
tenant_ref]
if diff_ten:
LOG.debug('VsVip %s is repeated with vrf %s but different '
'tenant %s', name, self.get_name(vrf_ref) if vrf_ref
else 'None', self.get_name(tenant_ref))
name = ''
# If VSVIP object not present then create new VSVIP object.
else:
vsvip_object = {
"name": name,
"tenant_ref": tenant_ref,
"cloud_ref": cloud_ref,
"vip": [
{
"vip_id": "0",
"ip_address": {
"type": "V4",
"addr": vip
}
}
],
}
if vrf_ref:
vsvip_object["vrf_context_ref"] = vrf_ref
vsvip_config.append(vsvip_object)
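    # Illustrative example (assumption, not taken from the original source):
    #   create_update_vsvip('10.1.1.5', vsvip_config, tenant_ref, cloud_ref,
    #                       prefix='ns')
    # creates (or reuses) a VsVip named 'ns-10.1.1.5-vsvip' with a single
    # V4 vip entry whose addr is '10.1.1.5'.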
def get_redirect_fail_action(self, url):
"""
This method returns the fail action dict
:param url: url
:return:
"""
parsed = urlparse(url)
redirect_fail_action = {
'fail_action': {
'redirect': {
'host': parsed.hostname,
'protocol': str(parsed.scheme).upper(),
'status_code': "HTTP_REDIRECT_STATUS_CODE_302"
},
"type": "FAIL_ACTION_HTTP_REDIRECT"
}
}
if parsed.path:
redirect_fail_action['fail_action']['redirect']['path'] = \
str(parsed.path).replace('"', '')
if parsed.query:
redirect_fail_action['fail_action']['redirect'][
'query'] = parsed.query
return redirect_fail_action
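    # Illustrative example (assumption, not taken from the original source):
    #   get_redirect_fail_action('https://example.com/maint?reason=down')
    # returns a FAIL_ACTION_HTTP_REDIRECT dict with host 'example.com',
    # protocol 'HTTPS', status code 302, path '/maint' and query 'reason=down'.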
def cleanup_dupof(self, avi_config):
"""
This method is used to clean up dup_of key from different AVI objects
:param avi_config:
:return:
"""
self.remove_dup_key(avi_config["ApplicationProfile"])
self.remove_dup_key(avi_config["NetworkProfile"])
self.remove_dup_key(avi_config["SSLProfile"])
self.remove_dup_key(avi_config['PKIProfile'])
self.remove_dup_key(avi_config["ApplicationPersistenceProfile"])
self.remove_dup_key(avi_config['HealthMonitor'])
def update_profile_ref(self, ref, avi_obj, merge_obj_list):
"""
        This method is used to update the profile references which were
        attached at the time of creation
:param ref:
:param avi_obj:
:param merge_obj_list:
:return:
"""
for obj in avi_obj:
obj_ref = obj.get(ref)
tenant_ref = obj.get('tenant_ref')
if obj_ref:
name = self.get_name(obj_ref)
tenant = self.get_name(tenant_ref)
if name in merge_obj_list:
updated_name = merge_obj_list[name]
if ref == 'application_persistence_profile_ref':
type_cons = OBJECT_TYPE_APPLICATION_PERSISTENCE_PROFILE
if ref == 'application_profile_ref':
type_cons = OBJECT_TYPE_APPLICATION_PROFILE
obj[ref] = self.get_object_ref(updated_name, type_cons,
tenant)
def vs_redirect_http_to_https(self, avi_config, sysdict):
"""
        Removes the VS which is redirected to another VS and updates the
        status and avi object for that VS
:param avi_config: avi configuration after all conversion
:param sysdict: system configuration
:return:
"""
vsrem = {}
LOG.debug("Check started for redirect from HTTP VS to HTTPS VS with "
"no pool")
for vs in avi_config['VirtualService']:
if not vs.get('pool_group_ref') and not vs.get(
'application_profile_ref') and vs.get('services', []) and \
                    not all([s.get('enable_ssl', True) for s in vs['services']])\
and vs.get('http_policies',[]) and vs['http_policies'][
0].get('http_policy_set_ref'):
polname = self.get_name(vs['http_policies'][0][
'http_policy_set_ref'])
pol = [pl for pl in avi_config['HTTPPolicySet'] if pl['name']
== polname]
if pol and pol[0].get('http_request_policy', {}).get('rules',
[]) and pol[0]['http_request_policy']['rules'][0].get(
'redirect_action'):
iplist = [ip['ip_address']['addr'] for ip in vs.get('vip',
[]) if ip.get('ip_address',{}).get('addr')] or (
[vs['ip_address']['addr']] if vs.get(
'ip_address',{}).get('addr') else [])
if iplist:
for nvs in avi_config['VirtualService']:
if vs['name'] != nvs['name'] and [ip for ip in
iplist if ip in ([nip['ip_address']['addr']
for nip in nvs.get('vip', []) if nip.get(
'ip_address',{}).get('addr')] or [nvs[
'ip_address']['addr'] if nvs.get(
'ip_address',{}).get('addr') else []])]:
appname = self.get_name(nvs[
'application_profile_ref']) if \
nvs.get('application_profile_ref') \
else None
if appname == 'ns-migrate-http':
LOG.debug("%s has redirect to %s, hence "
"removing %s" % (vs['name'],
nvs['name'], vs['name']))
vsrem[vs['name']] = nvs['name']
appprof = [pr for pr in (avi_config[
'ApplicationProfile'] + sysdict[
'ApplicationProfile']) if pr['name']
== appname]
if appprof and appprof[0]['type'] == \
'APPLICATION_PROFILE_TYPE_HTTP':
if appprof[0].get('http_profile'):
appprof[0]['http_profile'][
'http_to_https'] = True
else:
appprof[0]['http_profile'] = {
'http_to_https': True}
LOG.debug("%s has redirect to %s, hence "
"setting 'http_to_https' as true "
"and removing %s" %(vs['name'],
nvs['name'], vs['name']))
vsrem[vs['name']] = nvs['name']
# Condition to merge http ports to https vs
if [True for ssl in nvs['services'] if ssl[
'enable_ssl']] and \
[True for ssl_vs in vs['services']
if not ssl_vs['enable_ssl']]:
nvs['services'].append(vs['services'][0])
vsrem[vs['name']] = nvs['name']
LOG.debug("Check completed for redirect from HTTP VS to HTTPS VS with "
"no pool")
if vsrem:
avi_config['VirtualService'] = [v for v in avi_config[
'VirtualService'] if v['name'] not
in vsrem.keys()]
LOG.debug('%s VS got removed from AVI configuration' % str(len(
vsrem)))
for cl in csv_writer_dict_list:
if cl['Object Name'] in vsrem.keys() and cl[
'Netscaler Command'] in ['add lb vserver', 'add cs vserver']:
cl['Status'] = STATUS_INDIRECT
cl['AVI Object'] = 'Redirected to %s' % vsrem[cl[
'Object Name']]
def merge_pool(self, avi_config):
"""
        This method merges pools in AVI if their health monitors are the same
:param avi_config:
:return:
"""
        mergelist = []
        for poolgrp in avi_config['PoolGroup']:
# do not merge the pool if it is a backup pool in the group
pool_member = [obj for obj in poolgrp['members'] if not
obj.get('priority_label', '10') == '2']
length = len(pool_member)
for count in range(length):
pool_name = pool_member[count]['pool_ref'].split(
'&')[1].split('=')[1]
if pool_name in mergelist:
continue
pool = [pl for pl in avi_config['Pool']
if pl['name'] == pool_name]
if not pool:
LOG.debug("'%s' not present" % pool_name)
continue
for count2 in range(count+1, length):
pname = pool_member[count2]['pool_ref'].split(
'&')[1].split('=')[1]
nextpool = [pol for pol in avi_config['Pool']
if pol['name'] == pname]
if not nextpool:
LOG.debug("'%s' not present" % pname)
continue
                    # Compare sorted copies; list.sort() sorts in place and
                    # returns None, so it cannot be used for comparison here.
                    if sorted(pool[0]['health_monitor_refs']) == sorted(
                            nextpool[0]['health_monitor_refs']):
LOG.debug("Merging pool '%s' in '%s'" % (nextpool[0][
'name'], pool[0]['name']))
ip_port = set()
for ser in pool[0]['servers']:
ip_port.add(str(ser['ip']['addr']) + ':' + str(
ser['port']))
for server in nextpool[0]['servers']:
ipport = str(server['ip']['addr']) + ':' + str(
server['port'])
if ipport not in list(ip_port):
pool[0]['servers'].append(server)
for cl in csv_writer_dict_list:
if cl['Object Name'] == (nextpool[0][
'name'].replace('-pool','')) and cl[
'Netscaler Command'] in ['add service',
'add serviceGroup']:
cl['AVI Object'] = 'Merged to %s' % pool[0][
'name']
mergelist.append(nextpool[0]['name'])
for plg in avi_config['PoolGroup']:
plg['members'] = [member for member in plg['members'] if
member['pool_ref'].split('&')[1].split('=')[1] not
in mergelist]
avi_config['Pool'] = [pools for pools in avi_config['Pool'] if pools[
'name'] not in mergelist]
def add_policy(self, policy, updated_vs_name, avi_config, tmp_policy_ref,
vs_obj, tenant_name, cloud_name, prefix, used_poolgrp_ref):
"""
This method is used to add policy objects to AVI and also add
reference in VS
:param policy: policy object
:param updated_vs_name: vs name
:param avi_config: avi config dict
:param tmp_policy_ref: list of policy ref which are already used
:param vs_obj: vs object
:param tenant_name: name of tenant
:param cloud_name: name of cloud
:param prefix: prefix
:param used_poolgrp_ref: list of used pool group ref
:return:
"""
if policy['name'] in tmp_policy_ref:
# clone the http policy set if it is referenced to other VS
policy = self.clone_http_policy_set(policy, updated_vs_name,
avi_config, tenant_name, cloud_name, used_poolgrp_ref,
userprefix=prefix)
updated_http_policy_ref = self.get_object_ref(policy['name'],
OBJECT_TYPE_HTTP_POLICY_SET, tenant_name)
tmp_policy_ref.append(policy['name'])
http_policies = {
'index': 11,
'http_policy_set_ref': updated_http_policy_ref
}
if not vs_obj.get('http_policies'):
vs_obj['http_policies'] = []
else:
ind = max([policies['index'] for policies in vs_obj[
'http_policies']])
http_policies['index'] = ind + 1
vs_obj['http_policies'].append(http_policies)
avi_config['HTTPPolicySet'].append(policy)
def build_redirect_action_dict(self, redirect_url, enable_ssl):
"""
This method returns a redirect action dict
:param redirect_url: redirect url
:param enable_ssl: flag for ssl enable
:return:
"""
redirect_url = self.parse_url(redirect_url)
protocol = str(redirect_url.scheme).upper()
hostname = str(redirect_url.hostname)
pathstring = str(redirect_url.path)
querystring = str(redirect_url.query)
full_path = '%s?%s' % (pathstring, querystring) if pathstring and \
querystring else pathstring
protocol = enable_ssl and 'HTTPS' or 'HTTP' if not protocol else \
protocol
action = {
'protocol': protocol
}
if hostname:
action.update({'host':
{
'type': 'URI_PARAM_TYPE_TOKENIZED',
'tokens': [{
'type': 'URI_TOKEN_TYPE_STRING',
'str_value': hostname,
'start_index': '0',
'end_index': '65535'
}]
}
})
if full_path:
action.update({'path':
{
'type': 'URI_PARAM_TYPE_TOKENIZED',
'tokens': [{
'type': 'URI_TOKEN_TYPE_STRING',
'str_value': full_path,
'start_index': '0',
'end_index': '65535'
}]
}
})
return action
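    # Illustrative example (assumption, not taken from the original source):
    #   build_redirect_action_dict('https://example.com/new?x=1', enable_ssl=True)
    # returns {'protocol': 'HTTPS', 'host': <tokenized 'example.com'>,
    #          'path': <tokenized '/new?x=1'>}, where each token is a
    # URI_TOKEN_TYPE_STRING entry.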
def create_http_to_https_custom_profile(self):
'''
:return: custom application profile dict
'''
return {
'name': "ns-migrate-http",
'type': "APPLICATION_PROFILE_TYPE_HTTP",
'tenant_ref': "/api/tenant/?name=admin",
'preserve_client_ip': False,
'http_profile': {
'max_rps_uri': 0,
'keepalive_header': False,
'max_rps_cip_uri': 0,
'x_forwarded_proto_enabled': False,
'connection_multiplexing_enabled': True,
'websockets_enabled': True,
'enable_request_body_buffering': False,
'hsts_enabled': False,
'xff_enabled': True,
'disable_keepalive_posts_msie6': True,
'keepalive_timeout': 30000,
'ssl_client_certificate_mode': "SSL_CLIENT_CERTIFICATE_NONE",
'http_to_https': True,
'max_bad_rps_cip_uri': 0,
'client_body_timeout': 30000,
'httponly_enabled': False,
'hsts_max_age': 365,
'max_bad_rps_cip': 0,
'server_side_redirect_to_https': False,
'client_max_header_size': 12,
'client_max_request_size': 48,
'max_rps_unknown_uri': 0,
'post_accept_timeout': 30000,
'client_header_timeout': 10000,
'secure_cookie_enabled': False,
'xff_alternate_name': "X-Forwarded-For",
'max_rps_cip': 0,
'client_max_body_size': 0,
'max_rps_unknown_cip': 0,
'allow_dots_in_header_name': False,
'max_bad_rps_uri': 0,
'use_app_keepalive_timeout': False
},
'dos_rl_profile': {
'rl_profile': {
'client_ip_connections_rate_limit': {
'explicit_tracking': False,
'action': {
'status_code': "HTTP_LOCAL_RESPONSE_STATUS_CODE_429",
'type': "RL_ACTION_NONE"
},
'fine_grain': False
}
},
'dos_profile': {
'thresh_period': 5
}
}
}
def correct_vs_ref(self, avi_config):
"""
This method corrects the reference of VS to different objects
:param avi_config: avi configuration dict
:return:
"""
global csv_writer_dict_list
avi_graph = self.make_graph(avi_config)
csv_dict_sub = [row for row in csv_writer_dict_list if row[
'Netscaler Command'] not in ('add lb vserver',
'add cs vserver') and row[
'Status'] in (STATUS_PARTIAL,
STATUS_SUCCESSFUL)]
for dict_row in csv_dict_sub:
obj = dict_row['AVI Object']
if isinstance(obj, str) and obj.startswith('{'):
vs = []
if '__/__' in obj:
for dataobj in obj.split('__/__'):
obj = eval(dataobj)
self.add_vs_ref(obj, avi_graph, vs)
else:
obj = eval(obj)
self.add_vs_ref(obj, avi_graph, vs)
if vs:
dict_row['VS Reference'] = str(list(set(vs)))
else:
dict_row['VS Reference'] = STATUS_NOT_IN_USE
def add_vs_ref(self, obj, avi_graph, vs):
"""
Helper method for adding vs ref
:param obj: object
:param avi_graph: avi graph
:param vs: VS list
:return:
"""
obj_name = obj.get('name', obj.get('hostname'))
if obj_name:
if avi_graph.has_node(obj_name):
LOG.debug("Checked predecessor for %s", obj_name)
predecessor = list(avi_graph.predecessors(obj_name))
if predecessor:
self.get_predecessor(predecessor, avi_graph, vs)
else:
LOG.debug("Object %s may be merged or orphaned", obj_name)
def get_predecessor(self, predecessor, avi_graph, vs):
"""
This method gets the predecessor of the object
:param predecessor: predecessor list
:param avi_graph: avi graph
:param vs: VS list
:return:
"""
if len(predecessor) > 1:
for node in predecessor:
nodelist = [node]
self.get_predecessor(nodelist, avi_graph, vs)
elif len(predecessor):
node_obj = [nod for nod in list(avi_graph.nodes().data()) if
nod[0] == predecessor[0]]
if node_obj and (node_obj[0][1]['type'] == 'VS' or 'VS' in node_obj[
0][1]['type']):
LOG.debug("Predecessor %s found", predecessor[0])
vs.extend(predecessor)
else:
LOG.debug("Checked predecessor for %s", predecessor[0])
nodelist = list(avi_graph.predecessors(predecessor[0]))
self.get_predecessor(nodelist, avi_graph, vs)
else:
LOG.debug("No more predecessor")
| 45.734504 | 85 | 0.509453 | [
"Apache-2.0"
] | avinetworks/alb-sdk | python/avi/migrationtools/netscaler_converter/ns_util.py | 88,542 | Python |
"""BitMEX API Connector."""
from __future__ import absolute_import
import requests
import time
import datetime
import json
import base64
import uuid
from market_maker.auth import APIKeyAuthWithExpires
from market_maker.utils import constants, errors, log
from market_maker.ws.ws_thread import BitMEXWebsocket
from threading import Timer
logger = log.setup_custom_logger('root')
# https://www.bitmex.com/api/explorer/
class BitMEX(object):
"""BitMEX API Connector."""
def __init__(self, base_url=None, symbol=None, apiKey=None, apiSecret=None,
orderIDPrefix='mm_bitmex_', shouldWSAuth=True, postOnly=False, timeout=7):
"""Init connector."""
self.base_url = base_url
self.symbol = symbol
self.postOnly = postOnly
self.shouldWSAuth = shouldWSAuth
if (apiKey is None):
raise Exception("Please set an API key and Secret to get started. See " +
"https://github.com/BitMEX/sample-market-maker/#getting-started for more information."
)
self.apiKey = apiKey
self.apiSecret = apiSecret
if len(orderIDPrefix) > 13:
raise ValueError("settings.ORDERID_PREFIX must be at most 13 characters long!")
self.orderIDPrefix = orderIDPrefix
self.retries = 0 # initialize counter
# Prepare HTTPS session
self.session = requests.Session()
# These headers are always sent
self.session.headers.update({'user-agent': 'liquidbot-' + constants.VERSION})
self.session.headers.update({'content-type': 'application/json'})
self.session.headers.update({'accept': 'application/json'})
# Create websocket for streaming data
self.ws = BitMEXWebsocket()
self.ws.connect(base_url, symbol, shouldAuth=shouldWSAuth)
self.__check_ws_alive()
self.timeout = timeout
def __check_ws_alive(self):
if not self.ws.updated:
self.ws = BitMEXWebsocket()
self.ws.connect(self.base_url, self.symbol, shouldAuth=self.shouldWSAuth)
self.ws.updated = False
        self.t = Timer(10, self.__check_ws_alive)
        self.t.start()
def __del__(self):
self.exit()
def exit(self):
self.ws.exit()
#
# Public methods
#
def ticker_data(self, symbol=None):
"""Get ticker data."""
if symbol is None:
symbol = self.symbol
return self.ws.get_ticker(symbol)
def instrument(self, symbol):
"""Get an instrument's details."""
return self.ws.get_instrument(symbol)
def instruments(self, filter=None):
query = {}
if filter is not None:
query['filter'] = json.dumps(filter)
return self._curl_bitmex(path='instrument', query=query, verb='GET')
def market_depth(self):
"""Get market depth / orderbook."""
return self.ws.market_depth()
def recent_trades(self):
"""Get recent trades.
Returns
-------
A list of dicts:
{u'amount': 60,
u'date': 1306775375,
u'price': 8.7401099999999996,
u'tid': u'93842'},
"""
return self.ws.recent_trades()
#
# Authentication required methods
#
def authentication_required(fn):
"""Annotation for methods that require auth."""
def wrapped(self, *args, **kwargs):
if not (self.apiKey):
msg = "You must be authenticated to use this method"
raise errors.AuthenticationError(msg)
else:
return fn(self, *args, **kwargs)
return wrapped
@authentication_required
def funds(self):
"""Get your current balance."""
return self.ws.funds()
@authentication_required
def position(self, symbol):
"""Get your open position."""
return self.ws.position(symbol)
@authentication_required
def isolate_margin(self, symbol, leverage, rethrow_errors=False):
"""Set the leverage on an isolated margin position"""
path = "position/leverage"
postdict = {
'symbol': symbol,
'leverage': leverage
}
return self._curl_bitmex(path=path, postdict=postdict, verb="POST", rethrow_errors=rethrow_errors)
@authentication_required
def delta(self):
return self.position(self.symbol)['homeNotional']
@authentication_required
def buy(self, quantity, price):
"""Place a buy order.
Returns order object. ID: orderID
"""
return self.place_order(quantity, price)
@authentication_required
def sell(self, quantity, price):
"""Place a sell order.
Returns order object. ID: orderID
"""
return self.place_order(-quantity, price)
@authentication_required
def place_order(self, quantity, price):
"""Place an order."""
if price < 0:
raise Exception("Price must be positive.")
endpoint = "order"
# Generate a unique clOrdID with our prefix so we can identify it.
clOrdID = self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('utf8').rstrip('=\n')
postdict = {
'symbol': self.symbol,
'orderQty': quantity,
'price': price,
'clOrdID': clOrdID
}
return self._curl_bitmex(path=endpoint, postdict=postdict, verb="POST")
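    # Illustrative usage (assumption, not taken from the original source):
    #   bitmex = BitMEX(base_url='https://testnet.bitmex.com/api/v1/',
    #                   symbol='XBTUSD', apiKey='key', apiSecret='secret')
    #   bitmex.buy(100, 9000)   # POSTs {'symbol': 'XBTUSD', 'orderQty': 100,
    #                           #        'price': 9000, 'clOrdID': 'mm_bitmex_...'}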
@authentication_required
def amend_bulk_orders(self, orders):
"""Amend multiple orders."""
# Note rethrow; if this fails, we want to catch it and re-tick
return self._curl_bitmex(path='order/bulk', postdict={'orders': orders}, verb='PUT', rethrow_errors=True)
@authentication_required
def create_bulk_orders(self, orders):
"""Create multiple orders."""
for order in orders:
order['clOrdID'] = self.orderIDPrefix + base64.b64encode(uuid.uuid4().bytes).decode('utf8').rstrip('=\n')
order['symbol'] = self.symbol
if self.postOnly:
order['execInst'] = 'ParticipateDoNotInitiate'
return self._curl_bitmex(path='order/bulk', postdict={'orders': orders}, verb='POST')
@authentication_required
def open_orders(self):
"""Get open orders."""
return self.ws.open_orders(self.orderIDPrefix)
@authentication_required
def http_open_orders(self):
"""Get open orders via HTTP. Used on close to ensure we catch them all."""
path = "order"
orders = self._curl_bitmex(
path=path,
query={
'filter': json.dumps({'ordStatus.isTerminated': False, 'symbol': self.symbol}),
'count': 500
},
verb="GET"
)
# Only return orders that start with our clOrdID prefix.
return [o for o in orders if str(o['clOrdID']).startswith(self.orderIDPrefix)]
@authentication_required
def cancel(self, orderID):
"""Cancel an existing order."""
path = "order"
postdict = {
'orderID': orderID,
}
return self._curl_bitmex(path=path, postdict=postdict, verb="DELETE")
@authentication_required
def withdraw(self, amount, fee, address):
path = "user/requestWithdrawal"
postdict = {
'amount': amount,
'fee': fee,
'currency': 'XBt',
'address': address
}
return self._curl_bitmex(path=path, postdict=postdict, verb="POST", max_retries=0)
def _curl_bitmex(self, path, query=None, postdict=None, timeout=None, verb=None, rethrow_errors=False,
max_retries=None):
"""Send a request to BitMEX Servers."""
# Handle URL
url = self.base_url + path
if timeout is None:
timeout = self.timeout
# Default to POST if data is attached, GET otherwise
if not verb:
verb = 'POST' if postdict else 'GET'
# By default don't retry POST or PUT. Retrying GET/DELETE is okay because they are idempotent.
# In the future we could allow retrying PUT, so long as 'leavesQty' is not used (not idempotent),
# or you could change the clOrdID (set {"clOrdID": "new", "origClOrdID": "old"}) so that an amend
# can't erroneously be applied twice.
if max_retries is None:
max_retries = 0 if verb in ['POST', 'PUT'] else 3
# Auth: API Key/Secret
auth = APIKeyAuthWithExpires(self.apiKey, self.apiSecret)
def exit_or_throw(e):
if rethrow_errors:
raise e
else:
exit(1)
def retry():
self.retries += 1
if self.retries > max_retries:
raise Exception("Max retries on %s (%s) hit, raising." % (path, json.dumps(postdict or '')))
return self._curl_bitmex(path, query, postdict, timeout, verb, rethrow_errors, max_retries)
# Make the request
response = None
try:
logger.info("sending req to %s: %s" % (url, json.dumps(postdict or query or '')))
req = requests.Request(verb, url, json=postdict, auth=auth, params=query)
prepped = self.session.prepare_request(req)
response = self.session.send(prepped, timeout=timeout)
# Make non-200s throw
response.raise_for_status()
except requests.exceptions.HTTPError as e:
if response is None:
raise e
# 401 - Auth error. This is fatal.
if response.status_code == 401:
logger.error("API Key or Secret incorrect, please check and restart.")
logger.error("Error: " + response.text)
if postdict:
logger.error(postdict)
# Always exit, even if rethrow_errors, because this is fatal
exit(1)
# 404, can be thrown if order canceled or does not exist.
elif response.status_code == 404:
if verb == 'DELETE':
logger.error("Order not found: %s" % postdict['orderID'])
return
logger.error("Unable to contact the BitMEX API (404). " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
exit_or_throw(e)
# 429, ratelimit; cancel orders & wait until X-RateLimit-Reset
elif response.status_code == 429:
logger.error("Ratelimited on current request. Sleeping, then trying again. Try fewer " +
"order pairs or contact [email protected] to raise your limits. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
# Figure out how long we need to wait.
ratelimit_reset = response.headers['X-RateLimit-Reset']
to_sleep = int(ratelimit_reset) - int(time.time())
reset_str = datetime.datetime.fromtimestamp(int(ratelimit_reset)).strftime('%X')
# We're ratelimited, and we may be waiting for a long time. Cancel orders.
logger.warning("Canceling all known orders in the meantime.")
self.cancel([o['orderID'] for o in self.open_orders()])
logger.error("Your ratelimit will reset at %s. Sleeping for %d seconds." % (reset_str, to_sleep))
time.sleep(to_sleep)
# Retry the request.
return retry()
# 503 - BitMEX temporary downtime, likely due to a deploy. Try again
elif response.status_code == 503:
logger.warning("Unable to contact the BitMEX API (503), retrying. " +
"Request: %s \n %s" % (url, json.dumps(postdict)))
time.sleep(3)
return retry()
elif response.status_code == 400:
error = response.json()['error']
message = error['message'].lower() if error else ''
# Duplicate clOrdID: that's fine, probably a deploy, go get the order(s) and return it
if 'duplicate clordid' in message:
orders = postdict['orders'] if 'orders' in postdict else postdict
IDs = json.dumps({'clOrdID': [order['clOrdID'] for order in orders]})
orderResults = self._curl_bitmex('/order', query={'filter': IDs}, verb='GET')
for i, order in enumerate(orderResults):
if (
order['orderQty'] != abs(postdict['orderQty']) or
order['side'] != ('Buy' if postdict['orderQty'] > 0 else 'Sell') or
order['price'] != postdict['price'] or
order['symbol'] != postdict['symbol']):
raise Exception('Attempted to recover from duplicate clOrdID, but order returned from API ' +
'did not match POST.\nPOST data: %s\nReturned order: %s' % (
json.dumps(orders[i]), json.dumps(order)))
# All good
return orderResults
elif 'insufficient available balance' in message:
logger.error('Account out of funds. The message: %s' % error['message'])
exit_or_throw(Exception('Insufficient Funds'))
# If we haven't returned or re-raised yet, we get here.
logger.error("Unhandled Error: %s: %s" % (e, response.text))
logger.error("Endpoint was: %s %s: %s" % (verb, path, json.dumps(postdict)))
exit_or_throw(e)
except requests.exceptions.Timeout as e:
# Timeout, re-run this request
logger.warning("Timed out on request: %s (%s), retrying..." % (path, json.dumps(postdict or '')))
return retry()
except requests.exceptions.ConnectionError as e:
logger.warning("Unable to contact the BitMEX API (%s). Please check the URL. Retrying. " +
"Request: %s %s \n %s" % (e, url, json.dumps(postdict)))
time.sleep(1)
return retry()
# Reset retry counter on success
self.retries = 0
return response.json()
| 38.780749 | 121 | 0.574049 | [
"Apache-2.0"
] | veskokaradzhov/sample-market-maker | market_maker/bitmex.py | 14,504 | Python |
"""
Django settings for activitytracker project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
# import dj_database_url
# from decouple import Csv, config
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# DATABASES['default'] = dj_database_url.config()
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=++yhol*$dghpolbn73tnifzx(bqdfk1on5i&8m9*-4!k)=8*k'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# ALLOWED_HOSTS = ['*']
ALLOWED_HOSTS = []
# ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv())
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'accounts',
'activities',
'home',
'Custom',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'activitytracker.urls'
WSGI_APPLICATION = 'activitytracker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
#for heroku
# DEBUG = config('DEBUG', default=False, cast=bool)
# DATABASES = {
# 'default': dj_database_url.config(
# default=config('DATABASE_URL')
# )
# }
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'activitytracker',
'USER': 'root',
'PASSWORD': 'root',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kolkata'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
"django.contrib.auth.context_processors.auth",
]
}
}
]
TEMPLATE_CONTEXT_PROCESSORS = (
'django.core.context_processors.request',
)
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '[email protected]'
# Must generate an app-specific password for your account in Gmail settings
EMAIL_HOST_PASSWORD = 'app_specific_password'
EMAIL_PORT = 587
# Use the authenticated account as the default From address
DEFAULT_FROM_EMAIL = EMAIL_HOST_USER
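# Illustrative example (assumption, not part of the original settings): with the
# SMTP settings above, application code can send mail via
#   from django.core.mail import send_mail
#   from django.conf import settings
#   send_mail('Subject', 'Body', settings.DEFAULT_FROM_EMAIL, ['[email protected]'])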
GITHUB_CLIENT = ''
GITHUB_TOKEN = '' | 23.608108 | 72 | 0.706926 | [
"BSD-3-Clause"
] | codyowl/activitytracker | activitytracker/settings.py | 3,494 | Python |
# -*- coding: utf-8 -*-
# Copyright (c) 2014, 2015 Mitch Garnaat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.6.0'
| 37.294118 | 74 | 0.741325 | [
"Apache-2.0"
] | BrunoCarrier/kappa | kappa/__init__.py | 634 | Python |
import io
import json
import logging
from pathlib import Path
import boto3
from ..api import API
from ..exceptions import (
SABaseException, SAExistingAnnotationClassNameException,
SANonExistingAnnotationClassNameException
)
from .project import get_project_metadata
logger = logging.getLogger("superannotate-python-sdk")
_api = API.get_instance()
def create_annotation_class(project, name, color, attribute_groups=None):
"""Create annotation class in project
:param project: project name or metadata of the project
:type project: str or dict
:param name: name for the class
:type name: str
:param color: RGB hex color value, e.g., "#FFFFAA"
:type color: str
:param attribute_groups: example:
[ { "name": "tall", "is_multiselect": 0, "attributes": [ { "name": "yes" }, { "name": "no" } ] },
{ "name": "age", "is_multiselect": 0, "attributes": [ { "name": "young" }, { "name": "old" } ] } ]
:type attribute_groups: list of dicts
:return: new class metadata
:rtype: dict
"""
if not isinstance(project, dict):
project = get_project_metadata(project)
try:
get_annotation_class_metadata(project, name)
except SANonExistingAnnotationClassNameException:
pass
else:
logger.warning(
"Annotation class %s already in project. Skipping.", name
)
return None
team_id, project_id = project["team_id"], project["id"]
logger.info(
"Creating annotation class in project %s with name %s", project["name"],
name
)
params = {
'team_id': team_id,
'project_id': project_id,
}
data = {
"classes":
[
{
"name":
name,
"color":
color,
"attribute_groups":
attribute_groups if attribute_groups is not None else []
}
]
}
response = _api.send_request(
req_type='POST', path='/classes', params=params, json_req=data
)
if not response.ok:
raise SABaseException(
response.status_code, "Couldn't create class " + response.text
)
res = response.json()
new_class = res[0]
return new_class
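# Illustrative usage (assumption, not taken from the original source):
#   create_annotation_class(
#       "Example Project", "Person", "#FF0000",
#       attribute_groups=[{"name": "age", "is_multiselect": 0,
#                          "attributes": [{"name": "young"}, {"name": "old"}]}])
# returns the metadata dict of the newly created class.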
def delete_annotation_class(project, annotation_class):
"""Deletes annotation class from project
:param project: project name or metadata of the project
:type project: str or dict
:param project: annotation class name or metadata
:type project: str or dict
"""
if not isinstance(project, dict):
project = get_project_metadata(project)
if not isinstance(annotation_class, dict):
annotation_class = get_annotation_class_metadata(
project, annotation_class
)
team_id, project_id, name, class_id = _api.team_id, annotation_class[
"project_id"], annotation_class["name"], annotation_class["id"]
logger.info(
"Deleting annotation class from project %s with name %s",
project["name"], name
)
params = {
'team_id': team_id,
'project_id': project_id,
}
response = _api.send_request(
req_type='DELETE', path=f'/class/{class_id}', params=params
)
if not response.ok:
raise SABaseException(
response.status_code,
"Couldn't delete annotation class " + response.text
)
def create_annotation_classes_from_classes_json(
project, classes_json, from_s3_bucket=None
):
"""Creates annotation classes in project from a SuperAnnotate format
annotation classes.json.
:param project: project name or metadata of the project
:type project: str or dict
:param classes_json: JSON itself or path to the JSON file
:type classes_json: list or Pathlike (str or Path)
:param from_s3_bucket: AWS S3 bucket to use. If None then classes_json is in local filesystem
:type from_s3_bucket: str
:return: list of created annotation class metadatas
:rtype: list of dicts
"""
if not isinstance(project, dict):
project = get_project_metadata(project)
team_id, project_id = project["team_id"], project["id"]
if not isinstance(classes_json, list):
logger.info(
"Creating annotation classes in project %s from %s.",
project["name"], classes_json
)
if from_s3_bucket is None:
classes = json.load(open(classes_json))
else:
from_session = boto3.Session()
from_s3 = from_session.resource('s3')
file = io.BytesIO()
from_s3_object = from_s3.Object(from_s3_bucket, classes_json)
from_s3_object.download_fileobj(file)
file.seek(0)
classes = json.load(file)
else:
classes = classes_json
existing_classes = search_annotation_classes(project)
new_classes = []
for cs in classes:
if cs["name"] in existing_classes:
logger.warning(
"Annotation class %s already in project. Skipping.", cs["name"]
)
else:
new_classes.append(cs)
res = []
def del_unn(d):
for s in [
"updatedAt", "createdAt", "id", "project_id", "group_id",
"class_id", "count"
]:
if s in d:
del d[s]
for annotation_class in new_classes:
del_unn(annotation_class)
for attribute_group in annotation_class["attribute_groups"]:
del_unn(attribute_group)
for attribute in attribute_group["attributes"]:
del_unn(attribute)
CHUNK_SIZE = 2000
for i in range(0, len(new_classes), CHUNK_SIZE):
params = {
'team_id': team_id,
'project_id': project_id,
}
data = {"classes": new_classes[i:i + CHUNK_SIZE]}
response = _api.send_request(
req_type='POST', path='/classes', params=params, json_req=data
)
if not response.ok:
raise SABaseException(
response.status_code, "Couldn't create classes " + response.text
)
res += response.json()
assert len(res) == len(new_classes)
return res
def search_annotation_classes(project, name_prefix=None, return_metadata=False):
"""Searches annotation classes by name_prefix (case-insensitive)
:param project: project name or metadata of the project
:type project: str or dict
:param name_prefix: name prefix for search. If None all annotation classes
will be returned
:type name_prefix: str
:return: annotation classes of the project
:rtype: list of dicts
"""
if not isinstance(project, dict):
project = get_project_metadata(project)
result_list = []
team_id, project_id = project["team_id"], project["id"]
params = {'team_id': team_id, 'project_id': project_id, 'offset': 0}
if name_prefix is not None:
params['name'] = name_prefix
while True:
response = _api.send_request(
req_type='GET', path='/classes', params=params
)
if not response.ok:
raise SABaseException(
response.status_code, "Couldn't search classes " + response.text
)
res = response.json()
result_list += res["data"]
new_len = len(result_list)
# for r in result_list:
# print(r)
if res["count"] <= new_len:
break
params["offset"] = new_len
if return_metadata:
return result_list
else:
return [x["name"] for x in result_list]
def get_annotation_class_metadata(project, annotation_class_name):
"""Returns annotation class metadata
:param project: project name or metadata of the project
:type project: str or dict
:param annotation_class_name: annotation class name
:type annotation_class_name: str
:return: metadata of annotation class
:rtype: dict
"""
annotation_classes = search_annotation_classes(
project, annotation_class_name, return_metadata=True
)
results = []
for annotation_class in annotation_classes:
if annotation_class["name"] == annotation_class_name:
results.append(annotation_class)
if len(results) > 1:
raise SAExistingAnnotationClassNameException(
0, "Annotation class name " + annotation_class_name +
" is not unique. To use SDK please make annotation class names unique."
)
elif len(results) == 1:
return results[0]
else:
raise SANonExistingAnnotationClassNameException(
0, "Annotation class with name " + annotation_class_name +
" doesn't exist."
)
def download_annotation_classes_json(project, folder):
"""Downloads project classes.json to folder
:param project: project name or metadata of the project
:type project: str or dict
:param folder: folder to download to
:type folder: Pathlike (str or Path)
:return: path of the download file
:rtype: str
"""
if not isinstance(project, dict):
project = get_project_metadata(project)
logger.info(
"Downloading classes.json from project %s to folder %s.",
project["name"], folder
)
clss = search_annotation_classes(project, return_metadata=True)
filepath = Path(folder) / "classes.json"
json.dump(clss, open(filepath, "w"), indent=4)
return str(filepath)
def fill_class_and_attribute_names(annotations_json, annotation_classes_dict):
for r in annotations_json:
if "classId" in r and r["classId"] in annotation_classes_dict:
r["className"] = annotation_classes_dict[r["classId"]]["name"]
if "attributes" in r:
for attribute in r["attributes"]:
if "groupId" in attribute and "id" in "attribute":
attribute["groupName"] = annotation_classes_dict[
r["classId"]]["attribute_groups"][
attribute["groupId"]]["name"]
attribute["name"] = annotation_classes_dict[
r["classId"]]["attribute_groups"][attribute[
"groupId"]]["attributes"][attribute["id"]]
def fill_class_and_attribute_ids(annotation_json, annotation_classes_dict):
for ann in annotation_json:
if (
"userId" in ann and "type" in ann and ann["type"] == "meta"
) or "className" not in ann:
continue
annotation_class_name = ann["className"]
        if annotation_class_name not in annotation_classes_dict:
logger.warning(
"Couldn't find annotation class %s", annotation_class_name
)
continue
class_id = annotation_classes_dict[annotation_class_name]["id"]
ann["classId"] = class_id
for attribute in ann["attributes"]:
if attribute["groupName"] in annotation_classes_dict[
annotation_class_name]["attribute_groups"]:
attribute["groupId"] = annotation_classes_dict[
annotation_class_name]["attribute_groups"][
attribute["groupName"]]["id"]
else:
logger.warning(
"Couldn't find annotation group %s", attribute["groupName"]
)
continue
if attribute["name"] in annotation_classes_dict[
annotation_class_name]["attribute_groups"][
attribute["groupName"]]["attributes"]:
attribute["id"] = annotation_classes_dict[
annotation_class_name]["attribute_groups"][
attribute["groupName"]]["attributes"][attribute["name"]]
else:
logger.warning(
"Couldn't find annotation name %s in annotation group %s",
attribute["name"], attribute["groupName"]
)
del attribute["groupId"]
def get_annotation_classes_id_to_name(annotation_classes):
annotation_classes_dict = {}
for annotation_class in annotation_classes:
class_id = annotation_class["id"]
class_name = annotation_class["name"]
class_info = {"name": class_name}
class_info["attribute_groups"] = {}
if "attribute_groups" in annotation_class:
for attribute_group in annotation_class["attribute_groups"]:
attribute_group_info = {}
for attribute in attribute_group["attributes"]:
attribute_group_info[attribute["id"]] = attribute["name"]
class_info["attribute_groups"][attribute_group["id"]] = {
"name": attribute_group["name"],
"attributes": attribute_group_info
}
annotation_classes_dict[class_id] = class_info
return annotation_classes_dict
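# Illustrative shape of the returned mapping (assumption, not taken from the
# original source):
#   {42: {'name': 'Person',
#         'attribute_groups': {7: {'name': 'age',
#                                  'attributes': {1: 'young', 2: 'old'}}}}}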
def get_annotation_classes_name_to_id(annotation_classes):
annotation_classes_dict = {}
for annotation_class in annotation_classes:
class_id = annotation_class["id"]
class_name = annotation_class["name"]
class_info = {"id": class_id}
class_info["attribute_groups"] = {}
if "attribute_groups" in annotation_class:
for attribute_group in annotation_class["attribute_groups"]:
attribute_group_info = {}
for attribute in attribute_group["attributes"]:
if attribute["name"] in attribute_group_info:
logger.warning(
"Duplicate annotation class attribute name %s in attribute group %s. Only one of the annotation classe attributes will be used. This will result in errors in annotation upload.",
attribute["name"], attribute_group["name"]
)
attribute_group_info[attribute["name"]] = attribute["id"]
if attribute_group["name"] in class_info["attribute_groups"]:
logger.warning(
"Duplicate annotation class attribute group name %s. Only one of the annotation classe attribute groups will be used. This will result in errors in annotation upload.",
attribute_group["name"]
)
class_info["attribute_groups"][attribute_group["name"]] = {
"id": attribute_group["id"],
"attributes": attribute_group_info
}
if class_name in annotation_classes_dict:
logger.warning(
"Duplicate annotation class name %s. Only one of the annotation classes will be used. This will result in errors in annotation upload.",
class_name
)
annotation_classes_dict[class_name] = class_info
return annotation_classes_dict
| 37.118519 | 206 | 0.609526 | [
"MIT"
] | yuki-inaho/superannotate-python-sdk | superannotate/db/annotation_classes.py | 15,033 | Python |
from cmstack.hdfg import hdfgutils
from cmstack.hdfg.hdfg_pb2 import Component, ValueInfo
import logging
from . import is_literal, is_number
def flatten_graph(output_graph, graph, templates, context,edge_node_ids, arg_map):
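    """Recursively inline `graph` into `output_graph`.
    Edge and node names are prefixed with `context`, arguments are resolved through
    `arg_map`, and every node of category 'component' is expanded by recursing into
    its template from `templates`.
    """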
components = {}
for e in graph.edge_info:
copy_edge = ValueInfo()
if is_literal(e):
uid = str(e)
copy_edge.CopyFrom(graph.edge_info[e])
copy_edge.name = uid
elif e in arg_map.keys():
uid = context + e
copy_edge.CopyFrom(arg_map[e])
copy_edge.attributes['alias'].CopyFrom(hdfgutils.make_attribute('alias', arg_map[e].name))
else:
uid = context + e
copy_edge.CopyFrom(graph.edge_info[e])
copy_edge.name = uid
if e in graph.input and e not in output_graph.input:
output_graph.input.extend([uid])
elif e in graph.state and e not in output_graph.state:
output_graph.state.extend([uid])
elif e in graph.output and e not in output_graph.output:
output_graph.output.extend([uid])
elif e in graph.parameters and e not in output_graph.parameters:
output_graph.parameters.extend([uid])
if graph.name != 'main':
ordered_args = hdfgutils.get_attribute_value(graph.attributes['ordered_args'])
else:
ordered_args = []
if 'dimensions' in list(copy_edge.attributes):
dims = hdfgutils.get_attribute_value(copy_edge.attributes['dimensions'])
new_dims = []
for d in dims:
if d in arg_map.keys():
new_dims.append(arg_map[d].name)
else:
new_dims.append(d)
copy_edge.attributes['dimensions'].CopyFrom(hdfgutils.make_attribute('dimensions', new_dims))
if uid not in edge_node_ids['edges'].keys():
edge_node_ids['edges'][uid] = str(len(edge_node_ids['edges'].keys()))
output_graph.edge_info[uid].CopyFrom(copy_edge)
if e not in arg_map.keys():
output_graph.edge_info[uid].gid = int(edge_node_ids['edges'][uid])
output_graph.edge_info[uid].attributes['component_type'].CopyFrom(hdfgutils.make_attribute('component_type', graph.op_type))
for n in graph.sub_graph:
op_cat = n.op_cat
if op_cat == 'component':
if n.op_type in components.keys():
components[n.op_type] += 1
new_context = context + n.op_type + str(components[n.op_type]) + '/'
else:
components[n.op_type] = 0
new_context = context + n.op_type + str(components[n.op_type]) + '/'
instance_args = hdfgutils.get_attribute_value(n.attributes['ordered_args'])
ctemplate = templates[n.op_type]
signature_args = hdfgutils.get_attribute_value(ctemplate.attributes['ordered_args'])
carg_map = create_map(instance_args, signature_args,graph.edge_info, ctemplate.edge_info, templates[n.op_type])
update_statement_graphs(ctemplate, output_graph, new_context)
flatten_graph(output_graph, ctemplate, templates, new_context , edge_node_ids, carg_map)
else:
new = update_node(n, context, arg_map)
if new.name not in edge_node_ids['nodes'].keys():
edge_node_ids['nodes'][new.name] = str(len(edge_node_ids['nodes'].keys()))
new.gid = int(edge_node_ids['nodes'][new.name])
output_graph.sub_graph.extend([new])
def update_statement_graphs(template, output_graph, context):
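    """Copy the template's statement graphs into output_graph, prefixing node names with context."""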
for s in template.statement_graphs:
statement_nodes = s.statement_node
new_graph = output_graph.statement_graphs.add()
nodes = []
for n in statement_nodes:
nodes.append(context + n)
new_graph.statement_node.extend(nodes)
def create_map(instance_args, signature_args, instance_edges, signature_edges, op=None):
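    """Map template signature argument names to the caller's edge_info entries.
    Dimension names of each argument are mapped as well, and unspecified trailing
    arguments are filled from their declared defaults; mismatched dimensions or a
    missing default are reported via logging.error.
    """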
carg_map = {}
for i in range(len(instance_args)):
iarg = instance_args[i]
sarg = signature_args[i]
if is_number(iarg):
iarg = str(iarg)
carg_map[sarg] = instance_edges[iarg]
carg_map[sarg].name = iarg
idims = hdfgutils.get_attribute_value(instance_edges[iarg].attributes['dimensions'])
iid_literal = False
if instance_edges[iarg].iid:
inst_iid = instance_edges[iarg].iid
iid_literal = is_literal(inst_iid)
sdims = hdfgutils.get_attribute_value(signature_edges[sarg].attributes['dimensions'])
if len(idims) != len(sdims) and not iid_literal:
logging.error("Error! Dimensions between edges connecting components do not match:{} versus {} for {} and {}".format(idims, sdims, iarg, sarg))
elif not iid_literal:
for d in range(len(idims)):
inst_dim = idims[d]
sig_dim = sdims[d]
if is_number(inst_dim):
inst_dim = str(inst_dim)
carg_map[sig_dim] = instance_edges[inst_dim]
carg_map[sig_dim].name = inst_dim
carg_map[sig_dim].attributes['vtype'].CopyFrom(hdfgutils.make_attribute('vtype', 'scalar'))
if len(signature_args) > len(instance_args):
start = len(instance_args)
for default in signature_args[start:]:
sig_attr = list(signature_edges[default].attributes)
if 'default' not in sig_attr:
logging.error(
"Error! No default value for unspecified arg: {}".format(default))
else:
def_val = hdfgutils.get_attribute_value(signature_edges[default].attributes['default'])
carg_map[default] = signature_edges[default]
carg_map[default].attributes['value'].CopyFrom(hdfgutils.make_attribute('value', def_val))
if is_number(def_val):
def_val = str(def_val)
carg_map[default].name = def_val
carg_map[default].attributes['vtype'].CopyFrom(hdfgutils.make_attribute('vtype', 'scalar'))
for e in op.edge_info:
vcat = hdfgutils.get_attribute_value(op.edge_info[e].attributes['vcat'])
if vcat == 'declaration':
dims = hdfgutils.get_attribute_value(op.edge_info[e].attributes['dimensions'])
sig_name = op.edge_info[e].name.rsplit("/", 1)[-1]
return carg_map
def update_node(node, context, carg_map):
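    """Return a copy of node whose name and edge references are prefixed with context.
    References found in carg_map are replaced by the mapped edge names; literals are
    kept unchanged.
    """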
new = Component(name=context + node.name)
inputs = []
outputs = []
states = []
parameters = []
for inp in node.input:
if is_number(inp):
i = str(inp)
else:
i = inp
if is_literal(i):
inputs.append(i)
elif i in carg_map.keys():
# inputs.append(carg_map[i])
inputs.append(carg_map[i].name)
else:
inputs.append(context + i)
new.input.extend(inputs)
for o in node.output:
if is_number(o):
out = str(o)
else:
out = o
if is_literal(out):
outputs.append(out)
elif out in carg_map.keys():
# outputs.append(carg_map[out])
outputs.append(carg_map[out].name)
else:
outputs.append(context + out)
new.output.extend(outputs)
for st in node.state:
if is_number(st):
s = str(st)
else:
s = st
if is_literal(s):
states.append(s)
elif s in carg_map.keys():
# states.append(carg_map[s])
states.append(carg_map[s].name)
else:
states.append(context + s)
new.state.extend(states)
for para in node.parameters:
if is_number(para):
p = str(para)
else:
p = para
if is_literal(p):
parameters.append(p)
elif p in carg_map.keys():
# parameters.append(carg_map[p])
parameters.append(carg_map[p].name)
else:
parameters.append(context + p)
new.parameters.extend(parameters)
for attr in node.attributes:
new.attributes[attr].CopyFrom(node.attributes[attr])
new.op_type = node.op_type
return new
| 35.939655 | 155 | 0.598225 | [
"Apache-2.0"
] | he-actlab/cdstack | cmstack/hdfg/passes/flatten.py | 8,338 | Python |
"""FastAPI Project for CodeSpace.
https://csdot.ml
"""
| 13.75 | 33 | 0.690909 | [
"Apache-2.0"
] | codespacedot/CodeSpaceAPI | src/__init__.py | 55 | Python |
import cv2
import os,shutil
import numpy as np
from Adb import Adb
import time
class Photo():
'''
    Extract image information and compare images
'''
def __init__(self,img_path) -> None:
'''
        Read the image from the given path
'''
self.img = cv2.imread(img_path)
class sourceData():
'''
    Collect test data
'''
def __init__(self) -> None:
pass
@staticmethod
def getScreenPhoto():
adb = Adb(device='d5c42b2a')
for x in range(100):
adb.screenCap()
adb.pullBackScreenCap(os.path.join('.','photo',time.strftime("%Y-%m-%d_%H-%M-%S.png", time.localtime()) ))
print("截图",time.asctime(time.localtime()))
time.sleep(3)
@staticmethod
def calcOujilide(img):
img_new = img[938:1035,1935:2247]
img_new_num = np.sum(img_new)/(img_new.shape[0]*img_new.shape[1]*img_new.shape[2])
return img_new_num
@staticmethod
def calcFangcha(img):
'''
        Compute the variance of the image region [938:1013, 1935:2247], used to measure similarity between frames.
        Procedure: average over rows and channels to get one mean per column, then take the variance of those column means.
        return (float)
'''
img_new = img[938:1013,1935:2247]
img_avg = np.mean(img_new,axis=(0,2))
return np.var(img_avg)
if __name__ == '__main__':
static_num = sourceData.calcFangcha(cv2.imread(os.path.join("adb","screen.png")))
for img_name in os.listdir(os.path.join("photo")):
img = cv2.imread(os.path.join("photo",img_name))
img_num = sourceData.calcFangcha(img)
chazhi = abs(static_num-img_num)
# chazhi = (abs(static_num**2-img_num**2))**0.5
print(img_name,"的差值为",chazhi)
if chazhi<20:
print("Copy this file: ",img_name)
shutil.copyfile(os.path.join("photo",img_name),os.path.join("photo2",img_name))
print("Write this file: ",img_name)
cv2.imwrite(os.path.join("photo3",img_name),img[938:1013,1935:2247])
# '''Take screenshots for ~400 s'''
# sourceData.getScreenPhoto() | 28.114286 | 118 | 0.579776 | [
"MIT"
] | Rougnt/ArkNightAutoClick | Photo.py | 2,128 | Python |
#!/usr/bin/env python
# this node will be implemented on the master node
# this is a test script for the drive motors
# driven by stop-light and front-light detection
# this script will be implemented in another node
# import libraries
import rospy,sys,time,atexit,numpy
from std_msgs.msg import String,Int16MultiArray
# define variables
avoidingVehicle = False
array = Int16MultiArray()
array.data = []
controlPub = rospy.Publisher("cmd",Int16MultiArray,queue_size=1)
def turnOffMotors():
array.data = [0,0,0,0]
controlPub.publish(array)
def setSpeed(motor1,motor2):
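    """Publish the speed command [motor1, motor2, 0, 0] on the "cmd" topic; calls turnOffMotors() when both speeds are zero."""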
if motor1 == 0 and motor2 == 0:
turnOffMotors()
else:
array.data = [motor1,motor2,0,0]
controlPub.publish(array)
def avoidVehicle():
global avoidingVehicle
turnOffMotors()
avoidingVehicle = False
def callback(data):
global avoidingVehicle
rospy.loginfo(rospy.get_caller_id() +" Led control String received: %s",data.data)
if data.data == "stop" :
turnOffMotors()
elif (data.data == "front" and avoidingVehicle == False):
avoidingVehicle = True
avoidVehicle()
elif data.data == "w":
setSpeed(150,150)
elif data.data == "s":
turnOffMotors()
def led_control():
rospy.init_node('led_control',anonymous=True)
rospy.Subscriber('led_control_topic',String,callback)
try:
rospy.spin()
except KeyboardInterrupt:
print("Shutting down")
if __name__ == '__main__':
led_control()
| 23.844828 | 83 | 0.737527 | [
"BSD-3-Clause"
] | isarlab-department-engineering/ros-stop-light-control | deprecated/led_control.py | 1,383 | Python |
from dataclasses import dataclass, field
from typing import List
from bindings.wfs.range import Range
__NAMESPACE__ = "http://www.opengis.net/ows/1.1"
@dataclass
class AllowedValues:
"""List of all the valid values and/or ranges of values for this quantity.
For numeric quantities, signed values should be ordered from
negative infinity to positive infinity.
"""
class Meta:
namespace = "http://www.opengis.net/ows/1.1"
value: List[str] = field(
default_factory=list,
metadata={
"name": "Value",
"type": "Element",
},
)
range: List[Range] = field(
default_factory=list,
metadata={
"name": "Range",
"type": "Element",
},
)
| 23.272727 | 78 | 0.598958 | [
"Apache-2.0"
] | NIVANorge/s-enda-playground | catalog/bindings/wfs/allowed_values.py | 768 | Python |
import importlib
import inspect
import os
import sys
import warnings
from abc import ABCMeta, abstractmethod
from collections import namedtuple
import six
from dagster import check
from dagster.core.errors import DagsterInvariantViolationError
from dagster.serdes import whitelist_for_serdes
from dagster.seven import import_module_from_path
from dagster.utils import alter_sys_path, load_yaml_from_path
class CodePointer(six.with_metaclass(ABCMeta)):
@abstractmethod
def load_target(self):
pass
@abstractmethod
def describe(self):
pass
@staticmethod
def from_module(module_name, definition):
check.str_param(module_name, 'module_name')
check.str_param(definition, 'definition')
return ModuleCodePointer(module_name, definition)
@staticmethod
def from_python_package(module_name, attribute):
check.str_param(module_name, 'module_name')
check.str_param(attribute, 'attribute')
return PackageCodePointer(module_name, attribute)
@staticmethod
def from_python_file(python_file, definition, working_directory):
check.str_param(python_file, 'python_file')
check.str_param(definition, 'definition')
check.opt_str_param(working_directory, 'working_directory')
if working_directory:
return FileInDirectoryCodePointer(
python_file=python_file, fn_name=definition, working_directory=working_directory
)
return FileCodePointer(python_file=python_file, fn_name=definition)
@staticmethod
def from_legacy_repository_yaml(file_path):
check.str_param(file_path, 'file_path')
config = load_yaml_from_path(file_path)
repository_config = check.dict_elem(config, 'repository')
module_name = check.opt_str_elem(repository_config, 'module')
file_name = check.opt_str_elem(repository_config, 'file')
fn_name = check.str_elem(repository_config, 'fn')
return (
CodePointer.from_module(module_name, fn_name)
if module_name
# rebase file in config off of the path in the config file
else CodePointer.from_python_file(rebase_file(file_name, file_path), fn_name, None)
)
def rebase_file(relative_path_in_file, file_path_resides_in):
'''
In config files, you often put file paths that are meant to be relative
to the location of that config file. This does that calculation.
'''
check.str_param(relative_path_in_file, 'relative_path_in_file')
check.str_param(file_path_resides_in, 'file_path_resides_in')
return os.path.join(
os.path.dirname(os.path.abspath(file_path_resides_in)), relative_path_in_file
)
def load_python_file(python_file, working_directory):
'''
Takes a path to a python file and returns a loaded module
'''
check.str_param(python_file, 'python_file')
module_name = os.path.splitext(os.path.basename(python_file))[0]
cwd = sys.path[0]
if working_directory:
with alter_sys_path(to_add=[working_directory], to_remove=[cwd]):
return import_module_from_path(module_name, python_file)
error = None
sys_modules = {k: v for k, v in sys.modules.items()}
with alter_sys_path(to_add=[], to_remove=[cwd]):
try:
module = import_module_from_path(module_name, python_file)
except ImportError as ie:
# importing alters sys.modules in ways that may interfere with the import below, even
# if the import has failed. to work around this, we need to manually clear any modules
# that have been cached in sys.modules due to the speculative import call
# Also, we are mutating sys.modules instead of straight-up assigning to sys_modules,
# because some packages will do similar shenanigans to sys.modules (e.g. numpy)
to_delete = set(sys.modules) - set(sys_modules)
for key in to_delete:
del sys.modules[key]
error = ie
if not error:
return module
try:
module = import_module_from_path(module_name, python_file)
# if here, we were able to resolve the module with the working directory on the
# path, but should error because we may not always invoke from the same directory
# (e.g. from cron)
warnings.warn(
(
'Module `{module}` was resolved using the working directory. The ability to '
'implicitly load modules from the working directory is deprecated and '
'will be removed in a future release. Please explicitly specify the '
'`working_directory` config option in your workspace.yaml or install `{module}` to '
'your python environment.'
).format(module=error.name if hasattr(error, 'name') else module_name)
)
return module
except ImportError:
raise error
def load_python_module(module_name, warn_only=False, remove_from_path_fn=None):
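    '''
    Takes a module name and returns a loaded module, preferring an installed package
    over the working directory; warns (warn_only=True) or raises
    DagsterInvariantViolationError when the module only resolves via the working directory
    '''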
check.str_param(module_name, 'module_name')
check.bool_param(warn_only, 'warn_only')
check.opt_callable_param(remove_from_path_fn, 'remove_from_path_fn')
error = None
remove_paths = remove_from_path_fn() if remove_from_path_fn else [] # hook for tests
remove_paths.insert(0, sys.path[0]) # remove the working directory
with alter_sys_path(to_add=[], to_remove=remove_paths):
try:
module = importlib.import_module(module_name)
except ImportError as ie:
error = ie
if error:
try:
module = importlib.import_module(module_name)
# if here, we were able to resolve the module with the working directory on the path,
# but should error because we may not always invoke from the same directory (e.g. from
# cron)
if warn_only:
warnings.warn(
(
'Module {module} was resolved using the working directory. The ability to '
'load uninstalled modules from the working directory is deprecated and '
'will be removed in a future release. Please use the python-file based '
'load arguments or install {module} to your python environment.'
).format(module=module_name)
)
else:
six.raise_from(
DagsterInvariantViolationError(
(
'Module {module} not found. Packages must be installed rather than '
'relying on the working directory to resolve module loading.'
).format(module=module_name)
),
error,
)
except ImportError as ie:
raise error
return module
@whitelist_for_serdes
class FileCodePointer(namedtuple('_FileCodePointer', 'python_file fn_name'), CodePointer):
def __new__(cls, python_file, fn_name):
return super(FileCodePointer, cls).__new__(
cls, check.str_param(python_file, 'python_file'), check.str_param(fn_name, 'fn_name'),
)
def load_target(self):
module = load_python_file(self.python_file, None)
if not hasattr(module, self.fn_name):
raise DagsterInvariantViolationError(
'{name} not found at module scope in file {file}.'.format(
name=self.fn_name, file=self.python_file
)
)
return getattr(module, self.fn_name)
def describe(self):
return '{self.python_file}::{self.fn_name}'.format(self=self)
def get_cli_args(self):
return '-f {python_file} -a {fn_name}'.format(
python_file=os.path.abspath(os.path.expanduser(self.python_file)), fn_name=self.fn_name
)
@whitelist_for_serdes
class FileInDirectoryCodePointer(
namedtuple('_FileInDirectoryCodePointer', 'python_file fn_name working_directory'), CodePointer
):
'''
Same as FileCodePointer, but with an additional field `working_directory` to help resolve
modules that are resolved from the python invocation directory. Required so other processes
that need to resolve modules (e.g. cron scheduler) can do so. This could be merged with the
`FileCodePointer` with `working_directory` as a None-able field, but not without changing
the origin_id for schedules. This would require purging schedule storage to resolve.
Should strongly consider merging when we need to do a storage migration.
https://github.com/dagster-io/dagster/issues/2673
'''
def __new__(cls, python_file, fn_name, working_directory):
return super(FileInDirectoryCodePointer, cls).__new__(
cls,
check.str_param(python_file, 'python_file'),
check.str_param(fn_name, 'fn_name'),
check.str_param(working_directory, 'working_directory'),
)
def load_target(self):
module = load_python_file(self.python_file, self.working_directory)
if not hasattr(module, self.fn_name):
raise DagsterInvariantViolationError(
'{name} not found at module scope in file {file}.'.format(
name=self.fn_name, file=self.python_file
)
)
return getattr(module, self.fn_name)
def describe(self):
return '{self.python_file}::{self.fn_name} -- [dir {self.working_directory}]'.format(
self=self
)
def get_cli_args(self):
return '-f {python_file} -a {fn_name} -d {directory}'.format(
python_file=os.path.abspath(os.path.expanduser(self.python_file)),
fn_name=self.fn_name,
directory=self.working_directory,
)
@whitelist_for_serdes
class ModuleCodePointer(namedtuple('_ModuleCodePointer', 'module fn_name'), CodePointer):
def __new__(cls, module, fn_name):
return super(ModuleCodePointer, cls).__new__(
cls, check.str_param(module, 'module'), check.str_param(fn_name, 'fn_name')
)
def load_target(self):
module = load_python_module(self.module, warn_only=True)
if not hasattr(module, self.fn_name):
raise DagsterInvariantViolationError(
'{name} not found in module {module}. dir: {dir}'.format(
name=self.fn_name, module=self.module, dir=dir(module)
)
)
return getattr(module, self.fn_name)
def describe(self):
return 'from {self.module} import {self.fn_name}'.format(self=self)
def get_cli_args(self):
return '-m {module} -a {fn_name}'.format(module=self.module, fn_name=self.fn_name)
@whitelist_for_serdes
class PackageCodePointer(namedtuple('_PackageCodePointer', 'module attribute'), CodePointer):
def __new__(cls, module, attribute):
return super(PackageCodePointer, cls).__new__(
cls, check.str_param(module, 'module'), check.str_param(attribute, 'attribute')
)
def load_target(self):
module = load_python_module(self.module)
if not hasattr(module, self.attribute):
raise DagsterInvariantViolationError(
'{name} not found in module {module}. dir: {dir}'.format(
name=self.attribute, module=self.module, dir=dir(module)
)
)
return getattr(module, self.attribute)
def describe(self):
return 'from {self.module} import {self.attribute}'.format(self=self)
def get_cli_args(self):
return '-m {module} -a {attribute}'.format(module=self.module, attribute=self.attribute)
def get_python_file_from_previous_stack_frame():
'''inspect.stack() lets us introspect the call stack; inspect.stack()[1] is the previous
stack frame.
In Python < 3.5, this is just a tuple, of which the python file of the previous frame is the 1st
element.
In Python 3.5+, this is a FrameInfo namedtuple instance; the python file of the previous frame
remains the 1st element.
'''
# Since this is now a function in this file, we need to go back two hops to find the
# callsite file.
previous_stack_frame = inspect.stack(0)[2]
# See: https://docs.python.org/3/library/inspect.html
if sys.version_info.major == 3 and sys.version_info.minor >= 5:
check.inst(previous_stack_frame, inspect.FrameInfo)
else:
check.inst(previous_stack_frame, tuple)
python_file = previous_stack_frame[1]
return os.path.abspath(python_file)
| 39.030769 | 100 | 0.659361 | [
"Apache-2.0"
] | idjevm/dagster | python_modules/dagster/dagster/core/code_pointer.py | 12,685 | Python |
# -*- coding: utf-8 -*-
import torch
from supar.utils.common import MIN
from supar.utils.fn import pad
from torch.autograd import Function
def tarjan(sequence):
r"""
Tarjan algorithm for finding Strongly Connected Components (SCCs) of a graph.
Args:
sequence (list):
List of head indices.
Yields:
A list of indices making up a SCC. All self-loops are ignored.
Examples:
>>> next(tarjan([2, 5, 0, 3, 1])) # (1 -> 5 -> 2 -> 1) is a cycle
[2, 5, 1]
"""
sequence = [-1] + sequence
# record the search order, i.e., the timestep
dfn = [-1] * len(sequence)
    # record the smallest timestep in a SCC
low = [-1] * len(sequence)
# push the visited into the stack
stack, onstack = [], [False] * len(sequence)
def connect(i, timestep):
dfn[i] = low[i] = timestep[0]
timestep[0] += 1
stack.append(i)
onstack[i] = True
for j, head in enumerate(sequence):
if head != i:
continue
if dfn[j] == -1:
yield from connect(j, timestep)
low[i] = min(low[i], low[j])
elif onstack[j]:
low[i] = min(low[i], dfn[j])
# a SCC is completed
if low[i] == dfn[i]:
cycle = [stack.pop()]
while cycle[-1] != i:
onstack[cycle[-1]] = False
cycle.append(stack.pop())
onstack[i] = False
# ignore the self-loop
if len(cycle) > 1:
yield cycle
timestep = [0]
for i in range(len(sequence)):
if dfn[i] == -1:
yield from connect(i, timestep)
def chuliu_edmonds(s):
r"""
ChuLiu/Edmonds algorithm for non-projective decoding :cite:`mcdonald-etal-2005-non`.
Some code is borrowed from `tdozat's implementation`_.
Descriptions of notations and formulas can be found in :cite:`mcdonald-etal-2005-non`.
Notes:
The algorithm does not guarantee to parse a single-root tree.
Args:
s (~torch.Tensor): ``[seq_len, seq_len]``.
Scores of all dependent-head pairs.
Returns:
~torch.Tensor:
A tensor with shape ``[seq_len]`` for the resulting non-projective parse tree.
.. _tdozat's implementation:
https://github.com/tdozat/Parser-v3
"""
s[0, 1:] = MIN
# prevent self-loops
s.diagonal()[1:].fill_(MIN)
# select heads with highest scores
tree = s.argmax(-1)
    # return the cycle found by the tarjan algorithm lazily
cycle = next(tarjan(tree.tolist()[1:]), None)
# if the tree has no cycles, then it is a MST
if not cycle:
return tree
# indices of cycle in the original tree
cycle = torch.tensor(cycle)
# indices of noncycle in the original tree
noncycle = torch.ones(len(s)).index_fill_(0, cycle, 0)
noncycle = torch.where(noncycle.gt(0))[0]
def contract(s):
# heads of cycle in original tree
cycle_heads = tree[cycle]
# scores of cycle in original tree
s_cycle = s[cycle, cycle_heads]
# calculate the scores of cycle's potential dependents
# s(c->x) = max(s(x'->x)), x in noncycle and x' in cycle
s_dep = s[noncycle][:, cycle]
# find the best cycle head for each noncycle dependent
deps = s_dep.argmax(1)
# calculate the scores of cycle's potential heads
# s(x->c) = max(s(x'->x) - s(a(x')->x') + s(cycle)), x in noncycle and x' in cycle
# a(v) is the predecessor of v in cycle
# s(cycle) = sum(s(a(v)->v))
s_head = s[cycle][:, noncycle] - s_cycle.view(-1, 1) + s_cycle.sum()
# find the best noncycle head for each cycle dependent
heads = s_head.argmax(0)
contracted = torch.cat((noncycle, torch.tensor([-1])))
# calculate the scores of contracted graph
s = s[contracted][:, contracted]
# set the contracted graph scores of cycle's potential dependents
s[:-1, -1] = s_dep[range(len(deps)), deps]
# set the contracted graph scores of cycle's potential heads
s[-1, :-1] = s_head[heads, range(len(heads))]
return s, heads, deps
# keep track of the endpoints of the edges into and out of cycle for reconstruction later
s, heads, deps = contract(s)
# y is the contracted tree
y = chuliu_edmonds(s)
# exclude head of cycle from y
y, cycle_head = y[:-1], y[-1]
# fix the subtree with no heads coming from the cycle
# len(y) denotes heads coming from the cycle
subtree = y < len(y)
# add the nodes to the new tree
tree[noncycle[subtree]] = noncycle[y[subtree]]
# fix the subtree with heads coming from the cycle
subtree = ~subtree
# add the nodes to the tree
tree[noncycle[subtree]] = cycle[deps[subtree]]
# fix the root of the cycle
cycle_root = heads[cycle_head]
# break the cycle and add the root of the cycle to the tree
tree[cycle[cycle_root]] = noncycle[cycle_head]
return tree
def mst(scores, mask, multiroot=False):
r"""
MST algorithm for decoding non-projective trees.
This is a wrapper for ChuLiu/Edmonds algorithm.
The algorithm first runs ChuLiu/Edmonds to parse a tree and then have a check of multi-roots,
If ``multiroot=True`` and there indeed exist multi-roots, the algorithm seeks to find
best single-root trees by iterating all possible single-root trees parsed by ChuLiu/Edmonds.
Otherwise the resulting trees are directly taken as the final outputs.
Args:
scores (~torch.Tensor): ``[batch_size, seq_len, seq_len]``.
Scores of all dependent-head pairs.
mask (~torch.BoolTensor): ``[batch_size, seq_len]``.
The mask to avoid parsing over padding tokens.
The first column serving as pseudo words for roots should be ``False``.
multiroot (bool):
Ensures to parse a single-root tree If ``False``.
Returns:
~torch.Tensor:
A tensor with shape ``[batch_size, seq_len]`` for the resulting non-projective parse trees.
Examples:
>>> scores = torch.tensor([[[-11.9436, -13.1464, -6.4789, -13.8917],
[-60.6957, -60.2866, -48.6457, -63.8125],
[-38.1747, -49.9296, -45.2733, -49.5571],
[-19.7504, -23.9066, -9.9139, -16.2088]]])
>>> scores[:, 0, 1:] = MIN
>>> scores.diagonal(0, 1, 2)[1:].fill_(MIN)
>>> mask = torch.tensor([[False, True, True, True]])
>>> mst(scores, mask)
tensor([[0, 2, 0, 2]])
"""
batch_size, seq_len, _ = scores.shape
scores = scores.cpu().unbind()
preds = []
for i, length in enumerate(mask.sum(1).tolist()):
s = scores[i][:length+1, :length+1]
tree = chuliu_edmonds(s)
roots = torch.where(tree[1:].eq(0))[0] + 1
if not multiroot and len(roots) > 1:
s_root = s[:, 0]
s_best = MIN
s = s.index_fill(1, torch.tensor(0), MIN)
for root in roots:
s[:, 0] = MIN
s[root, 0] = s_root[root]
t = chuliu_edmonds(s)
s_tree = s[1:].gather(1, t[1:].unsqueeze(-1)).sum()
if s_tree > s_best:
s_best, tree = s_tree, t
preds.append(tree)
return pad(preds, total_length=seq_len).to(mask.device)
class SampledLogsumexp(Function):
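    r"""
    logsumexp over ``dim`` with a stochastic backward pass: the incoming gradient is
    routed through a single one-hot sample drawn from ``softmax(x)``, an unbiased
    one-sample estimate of the exact softmax gradient.
    """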
@staticmethod
def forward(ctx, x, dim=-1):
ctx.dim = dim
ctx.save_for_backward(x)
return x.logsumexp(dim=dim)
@staticmethod
def backward(ctx, grad_output):
from torch.distributions import OneHotCategorical
        x, dim = *ctx.saved_tensors, ctx.dim
if ctx.needs_input_grad[0]:
return grad_output.unsqueeze(dim).mul(OneHotCategorical(logits=x.movedim(dim, -1)).sample().movedim(-1, dim)), None
return None, None
class Sparsemax(Function):
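    r"""
    Sparsemax (Martins and Astudillo, 2016): the Euclidean projection of the input onto
    the probability simplex along ``dim``, computed by sorting and thresholding.
    The backward pass propagates gradients only through the non-zero support.
    """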
@staticmethod
def forward(ctx, x, dim=-1):
ctx.dim = dim
sorted_x, _ = x.sort(dim, True)
z = sorted_x.cumsum(dim) - 1
k = x.new_tensor(range(1, sorted_x.size(dim) + 1)).view(-1, *[1] * (x.dim() - 1)).transpose(0, dim)
k = (k * sorted_x).gt(z).sum(dim, True)
tau = z.gather(dim, k - 1) / k
p = torch.clamp(x - tau, 0)
ctx.save_for_backward(k, p)
return p
@staticmethod
def backward(ctx, grad_output):
k, p, dim = *ctx.saved_tensors, ctx.dim
grad = grad_output.masked_fill(p.eq(0), 0)
grad = torch.where(p.ne(0), grad - grad.sum(dim, True) / k, grad)
return grad, None
sampled_logsumexp = SampledLogsumexp.apply
sparsemax = Sparsemax.apply
| 34.55814 | 127 | 0.574361 | [
"MIT"
] | zysite/parser | supar/structs/fn.py | 8,916 | Python |
# -*- coding: latin-1 -*-
# -----------------------------------------------------------------------------
# Copyright 2009-2011 Stephen Tiedemann <[email protected]>
#
# Licensed under the EUPL, Version 1.1 or - as soon they
# will be approved by the European Commission - subsequent
# versions of the EUPL (the "Licence");
# You may not use this work except in compliance with the
# Licence.
# You may obtain a copy of the Licence at:
#
# http://www.osor.eu/eupl
#
# Unless required by applicable law or agreed to in
# writing, software distributed under the Licence is
# distributed on an "AS IS" basis,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied.
# See the Licence for the specific language governing
# permissions and limitations under the Licence.
# -----------------------------------------------------------------------------
import logging
log = logging.getLogger(__name__)
import time
from types import *
import threading
import collections
import random
import nfc.clf
import nfc.dep
# local imports
from tco import *
from pdu import *
from err import *
from opt import *
RAW_ACCESS_POINT, LOGICAL_DATA_LINK, DATA_LINK_CONNECTION = range(3)
wks_map = {
"urn:nfc:sn:sdp" : 1,
"urn:nfc:sn:ip" : 2,
"urn:nfc:sn:obex": 3,
"urn:nfc:sn:snep": 4}
class ServiceAccessPoint(object):
def __init__(self, addr, llc):
self.llc = llc
self.addr = addr
self.sock_list = collections.deque()
self.send_list = collections.deque()
def __str__(self):
return "SAP {0:>2}".format(self.addr)
@property
def mode(self):
with self.llc.lock:
try:
if isinstance(self.sock_list[0], RawAccessPoint):
return RAW_ACCESS_POINT
if isinstance(self.sock_list[0], LogicalDataLink):
return LOGICAL_DATA_LINK
if isinstance(self.sock_list[0], DataLinkConnection):
return DATA_LINK_CONNECTION
except IndexError: return 0
def insert_socket(self, socket):
with self.llc.lock:
try: insertable = type(socket) == type(self.sock_list[0])
except IndexError: insertable = True
if insertable:
socket.bind(self.addr)
self.sock_list.appendleft(socket)
else: log.error("can't insert socket of different type")
return insertable
def remove_socket(self, socket):
assert socket.addr == self.addr
socket.close()
with self.llc.lock:
try: self.sock_list.remove(socket)
except ValueError: pass
if len(self.sock_list) == 0:
# completely remove this sap
self.llc.sap[self.addr] = None
def send(self, pdu):
self.send_list.append(pdu)
def shutdown(self):
while True:
try: socket = self.sock_list.pop()
except IndexError: return
log.debug("shutdown socket %s" % str(socket))
socket.bind(None); socket.close()
#
# enqueue() and dequeue() are called from llc run thread
#
def enqueue(self, pdu):
with self.llc.lock:
if isinstance(pdu, Connect):
for socket in self.sock_list:
if socket.state.LISTEN:
socket.enqueue(pdu)
return
else:
for socket in self.sock_list:
if pdu.ssap == socket.peer or socket.peer is None:
socket.enqueue(pdu)
return
if pdu.type in connection_mode_pdu_types:
self.send(DisconnectedMode(pdu.ssap, pdu.dsap, reason=1))
def dequeue(self, max_size):
with self.llc.lock:
for socket in self.sock_list:
#print "dequeue from", socket
pdu = socket.dequeue(max_size)
if pdu: return pdu
else:
try: return self.send_list.popleft()
except IndexError: pass
def sendack(self, max_size):
with self.llc.lock:
for socket in self.sock_list:
pdu = socket.sendack(max_size)
if pdu: return pdu
class ServiceDiscovery(object):
def __init__(self, llc):
self.llc = llc
self.snl = dict()
self.tids = range(256)
self.resp = threading.Condition(self.llc.lock)
self.sent = dict()
self.sdreq = collections.deque()
self.sdres = collections.deque()
self.dmpdu = collections.deque()
def __str__(self):
return "SAP 1"
@property
def mode(self):
return LOGICAL_DATA_LINK
def resolve(self, name):
with self.resp:
if self.snl is None: return None
log.debug("resolve service name '{0}'".format(name))
try: return self.snl[name]
except KeyError: pass
tid = random.choice(self.tids)
self.tids.remove(tid)
self.sdreq.append((tid, name))
while not self.snl is None and not name in self.snl:
self.resp.wait()
return None if self.snl is None else self.snl[name]
#
# enqueue() and dequeue() are called from llc run thread
#
def enqueue(self, pdu):
with self.llc.lock:
if isinstance(pdu, ServiceNameLookup) and not self.snl is None:
for tid, sap in pdu.sdres:
try: name = self.sent[tid]
except KeyError: pass
else:
log.debug("resolved '{0}' to remote addr {1}"
.format(name, sap))
self.snl[name] = sap
self.tids.append(tid)
self.resp.notify_all()
for tid, name in pdu.sdreq:
try: sap = self.llc.snl[name]
except KeyError: sap = 0
self.sdres.append((tid, sap))
def dequeue(self, max_size):
if max_size < 2:
return None
with self.llc.lock:
if len(self.sdres) > 0 or len(self.sdreq) > 0:
pdu = ServiceNameLookup(dsap=1, ssap=1)
max_size -= len(pdu)
while max_size > 0:
try: pdu.sdres.append(self.sdres.popleft())
except IndexError: break
for i in range(len(self.sdreq)):
tid, name = self.sdreq[0]
if 1 + len(name) > max_size:
self.sdreq.rotate(-1)
else:
pdu.sdreq.append(self.sdreq.popleft())
self.sent[tid] = name
return pdu
if len(self.dmpdu) > 0 and max_size >= 2:
return self.dmpdu.popleft()
def shutdown(self):
with self.llc.lock:
self.snl = None
self.resp.notify_all()
class LogicalLinkController(object):
def __init__(self, recv_miu=248, send_lto=500, send_agf=True,
symm_log=True):
self.lock = threading.RLock()
self.cfg = dict()
self.cfg['recv-miu'] = recv_miu
self.cfg['send-lto'] = send_lto
self.cfg['send-agf'] = send_agf
self.cfg['symm-log'] = symm_log
self.snl = dict({"urn:nfc:sn:sdp" : 1})
self.sap = 64 * [None]
self.sap[0] = ServiceAccessPoint(0, self)
self.sap[1] = ServiceDiscovery(self)
def __str__(self):
local = "Local(MIU={miu}, LTO={lto}ms)".format(
miu=self.cfg.get('recv-miu'), lto=self.cfg.get('send-lto'))
remote = "Remote(MIU={miu}, LTO={lto}ms)".format(
miu=self.cfg.get('send-miu'), lto=self.cfg.get('recv-lto'))
return "LLC: {local} {remote}".format(local=local, remote=remote)
def activate(self, mac):
assert type(mac) in (nfc.dep.Initiator, nfc.dep.Target)
self.mac = None
miu = self.cfg['recv-miu']
lto = self.cfg['send-lto']
wks = 1+sum(sorted([1<<sap for sap in self.snl.values() if sap < 15]))
pax = ParameterExchange(version=(1,1), miu=miu, lto=lto, wks=wks)
if type(mac) == nfc.dep.Initiator:
gb = mac.activate(gbi='Ffm'+pax.to_string()[2:])
self.run = self.run_as_initiator
role = "Initiator"
if type(mac) == nfc.dep.Target:
gb = mac.activate(gbt='Ffm'+pax.to_string()[2:], wt=9)
self.run = self.run_as_target
role = "Target"
if gb is not None and gb.startswith('Ffm') and len(gb) >= 6:
info = ["LLCP Link established as NFC-DEP {0}".format(role)]
info.append("Local LLCP Settings")
info.append(" LLCP Version: {0[0]}.{0[1]}".format(pax.version))
info.append(" Link Timeout: {0} ms".format(pax.lto))
info.append(" Max Inf Unit: {0} octet".format(pax.miu))
info.append(" Service List: {0:016b}".format(pax.wks))
pax = ProtocolDataUnit.from_string("\x00\x40" + str(gb[3:]))
info.append("Remote LLCP Settings")
info.append(" LLCP Version: {0[0]}.{0[1]}".format(pax.version))
info.append(" Link Timeout: {0} ms".format(pax.lto))
info.append(" Max Inf Unit: {0} octet".format(pax.miu))
info.append(" Service List: {0:016b}".format(pax.wks))
log.info('\n'.join(info))
self.cfg['rcvd-ver'] = pax.version
self.cfg['send-miu'] = pax.miu
self.cfg['recv-lto'] = pax.lto
self.cfg['send-wks'] = pax.wks
self.cfg['send-lsc'] = pax.lsc
log.debug("llc cfg {0}".format(self.cfg))
if type(mac) == nfc.dep.Initiator and mac.rwt is not None:
max_rwt = 4096/13.56E6 * 2**10
if mac.rwt > max_rwt:
log.warning("NFC-DEP RWT {0:.3f} exceeds max {1:.3f} sec"
.format(mac.rwt, max_rwt))
self.mac = mac
return bool(self.mac)
def terminate(self, reason):
log.debug("llcp link termination caused by {0}".format(reason))
if reason == "local choice":
self.exchange(Disconnect(0, 0), timeout=0.1)
self.mac.deactivate()
elif reason == "remote choice":
self.mac.deactivate()
# shutdown local services
for i in range(63, -1, -1):
if not self.sap[i] is None:
log.debug("closing service access point %d" % i)
self.sap[i].shutdown()
self.sap[i] = None
def exchange(self, pdu, timeout):
if not isinstance(pdu, Symmetry) or self.cfg.get('symm-log') is True:
log.debug("SEND {0}".format(pdu))
data = pdu.to_string() if pdu else None
try:
data = self.mac.exchange(data, timeout)
if data is None: return None
except nfc.clf.DigitalProtocolError as error:
log.debug("{0!r}".format(error))
return None
pdu = ProtocolDataUnit.from_string(data)
if not isinstance(pdu, Symmetry) or self.cfg.get('symm-log') is True:
log.debug("RECV {0}".format(pdu))
return pdu
def run_as_initiator(self, terminate=lambda: False):
recv_timeout = 1E-3 * (self.cfg['recv-lto'] + 10)
symm = 0
try:
pdu = self.collect(delay=0.01)
while not terminate():
if pdu is None: pdu = Symmetry()
pdu = self.exchange(pdu, recv_timeout)
if pdu is None:
return self.terminate(reason="link disruption")
if pdu == Disconnect(0, 0):
return self.terminate(reason="remote choice")
symm = symm + 1 if type(pdu) == Symmetry else 0
self.dispatch(pdu)
pdu = self.collect(delay=0.001)
if pdu is None and symm >= 10:
pdu = self.collect(delay=0.05)
else:
self.terminate(reason="local choice")
except KeyboardInterrupt:
print # move to new line
self.terminate(reason="local choice")
raise KeyboardInterrupt
except IOError:
self.terminate(reason="input/output error")
raise SystemExit
finally:
log.debug("llc run loop terminated on initiator")
def run_as_target(self, terminate=lambda: False):
recv_timeout = 1E-3 * (self.cfg['recv-lto'] + 10)
symm = 0
try:
pdu = None
while not terminate():
pdu = self.exchange(pdu, recv_timeout)
if pdu is None:
return self.terminate(reason="link disruption")
if pdu == Disconnect(0, 0):
return self.terminate(reason="remote choice")
symm = symm + 1 if type(pdu) == Symmetry else 0
self.dispatch(pdu)
pdu = self.collect(delay=0.001)
if pdu is None and symm >= 10:
pdu = self.collect(delay=0.05)
if pdu is None: pdu = Symmetry()
else:
self.terminate(reason="local choice")
except KeyboardInterrupt:
print # move to new line
self.terminate(reason="local choice")
raise KeyboardInterrupt
except IOError:
self.terminate(reason="input/output error")
raise SystemExit
finally:
log.debug("llc run loop terminated on target")
def collect(self, delay=None):
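        """Gather outbound PDUs from all active service access points.
        Multiple PDUs are combined into an AggregatedFrame when 'send-agf' is enabled
        and the remote MIU allows; returns a single PDU or None if nothing is queued.
        """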
if delay: time.sleep(delay)
pdu_list = list()
max_data = None
with self.lock:
active_sap_list = [sap for sap in self.sap if sap is not None]
for sap in active_sap_list:
#log.debug("query sap {0}, max_data={1}"
# .format(sap, max_data))
pdu = sap.dequeue(max_data if max_data else 2179)
if pdu is not None:
if self.cfg['send-agf'] == False:
return pdu
pdu_list.append(pdu)
if max_data is None:
max_data = self.cfg["send-miu"] + 2
max_data -= len(pdu)
if max_data < bool(len(pdu_list)==1) * 2 + 2 + 2:
break
else: max_data = self.cfg["send-miu"] + 2
for sap in active_sap_list:
if sap.mode == DATA_LINK_CONNECTION:
pdu = sap.sendack(max_data)
if not pdu is None:
if self.cfg['send-agf'] == False:
return pdu
pdu_list.append(pdu)
max_data -= len(pdu)
if max_data < bool(len(pdu_list)==1) * 2 + 2 + 3:
break
if len(pdu_list) > 1:
return AggregatedFrame(aggregate=pdu_list)
if len(pdu_list) == 1:
return pdu_list[0]
return None
def dispatch(self, pdu):
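        """Route an inbound PDU to its destination service access point.
        Aggregated frames are unpacked, Symmetry PDUs are ignored, connect-by-name
        requests on SAP 1 are resolved against the local service name list, and PDUs
        addressed to unbound SAPs are discarded.
        """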
if isinstance(pdu, Symmetry):
return
if isinstance(pdu, AggregatedFrame):
if pdu.dsap == 0 and pdu.ssap == 0:
[log.debug(" " + str(p)) for p in pdu]
[self.dispatch(p) for p in pdu]
return
if isinstance(pdu, Connect) and pdu.dsap == 1:
# connect-by-name
addr = self.snl.get(pdu.sn)
if not addr or self.sap[addr] is None:
log.debug("no service named '{0}'".format(pdu.sn))
pdu = DisconnectedMode(pdu.ssap, 1, reason=2)
self.sap[1].dmpdu.append(pdu)
return
pdu = Connect(dsap=addr, ssap=pdu.ssap, rw=pdu.rw, miu=pdu.miu)
with self.lock:
sap = self.sap[pdu.dsap]
if sap:
sap.enqueue(pdu)
return
log.debug("discard PDU {0}".format(str(pdu)))
return
def resolve(self, name):
return self.sap[1].resolve(name)
def socket(self, socket_type):
if socket_type == RAW_ACCESS_POINT:
return RawAccessPoint(recv_miu=self.cfg["recv-miu"])
if socket_type == LOGICAL_DATA_LINK:
return LogicalDataLink(recv_miu=self.cfg["recv-miu"])
if socket_type == DATA_LINK_CONNECTION:
return DataLinkConnection(recv_miu=128, recv_win=1)
def setsockopt(self, socket, option, value):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if option == SO_RCVMIU:
value = min(value, self.cfg['recv-miu'])
socket.setsockopt(option, value)
return socket.getsockopt(option)
def getsockopt(self, socket, option):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if isinstance(socket, LogicalDataLink):
# FIXME: set socket send miu when activated
socket.send_miu = self.cfg['send-miu']
if isinstance(socket, RawAccessPoint):
# FIXME: set socket send miu when activated
socket.send_miu = self.cfg['send-miu']
return socket.getsockopt(option)
def bind(self, socket, addr_or_name=None):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not socket.addr is None:
raise Error(errno.EINVAL)
if addr_or_name is None:
self._bind_by_none(socket)
elif type(addr_or_name) is IntType:
self._bind_by_addr(socket, addr_or_name)
elif type(addr_or_name) is StringType:
self._bind_by_name(socket, addr_or_name)
else: raise Error(errno.EFAULT)
def _bind_by_none(self, socket):
with self.lock:
try: addr = 32 + self.sap[32:64].index(None)
except ValueError: raise Error(errno.EAGAIN)
else:
socket.bind(addr)
self.sap[addr] = ServiceAccessPoint(addr, self)
self.sap[addr].insert_socket(socket)
def _bind_by_addr(self, socket, addr):
with self.lock:
if addr in range(32, 64):
if self.sap[addr] is None:
socket.bind(addr)
self.sap[addr] = ServiceAccessPoint(addr, self)
self.sap[addr].insert_socket(socket)
else: raise Error(errno.EADDRINUSE)
else: raise Error(errno.EACCES)
def _bind_by_name(self, socket, name):
if not (name.startswith("urn:nfc:sn") or
name.startswith("urn:nfc:xsn") or
name == "com.android.npp"): # invalid name but legacy
raise Error(errno.EFAULT)
with self.lock:
if self.snl.get(name) != None:
raise Error(errno.EADDRINUSE)
addr = wks_map.get(name)
if addr is None:
try: addr = 16 + self.sap[16:32].index(None)
except ValueError: raise Error(errno.EADDRNOTAVAIL)
socket.bind(addr)
self.sap[addr] = ServiceAccessPoint(addr, self)
self.sap[addr].insert_socket(socket)
self.snl[name] = addr
def connect(self, socket, dest):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not socket.is_bound:
self.bind(socket)
socket.connect(dest)
log.debug("connected ({0} ===> {1})".format(socket.addr, socket.peer))
def listen(self, socket, backlog):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not isinstance(socket, DataLinkConnection):
raise Error(errno.EOPNOTSUPP)
if not type(backlog) == IntType:
raise TypeError("backlog must be integer")
if backlog < 0:
raise ValueError("backlog mmust not be negative")
backlog = min(backlog, 16)
if not socket.is_bound:
self.bind(socket)
socket.listen(backlog)
def accept(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not isinstance(socket, DataLinkConnection):
raise Error(errno.EOPNOTSUPP)
while True:
client = socket.accept()
if not client.is_bound:
self.bind(client)
if self.sap[client.addr].insert_socket(client):
log.debug("new data link connection ({0} <=== {1})"
.format(client.addr, client.peer))
return client
else:
pdu = DisconnectedMode(client.peer, socket.addr, reason=0x20)
super(DataLinkConnection, socket).send(pdu)
def send(self, socket, message):
return self.sendto(socket, message, socket.peer)
def sendto(self, socket, message, dest):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if isinstance(socket, RawAccessPoint):
if not isinstance(message, ProtocolDataUnit):
raise TypeError("message must be a pdu on raw access point")
if not socket.is_bound:
self.bind(socket)
# FIXME: set socket send miu when activated
socket.send_miu = self.cfg['send-miu']
return socket.send(message)
if not type(message) == StringType:
raise TypeError("sendto() argument *message* must be a string")
if isinstance(socket, LogicalDataLink):
if dest is None:
raise Error(errno.EDESTADDRREQ)
if not socket.is_bound:
self.bind(socket)
# FIXME: set socket send miu when activated
socket.send_miu = self.cfg['send-miu']
return socket.sendto(message, dest)
if isinstance(socket, DataLinkConnection):
return socket.send(message)
def recv(self, socket):
message, sender = self.recvfrom(socket)
return message
def recvfrom(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not (socket.addr and self.sap[socket.addr]):
raise Error(errno.EBADF)
if isinstance(socket, RawAccessPoint):
return (socket.recv(), None)
if isinstance(socket, LogicalDataLink):
return socket.recvfrom()
if isinstance(socket, DataLinkConnection):
return (socket.recv(), socket.peer)
def poll(self, socket, event, timeout=None):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if not (socket.addr and self.sap[socket.addr]):
raise Error(errno.EBADF)
return socket.poll(event, timeout)
def close(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
if socket.is_bound:
self.sap[socket.addr].remove_socket(socket)
else: socket.close()
def getsockname(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
return socket.addr
def getpeername(self, socket):
if not isinstance(socket, TransmissionControlObject):
raise Error(errno.ENOTSOCK)
return socket.peer
| 38.011146 | 79 | 0.548071 | [
"MIT"
] | javgh/bitpay-brick | src/nfc/llcp/llc.py | 23,871 | Python |
import os
import cv2
import numpy as np
import time
cap = cv2.VideoCapture(0)
file_index = 0
if not os.path.exists("calibration_data/calibration_frames/"):
os.makedirs("calibration_data/calibration_frames/")
while True:
ret, frame = cap.read()
# display the resulting frame
cv2.imshow("frame", frame)
key = cv2.waitKey(100) & 0xFF
if key == ord("q"):
break
if key == ord("s"):
filename = "calibration_data/calibration_frames/frame_" + str(file_index) + str(time.time()) + ".png"
cv2.imwrite(filename, frame)
print(f"saved frame: {filename}")
file_index += 1
cv2.destroyAllWindows()
cap.release()
| 21.677419 | 109 | 0.654762 | [
"MIT"
] | federicozappone/sun_sensor_heading_estimation | grab_calibration_frames.py | 672 | Python |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
QGIS plugin 'Graphium'
/***************************************************************************
*
* Copyright 2020 Simon Gröchenig @ Salzburg Research
* eMail [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************************/
"""
from enum import Enum
class OsmHighwayTypes(Enum):
MOTORWAY = 'motorway'
MOTORWAY_LINK = 'motorway_link'
TRUNK = 'trunk'
TRUNK_LINK = 'trunk_link'
PRIMARY = 'primary'
PRIMARY_LINK = 'primary_link'
SECONDARY = 'secondary'
SECONDARY_LINK = 'secondary_link'
TERTIARY = 'tertiary'
TERTIARY_LINK = 'tertiary_link'
UNCLASSIFIED = 'unclassified'
RESIDENTIAL = 'residential'
LIVING_STREET = 'living_street'
SERVICE = 'service'
PEDESTRIAN = 'pedestrian'
TRACK = 'track'
BUS_GUIDEWAY = 'bus_guideway'
FOOTWAY = 'footway'
BRIDLEWAY = 'bridleway'
STEPS = 'steps'
    CORRIDOR = 'corridor'
PATH = 'path'
SIDEWALK = 'sidewalk'
CYCLEWAY = 'cycleway'
| 31.092593 | 77 | 0.59321 | [
"Apache-2.0"
] | graphium-project/graphium-qgis-plugin | graphium/graph_management/model/osm_highway_types.py | 1,680 | Python |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Creates datasets from the NIST sdf files and makes experiment setup jsons.
This module first breaks up the main NIST library dataset into a train/
validation/test set, and the replicates library into a validation and test set.
As all the molecules in the replicates file are also in the main NIST library,
the mainlib datasets will exclude inchikeys from the replicates library. All the
molecules in both datasets are to be included in one of these datasets, unless
an argument is passed for mainlib_maximum_num_molecules_to_use or
replicates_maximum_num_molecules_to_use.
The component datasets are saved as TFRecords, by the names defined in
dataset_setup_constants and the library from which the data came
(e.g. mainlib_train_from_mainlib.tfrecord). This will result in 7 TFRecord files
total, one each for the train/validation/test splits from the main library, and
two each for the replicates validation/test splits, one with its data from the
mainlib NIST file, and the other from the replicates file.
For each experiment setup included in
dataset_setup_constants.EXPERIMENT_SETUPS_LIST, a json file is written. This
json file name the files to be used for each part of the experiment, i.e.
library matching, spectra prediction.
Note: Reading sdf files from cns currently not supported.
Example usage:
make_train_test_split.py \
--main_sdf_name=testdata/test_14_mend.sdf \
--replicates_sdf_name=testdata/test_2_mend.sdf \
--output_master_dir=<output_dir_name>
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import random
from absl import app
from absl import flags
import dataset_setup_constants as ds_constants
import mass_spec_constants as ms_constants
import parse_sdf_utils
import train_test_split_utils
import six
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_string(
'main_sdf_name', 'testdata/test_14_mend.sdf',
'specify full path of sdf file to parse, to be used for'
' training sets, and validation/test sets')
flags.DEFINE_string(
'replicates_sdf_name',
'testdata/test_2_mend.sdf',
'specify full path of a second sdf file to parse, to be'
    ' used for the validation/test set. Molecules in this sdf'
' will be excluded from the main train/val/test sets.')
# Note: For family based splitting, all molecules passing the filter will be
# placed in validation/test datasets, and then split according to the relative
# ratio between the validation/test fractions. If these are both equal to 0.0,
# these values will be over written to 0.5 and 0.5.
flags.DEFINE_list(
'main_train_val_test_fractions', '1.0,0.0,0.0',
'specify how large to make the train, val, and test sets'
' as a fraction of the whole dataset.')
flags.DEFINE_integer('mainlib_maximum_num_molecules_to_use', None,
'specify how many total samples to use for parsing')
flags.DEFINE_integer('replicates_maximum_num_molecules_to_use', None,
'specify how many total samples to use for parsing')
flags.DEFINE_list(
'replicates_train_val_test_fractions', '0.0,0.5,0.5',
'specify fraction of replicates molecules to use for'
' for the three replicates sample files.')
flags.DEFINE_enum(
'splitting_type', 'random', ['random', 'steroid', 'diazo'],
'specify splitting method to use for creating '
'training/validation/test sets')
flags.DEFINE_string('output_master_dir', '/tmp/output_dataset_dir',
'specify directory to save records')
flags.DEFINE_integer('max_atoms', ms_constants.MAX_ATOMS,
'specify maximum number of atoms to allow')
flags.DEFINE_integer('max_mass_spec_peak_loc', ms_constants.MAX_PEAK_LOC,
'specify greatest m/z spectrum peak to allow')
INCHIKEY_FILENAME_END = '.inchikey.txt'
TFRECORD_FILENAME_END = '.tfrecord'
NP_LIBRARY_ARRAY_END = '.spectra_library.npy'
FROM_MAINLIB_FILENAME_MODIFIER = '_from_mainlib'
FROM_REPLICATES_FILENAME_MODIFIER = '_from_replicates'
def make_mainlib_replicates_train_test_split(
mainlib_mol_list,
replicates_mol_list,
splitting_type,
mainlib_fractions,
replicates_fractions,
mainlib_maximum_num_molecules_to_use=None,
replicates_maximum_num_molecules_to_use=None,
rseed=42):
"""Makes train/validation/test inchikey lists from two lists of rdkit.Mol.
Args:
mainlib_mol_list : list of molecules from main library
replicates_mol_list : list of molecules from replicates library
splitting_type : type of splitting to use for validation splits.
mainlib_fractions : TrainValTestFractions namedtuple
holding desired fractions for train/val/test split of mainlib
replicates_fractions : TrainValTestFractions namedtuple
holding desired fractions for train/val/test split of replicates.
For the replicates set, the train fraction should be set to 0.
mainlib_maximum_num_molecules_to_use : Largest number of molecules to use
when making datasets from mainlib
replicates_maximum_num_molecules_to_use : Largest number of molecules to use
when making datasets from replicates
rseed : random seed for shuffling
Returns:
main_inchikey_dict : Dict that is keyed by inchikey, containing a list of
rdkit.Mol objects corresponding to that inchikey from the mainlib
replicates_inchikey_dict : Dict that is keyed by inchikey, containing a list
of rdkit.Mol objects corresponding to that inchikey from the replicates
library
main_replicates_split_inchikey_lists_dict : dict with keys :
'mainlib_train', 'mainlib_validation', 'mainlib_test',
'replicates_train', 'replicates_validation', 'replicates_test'
Values are lists of inchikeys corresponding to each dataset.
"""
random.seed(rseed)
main_inchikey_dict = train_test_split_utils.make_inchikey_dict(
mainlib_mol_list)
main_inchikey_list = main_inchikey_dict.keys()
if six.PY3:
main_inchikey_list = list(main_inchikey_list)
if mainlib_maximum_num_molecules_to_use is not None:
main_inchikey_list = random.sample(main_inchikey_list,
mainlib_maximum_num_molecules_to_use)
replicates_inchikey_dict = train_test_split_utils.make_inchikey_dict(
replicates_mol_list)
replicates_inchikey_list = replicates_inchikey_dict.keys()
if six.PY3:
replicates_inchikey_list = list(replicates_inchikey_list)
if replicates_maximum_num_molecules_to_use is not None:
replicates_inchikey_list = random.sample(
replicates_inchikey_list, replicates_maximum_num_molecules_to_use)
# Make train/val/test splits for main dataset.
main_train_validation_test_inchikeys = (
train_test_split_utils.make_train_val_test_split_inchikey_lists(
main_inchikey_list,
main_inchikey_dict,
mainlib_fractions,
holdout_inchikey_list=replicates_inchikey_list,
splitting_type=splitting_type))
# Make train/val/test splits for replicates dataset.
replicates_validation_test_inchikeys = (
train_test_split_utils.make_train_val_test_split_inchikey_lists(
replicates_inchikey_list,
replicates_inchikey_dict,
replicates_fractions,
splitting_type=splitting_type))
component_inchikey_dict = {
ds_constants.MAINLIB_TRAIN_BASENAME:
main_train_validation_test_inchikeys.train,
ds_constants.MAINLIB_VALIDATION_BASENAME:
main_train_validation_test_inchikeys.validation,
ds_constants.MAINLIB_TEST_BASENAME:
main_train_validation_test_inchikeys.test,
ds_constants.REPLICATES_TRAIN_BASENAME:
replicates_validation_test_inchikeys.train,
ds_constants.REPLICATES_VALIDATION_BASENAME:
replicates_validation_test_inchikeys.validation,
ds_constants.REPLICATES_TEST_BASENAME:
replicates_validation_test_inchikeys.test
}
train_test_split_utils.assert_all_lists_mutally_exclusive(
list(component_inchikey_dict.values()))
  # Check that the union of the component inchikey lists is equal to the set of
  # inchikeys in the main and replicates libraries.
all_inchikeys_in_components = []
for ikey_list in list(component_inchikey_dict.values()):
for ikey in ikey_list:
all_inchikeys_in_components.append(ikey)
assert set(main_inchikey_list + replicates_inchikey_list) == set(
all_inchikeys_in_components
), ('The inchikeys in the original inchikey dictionary are not all included'
' in the train/val/test component libraries')
return (main_inchikey_dict, replicates_inchikey_dict, component_inchikey_dict)
def write_list_of_inchikeys(inchikey_list, base_name, output_dir):
"""Write list of inchikeys as a text file."""
inchikey_list_name = base_name + INCHIKEY_FILENAME_END
with tf.gfile.Open(os.path.join(output_dir, inchikey_list_name),
'w') as writer:
for inchikey in inchikey_list:
writer.write('%s\n' % inchikey)
def write_all_dataset_files(inchikey_dict,
inchikey_list,
base_name,
output_dir,
max_atoms,
max_mass_spec_peak_loc,
make_library_array=False):
"""Helper function for writing all the files associated with a TFRecord.
Args:
inchikey_dict : Full dictionary keyed by inchikey containing lists of
rdkit.Mol objects
inchikey_list : List of inchikeys to include in dataset
base_name : Base name for the dataset
output_dir : Path for saving all TFRecord files
max_atoms : Maximum number of atoms to include for a given molecule
    max_mass_spec_peak_loc : Largest m/z peak to include in a spectrum.
make_library_array : Flag for whether to make library array
Returns:
Saves 3 files:
basename.tfrecord : a TFRecord file,
basename.inchikey.txt : a text file with all the inchikeys in the dataset
basename.tfrecord.info: a text file with one line describing
the length of the TFRecord file.
Also saves if make_library_array is set:
    basename.spectra_library.npy : see parse_sdf_utils.write_dicts_to_example
"""
record_name = base_name + TFRECORD_FILENAME_END
mol_list = train_test_split_utils.make_mol_list_from_inchikey_dict(
inchikey_dict, inchikey_list)
if make_library_array:
library_array_pathname = base_name + NP_LIBRARY_ARRAY_END
parse_sdf_utils.write_dicts_to_example(
mol_list, os.path.join(output_dir, record_name),
max_atoms, max_mass_spec_peak_loc,
os.path.join(output_dir, library_array_pathname))
else:
parse_sdf_utils.write_dicts_to_example(
mol_list, os.path.join(output_dir, record_name), max_atoms,
max_mass_spec_peak_loc)
write_list_of_inchikeys(inchikey_list, base_name, output_dir)
parse_sdf_utils.write_info_file(mol_list, os.path.join(
output_dir, record_name))
def write_mainlib_split_datasets(component_inchikey_dict, mainlib_inchikey_dict,
output_dir, max_atoms, max_mass_spec_peak_loc):
"""Write all train/val/test set TFRecords from main NIST sdf file."""
for component_kwarg in component_inchikey_dict.keys():
component_mainlib_filename = (
component_kwarg + FROM_MAINLIB_FILENAME_MODIFIER)
if component_kwarg == ds_constants.MAINLIB_TRAIN_BASENAME:
write_all_dataset_files(
mainlib_inchikey_dict,
component_inchikey_dict[component_kwarg],
component_mainlib_filename,
output_dir,
max_atoms,
max_mass_spec_peak_loc,
make_library_array=True)
else:
write_all_dataset_files(mainlib_inchikey_dict,
component_inchikey_dict[component_kwarg],
component_mainlib_filename, output_dir, max_atoms,
max_mass_spec_peak_loc)
def write_replicates_split_datasets(component_inchikey_dict,
replicates_inchikey_dict, output_dir,
max_atoms, max_mass_spec_peak_loc):
"""Write replicates val/test set TFRecords from replicates sdf file."""
for component_kwarg in [
ds_constants.REPLICATES_VALIDATION_BASENAME,
ds_constants.REPLICATES_TEST_BASENAME
]:
component_replicates_filename = (
component_kwarg + FROM_REPLICATES_FILENAME_MODIFIER)
write_all_dataset_files(replicates_inchikey_dict,
component_inchikey_dict[component_kwarg],
component_replicates_filename, output_dir,
max_atoms, max_mass_spec_peak_loc)
def combine_inchikey_sets(dataset_subdivision_list, dataset_split_dict):
"""A function to combine lists of inchikeys that are values from a dict.
Args:
dataset_subdivision_list: List of keys in dataset_split_dict to combine
into one list
dataset_split_dict: dict containing keys in dataset_subdivision_list, with
lists of inchikeys as values.
Returns:
A list of inchikeys.
"""
dataset_inchikey_list = []
for dataset_subdivision_name in dataset_subdivision_list:
dataset_inchikey_list.extend(dataset_split_dict[dataset_subdivision_name])
return dataset_inchikey_list
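# Illustrative example (hypothetical inchikeys, not from any real dataset):
#   dataset_split_dict = {'mainlib_train': ['KEYA', 'KEYB'],
#                         'replicates_validation': ['KEYC']}
#   combine_inchikey_sets(['mainlib_train', 'replicates_validation'],
#                         dataset_split_dict)
#   returns ['KEYA', 'KEYB', 'KEYC']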
def check_experiment_setup(experiment_setup_dict, component_inchikey_dict):
"""Validates experiment setup for given lists of inchikeys."""
# Check that the union of the library matching observed and library
# matching predicted sets are equal to the set of inchikeys in the
# mainlib_inchikey_dict
all_inchikeys_in_library = (
combine_inchikey_sets(
experiment_setup_dict[ds_constants.LIBRARY_MATCHING_OBSERVED_KEY],
component_inchikey_dict) +
combine_inchikey_sets(
experiment_setup_dict[ds_constants.LIBRARY_MATCHING_PREDICTED_KEY],
component_inchikey_dict))
all_inchikeys_in_use = []
for kwarg in component_inchikey_dict.keys():
all_inchikeys_in_use.extend(component_inchikey_dict[kwarg])
assert set(all_inchikeys_in_use) == set(all_inchikeys_in_library), (
      'Inchikeys in library for library matching do not match the full dataset.')
# Check that all inchikeys in query are found in full library of inchikeys.
assert set(
combine_inchikey_sets(
experiment_setup_dict[ds_constants.LIBRARY_MATCHING_QUERY_KEY],
component_inchikey_dict)).issubset(set(all_inchikeys_in_library)), (
              'Inchikeys in query set for library matching not '
              'found in library.')
def write_json_for_experiment(experiment_setup, output_dir):
"""Writes json for experiment, recording relevant files for each component.
Writes a json containing a list of TFRecord file names to read
for each experiment component, i.e. spectrum_prediction, library_matching.
Args:
experiment_setup: A dataset_setup_constants.ExperimentSetup tuple
output_dir: directory to write json
Returns:
Writes json recording which files to load for each component
of the experiment
Raises:
ValueError: if the experiment component is not specified to be taken from
either the main NIST library or the replicates library.
"""
experiment_json_dict = {}
for dataset_kwarg in experiment_setup.experiment_setup_dataset_dict:
if dataset_kwarg in experiment_setup.data_to_get_from_mainlib:
experiment_json_dict[dataset_kwarg] = [
(component_basename + FROM_MAINLIB_FILENAME_MODIFIER +
TFRECORD_FILENAME_END) for component_basename in
experiment_setup.experiment_setup_dataset_dict[dataset_kwarg]
]
elif dataset_kwarg in experiment_setup.data_to_get_from_replicates:
experiment_json_dict[dataset_kwarg] = [
(component_basename + FROM_REPLICATES_FILENAME_MODIFIER +
TFRECORD_FILENAME_END) for component_basename in
experiment_setup.experiment_setup_dataset_dict[dataset_kwarg]
]
else:
raise ValueError('Did not specify origin for {}.'.format(dataset_kwarg))
training_spectra_filename = (
ds_constants.MAINLIB_TRAIN_BASENAME + FROM_MAINLIB_FILENAME_MODIFIER +
NP_LIBRARY_ARRAY_END)
experiment_json_dict[
ds_constants.TRAINING_SPECTRA_ARRAY_KEY] = training_spectra_filename
with tf.gfile.Open(os.path.join(output_dir, experiment_setup.json_name),
'w') as writer:
experiment_json = json.dumps(experiment_json_dict)
writer.write(experiment_json)
def main(_):
tf.gfile.MkDir(FLAGS.output_master_dir)
main_train_val_test_fractions_tuple = tuple(
[float(elem) for elem in FLAGS.main_train_val_test_fractions])
main_train_val_test_fractions = train_test_split_utils.TrainValTestFractions(
*main_train_val_test_fractions_tuple)
replicates_train_val_test_fractions_tuple = tuple(
[float(elem) for elem in FLAGS.replicates_train_val_test_fractions])
replicates_train_val_test_fractions = (
train_test_split_utils.TrainValTestFractions(
*replicates_train_val_test_fractions_tuple))
mainlib_mol_list = parse_sdf_utils.get_sdf_to_mol(
FLAGS.main_sdf_name, max_atoms=FLAGS.max_atoms)
replicates_mol_list = parse_sdf_utils.get_sdf_to_mol(
FLAGS.replicates_sdf_name, max_atoms=FLAGS.max_atoms)
# Breaks the inchikeys lists into train/validation/test splits.
(mainlib_inchikey_dict, replicates_inchikey_dict, component_inchikey_dict) = (
make_mainlib_replicates_train_test_split(
mainlib_mol_list,
replicates_mol_list,
FLAGS.splitting_type,
main_train_val_test_fractions,
replicates_train_val_test_fractions,
mainlib_maximum_num_molecules_to_use=FLAGS.
mainlib_maximum_num_molecules_to_use,
replicates_maximum_num_molecules_to_use=FLAGS.
replicates_maximum_num_molecules_to_use))
# Writes TFRecords for each component using info from the main library file
write_mainlib_split_datasets(component_inchikey_dict, mainlib_inchikey_dict,
FLAGS.output_master_dir, FLAGS.max_atoms,
FLAGS.max_mass_spec_peak_loc)
# Writes TFRecords for each component using info from the replicates file
write_replicates_split_datasets(
component_inchikey_dict, replicates_inchikey_dict,
FLAGS.output_master_dir, FLAGS.max_atoms, FLAGS.max_mass_spec_peak_loc)
for experiment_setup in ds_constants.EXPERIMENT_SETUPS_LIST:
# Check that experiment setup is valid.
check_experiment_setup(experiment_setup.experiment_setup_dataset_dict,
component_inchikey_dict)
# Write a json for the experiment setups, pointing to local files.
write_json_for_experiment(experiment_setup, FLAGS.output_master_dir)
if __name__ == '__main__':
app.run(main)
| 42.566449 | 80 | 0.749718 | [
"Apache-2.0"
] | berlinguyinca/deep-molecular-massspec | make_train_test_split.py | 19,538 | Python |
class Solution(object):
def combinationSum(self, candidates, target):
"""
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
"""
def recurhelper(nums,res,path,target,start):
if target==0:
res.append(path)
return
if target<0:
return
if target>0:
for i in xrange(start,len(nums)):
if nums[i]<=target:
recurhelper(nums,res,path+[nums[i]],target-nums[i],i)
res=[]
candidates.sort()
recurhelper(candidates,res,[],target,0)
return res | 29.434783 | 77 | 0.483013 | [
"MIT"
] | Tanych/CodeTracking | 39-Combination-Sum/solution.py | 677 | Python |
playlists = """https://www.youtube.com/playlist?list=PLWOxuTkHhAq70Q4ZF1EcsRaW1uHs6zygL
https://www.youtube.com/playlist?list=PLObmubSxSaRKdWB3BoyndydqDGkgbMlbC
https://www.youtube.com/playlist?list=PLxN__ARc_RfpOAdnB14rBSF6xDHDySNE4
https://www.youtube.com/playlist?list=PLxN__ARc_Rfrsee6JJkU198Fa0rxjbTgQ
https://www.youtube.com/playlist?list=PLxN__ARc_Rfr3AipUqXTqjbL990_pUBCd
https://www.youtube.com/playlist?list=PLxN__ARc_RfrYYGV0uN9G79ASYNYHN5bi
https://www.youtube.com/playlist?list=PLnyIUJZrQfakO0xkKPxceT4rOhzwd9NMU
https://www.youtube.com/playlist?list=PLnyIUJZrQfam-mBL4Wq3qncy68DfeI_Op
https://www.youtube.com/playlist?list=PLnyIUJZrQfalS8yK1K7NGWbZXiY6nnDxg
https://www.youtube.com/playlist?list=PLnyIUJZrQfaluNlLUmbGoDJUUas4T7W_C
https://www.youtube.com/playlist?list=PLjqsQycWIhFfKtcQlGgnGDQuUbUjSgqrB
https://www.youtube.com/playlist?list=PLaK1z6C61upcg-fWsHoiWtDwsKP7Ti23r
https://www.youtube.com/playlist?list=PLZ7Ye5d76T7j9l2BONR05FDyUaczK3NWP
https://www.youtube.com/playlist?list=PLZ7Ye5d76T7gzXf6-_QdT3qRplcIODcs0
https://www.youtube.com/playlist?list=PLZ7Ye5d76T7j-QdeSLenweE_E_w2gWGDd
https://www.youtube.com/playlist?list=PLcfHpRUOJGcOSIYV7UX0Fxb3N9aYiIeIX
https://www.youtube.com/playlist?list=PLcfHpRUOJGcN3QcWyDJ9D6e_BDmkmRoaX
https://www.youtube.com/playlist?list=PLcfHpRUOJGcN_q9JvR09_Wy4Yhl_9eG6r
https://www.youtube.com/playlist?list=PL3R3ezbvlgNpyAxZzQ3vEIO3R6XYzX-Oa
https://www.youtube.com/playlist?list=PLTufViCW8MklAol1CKXYFmWB-6LMS5cLS
https://www.youtube.com/playlist?list=PL2kIyfRWfPs6UC--1IqcxcPSfXsNyGFiK
https://www.youtube.com/playlist?list=PLSQCEkZEhfq9mra1sy7vI4Ra2kgPslVaX
https://www.youtube.com/playlist?list=PL1ad0PFSaS7c0k5guh90oS-7csla07Or1
https://www.youtube.com/playlist?list=PLy_Iceng2X1A8MwZ8hWC9ITqz0fYU3Mau
https://www.youtube.com/playlist?list=PLTufViCW8MknjUllPTnq5PtRlZTAJ2a-9
https://www.youtube.com/playlist?list=PLTufViCW8MknQc1Dgth6b0CyFhXWClDJs
https://www.youtube.com/playlist?list=PLTufViCW8MkkAnqcreRA78ytpxuf2r8ac
https://www.youtube.com/playlist?list=PLttdYH6hzg6fmQVkawJWxBKPRGVxvm11I
https://www.youtube.com/playlist?list=PLAmqhEyEmobA1M3t4XjlAQthTRToIYku4
https://www.youtube.com/playlist?list=PLAmqhEyEmobBAo9FR9_8JRt9ykj2k92-e
https://www.youtube.com/playlist?list=PLJJd8CUjfjZqb9tGMTfAYPfVcO-yMSiV3
https://www.youtube.com/playlist?list=PLJJd8CUjfjZqLKBBP0D_jqtG4OPYTTByR
https://www.youtube.com/playlist?list=PLAmqhEyEmobDzbAqxuYjpjDLZFnhws7no
https://www.youtube.com/playlist?list=PLgE5g9Ln1vGHGjsDr-4t-Jsw4C1mjWIQV"""
playlists = playlists.split("\n") | 70.388889 | 87 | 0.866219 | [
"MIT"
] | portikCoder/yt-list-downloader | src/yt_list_downloader/input.py | 2,534 | Python |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v3/proto/services/campaign_criterion_simulation_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v3.proto.resources import campaign_criterion_simulation_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_campaign__criterion__simulation__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.api import client_pb2 as google_dot_api_dot_client__pb2
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v3/proto/services/campaign_criterion_simulation_service.proto',
package='google.ads.googleads.v3.services',
syntax='proto3',
serialized_options=_b('\n$com.google.ads.googleads.v3.servicesB\'CampaignCriterionSimulationServiceProtoP\001ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v3/services;services\242\002\003GAA\252\002 Google.Ads.GoogleAds.V3.Services\312\002 Google\\Ads\\GoogleAds\\V3\\Services\352\002$Google::Ads::GoogleAds::V3::Services'),
serialized_pb=_b('\nRgoogle/ads/googleads_v3/proto/services/campaign_criterion_simulation_service.proto\x12 google.ads.googleads.v3.services\x1aKgoogle/ads/googleads_v3/proto/resources/campaign_criterion_simulation.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\"|\n%GetCampaignCriterionSimulationRequest\x12S\n\rresource_name\x18\x01 \x01(\tB<\xe0\x41\x02\xfa\x41\x36\n4googleads.googleapis.com/CampaignCriterionSimulation2\xc5\x02\n\"CampaignCriterionSimulationService\x12\x81\x02\n\x1eGetCampaignCriterionSimulation\x12G.google.ads.googleads.v3.services.GetCampaignCriterionSimulationRequest\x1a>.google.ads.googleads.v3.resources.CampaignCriterionSimulation\"V\x82\xd3\xe4\x93\x02@\x12>/v3/{resource_name=customers/*/campaignCriterionSimulations/*}\xda\x41\rresource_name\x1a\x1b\xca\x41\x18googleads.googleapis.comB\x8e\x02\n$com.google.ads.googleads.v3.servicesB\'CampaignCriterionSimulationServiceProtoP\x01ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v3/services;services\xa2\x02\x03GAA\xaa\x02 Google.Ads.GoogleAds.V3.Services\xca\x02 Google\\Ads\\GoogleAds\\V3\\Services\xea\x02$Google::Ads::GoogleAds::V3::Servicesb\x06proto3')
,
dependencies=[google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_campaign__criterion__simulation__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_client__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,google_dot_api_dot_resource__pb2.DESCRIPTOR,])
_GETCAMPAIGNCRITERIONSIMULATIONREQUEST = _descriptor.Descriptor(
name='GetCampaignCriterionSimulationRequest',
full_name='google.ads.googleads.v3.services.GetCampaignCriterionSimulationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v3.services.GetCampaignCriterionSimulationRequest.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=_b('\340A\002\372A6\n4googleads.googleapis.com/CampaignCriterionSimulation'), file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=312,
serialized_end=436,
)
DESCRIPTOR.message_types_by_name['GetCampaignCriterionSimulationRequest'] = _GETCAMPAIGNCRITERIONSIMULATIONREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetCampaignCriterionSimulationRequest = _reflection.GeneratedProtocolMessageType('GetCampaignCriterionSimulationRequest', (_message.Message,), dict(
DESCRIPTOR = _GETCAMPAIGNCRITERIONSIMULATIONREQUEST,
__module__ = 'google.ads.googleads_v3.proto.services.campaign_criterion_simulation_service_pb2'
,
__doc__ = """Request message for
[CampaignCriterionSimulationService.GetCampaignCriterionSimulation][google.ads.googleads.v3.services.CampaignCriterionSimulationService.GetCampaignCriterionSimulation].
Attributes:
resource_name:
Required. The resource name of the campaign criterion
simulation to fetch.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v3.services.GetCampaignCriterionSimulationRequest)
))
_sym_db.RegisterMessage(GetCampaignCriterionSimulationRequest)
DESCRIPTOR._options = None
_GETCAMPAIGNCRITERIONSIMULATIONREQUEST.fields_by_name['resource_name']._options = None
_CAMPAIGNCRITERIONSIMULATIONSERVICE = _descriptor.ServiceDescriptor(
name='CampaignCriterionSimulationService',
full_name='google.ads.googleads.v3.services.CampaignCriterionSimulationService',
file=DESCRIPTOR,
index=0,
serialized_options=_b('\312A\030googleads.googleapis.com'),
serialized_start=439,
serialized_end=764,
methods=[
_descriptor.MethodDescriptor(
name='GetCampaignCriterionSimulation',
full_name='google.ads.googleads.v3.services.CampaignCriterionSimulationService.GetCampaignCriterionSimulation',
index=0,
containing_service=None,
input_type=_GETCAMPAIGNCRITERIONSIMULATIONREQUEST,
output_type=google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_campaign__criterion__simulation__pb2._CAMPAIGNCRITERIONSIMULATION,
serialized_options=_b('\202\323\344\223\002@\022>/v3/{resource_name=customers/*/campaignCriterionSimulations/*}\332A\rresource_name'),
),
])
_sym_db.RegisterServiceDescriptor(_CAMPAIGNCRITERIONSIMULATIONSERVICE)
DESCRIPTOR.services_by_name['CampaignCriterionSimulationService'] = _CAMPAIGNCRITERIONSIMULATIONSERVICE
# @@protoc_insertion_point(module_scope)
| 56.769912 | 1,247 | 0.833983 | [
"Apache-2.0"
] | BenRKarl/google-ads-python | google/ads/google_ads/v3/proto/services/campaign_criterion_simulation_service_pb2.py | 6,415 | Python |
import numpy as np
from env import Env
from models import PolicyNet, Critic
from utils import one_hot
import torch
from torch.optim import Adam
import time
import os
from datetime import datetime
import math
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
#------------------------SET PARAMETERS----------------------------
SEED = 17
BATCH_SIZE = 128
N_NODES = 11
N_DEPOT = 1
NUM_LAYERS = 1
CAPACITY = [20,15,10]
MAX_DEMAND = 10
N_VEHICLES = len(CAPACITY)
DIM_STATIC = 2
DIM_DYNAMIC = 1 + N_VEHICLES
DIM_LOAD = N_VEHICLES
DIM_EMBED = 128
MAX_EP_lEN = 16
GAMMA = 0.99
ENTROPY_REG = 0.01
MAX_GRAD_NORM = 2
DROPOUT = 0.1
EMBED_TYPE = 'conv1d'
LOG_INTERVAL = 200
#----------------INITIALIZE ENVIRONMENT AND POLICIES----------------
env_test = Env(seed = SEED, batch_size = BATCH_SIZE, capacity = CAPACITY,
n_nodes = N_NODES, n_depot = N_DEPOT, max_demand = MAX_DEMAND, n_agents = N_VEHICLES)
policy = [PolicyNet(batch_size = BATCH_SIZE, n_nodes = N_NODES, n_agents=N_VEHICLES, num_layers = NUM_LAYERS,
dim_s = DIM_STATIC, dim_d = DIM_DYNAMIC,
dim_embed = DIM_EMBED, n_glimpses = 0, embeding_type=EMBED_TYPE,
dropout = DROPOUT).to(device) for i in range(N_VEHICLES)]
#------------------LOAD TRAINED MODEL---------------------------
model_dir = 'weights/model_exp_1.pt'
policy_name = "policy_agent_X"
if os.path.isfile(model_dir):
checkpoint = torch.load(model_dir,map_location=device)
else:
raise ValueError('No model file!')
for agent_id in range(N_VEHICLES):
p_name = policy_name.replace("X",str(agent_id))
policy[agent_id].load_state_dict(checkpoint[p_name])
#-----------------RUN TRAINED POLICY----------------
num_epochs = math.ceil(1000/BATCH_SIZE)
total_tests = []
total_times = []
for i in range(num_epochs):
start = time.time()
o_t, d_t, r_t = env_test.reset(), False, 0
actions_ep = []
log_probs_ep = []
rewards_ep = []
values_ep = []
last_hh_t = [None]*N_VEHICLES
for t in range(int(MAX_EP_lEN) ):
actions = []
actions_one_hot = []
log_probs = []
values = []
for agent_id in range(N_VEHICLES) :
model = policy[agent_id].eval()
logits, prob , log_p, last_hh_t[agent_id] = model(o_t, last_hh_t[agent_id], agent_id)
#--------- GREEDY POLICY ------------
act = torch.argmax(prob, dim =1) # [ batch size ]
actions.append(act.detach())
ot_2, d_t, r_t = env_test.step(act.detach().unsqueeze(1), agent_id)
o_t = ot_2
values.append( r_t )
r_step = torch.stack(values, dim = 1) #[batch_size, n_agents]
a = torch.stack(actions, dim = 1) #[batch_size, n_agents]
actions_ep.append(a)
rewards_ep.append(r_step)
end = time.time()
rewards = torch.stack(rewards_ep, dim = 2 ).sum(dim=2).sum(dim=1) #[batch_size, n_agents, ep_len]
total_tests.append(rewards)
total_times.append((end-start)/BATCH_SIZE)
#------------------- SAVE RESULTS -----------------------
rewards_total = torch.stack(total_tests, dim=1).reshape(-1,)
np_results = rewards_total.numpy()
np.save('vrp_results_RL',np_results)
np_runtimes = np.array(total_times).reshape(-1,)
np.save('vrp_runtimes_RL',np_runtimes)
| 33.018692 | 109 | 0.589301 | [
"MIT"
] | jomavera/DRL_HFV | infer.py | 3,533 | Python |
"""File Converter
.nfc """
import asyncio
import os
import time
from datetime import datetime
from userbot.utils import admin_cmd, progress
@borg.on(admin_cmd(pattern="nfc (.*)")) # pylint:disable=E0602
async def _(event):
if event.fwd_from:
return
input_str = event.pattern_match.group(1)
reply_message = await event.get_reply_message()
if reply_message is None:
await event.edit("reply to a media to use the `nfc` operation.\nInspired by @FileConverterBot")
return
await event.edit("trying to download media file, to my local")
try:
start = datetime.now()
c_time = time.time()
downloaded_file_name = await borg.download_media(
reply_message,
Config.TMP_DOWNLOAD_DIRECTORY,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, event, c_time, "trying to download")
)
)
except Exception as e: # pylint:disable=C0103,W0703
await event.edit(str(e))
else:
end = datetime.now()
ms = (end - start).seconds
await event.edit("Downloaded to `{}` in {} seconds.".format(downloaded_file_name, ms))
new_required_file_name = ""
new_required_file_caption = ""
command_to_run = []
force_document = False
voice_note = False
supports_streaming = False
if input_str == "voice":
new_required_file_caption = "NLFC_" + str(round(time.time())) + ".opus"
new_required_file_name = Config.TMP_DOWNLOAD_DIRECTORY + "/" + new_required_file_caption
command_to_run = [
"ffmpeg",
"-i",
downloaded_file_name,
"-map",
"0:a",
"-codec:a",
"libopus",
"-b:a",
"100k",
"-vbr",
"on",
new_required_file_name
]
voice_note = True
supports_streaming = True
elif input_str == "mp3":
new_required_file_caption = "NLFC_" + str(round(time.time())) + ".mp3"
new_required_file_name = Config.TMP_DOWNLOAD_DIRECTORY + "/" + new_required_file_caption
command_to_run = [
"ffmpeg",
"-i",
downloaded_file_name,
"-vn",
new_required_file_name
]
voice_note = False
supports_streaming = True
else:
await event.edit("not supported")
os.remove(downloaded_file_name)
return
logger.info(command_to_run)
# TODO: re-write create_subprocess_exec 😉
process = await asyncio.create_subprocess_exec(
*command_to_run,
# stdout must a pipe to be accessible as process.stdout
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
)
# Wait for the subprocess to finish
stdout, stderr = await process.communicate()
e_response = stderr.decode().strip()
t_response = stdout.decode().strip()
os.remove(downloaded_file_name)
if os.path.exists(new_required_file_name):
end_two = datetime.now()
await borg.send_file(
entity=event.chat_id,
file=new_required_file_name,
caption="`File Successfully converted by` @kannappan04",
allow_cache=False,
silent=True,
force_document=force_document,
voice_note=voice_note,
supports_streaming=supports_streaming,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, event, c_time, "trying to upload")
)
)
ms_two = (end_two - end).seconds
os.remove(new_required_file_name)
await event.edit(f"converted in {ms_two} seconds")
| 37.110092 | 103 | 0.5644 | [
"MIT"
] | anandhu-dev/catuserbot | userbot/plugins/fconvert.py | 4,048 | Python |
#!/usr/bin/env python3
import pwncat
from pwncat.commands import CommandDefinition
class Command(CommandDefinition):
"""
Exit the interactive prompt. If sessions are active, you will
be prompted to confirm. This shouldn't be run from a configuration
script.
"""
PROG = "exit"
ARGS = {}
LOCAL = True
def run(self, manager, args):
raise pwncat.manager.InteractiveExit
| 21.789474 | 70 | 0.683575 | [
"MIT"
] | Mitul16/pwncat | pwncat/commands/exit.py | 414 | Python |
from __future__ import absolute_import
# SymPy is a non-commercial alternative to Mathematica and Maple.
# SymPy can map a variable to a value or a matrix.
# SymPy's symbolic statistical modelling uses scientific computing.
import sys
import numpy as np
import sympy as sp
import pandas as pd
from pathlib import Path
from .tokens import *
from .equation import *
class Equations(Equation):
def __init__(self):
path = Path(__file__).parent
self.filepath = path.joinpath("fixtures","equations.xlsx")
self.equations_sheet = "equations"
self.column_mapping_sheet = "col_var_mapping"
self.data_sheet = "values"
self.mappings = None
self.df = None
self.equations_df = pd.DataFrame()
self.equations = dict()
self.lhs = None
self.values = dict()
def upload_data_equations(self, filepath, equations_sheet, data_sheet, column_mapping_sheet=""):
if not self.validate_file_inputs(filepath, equations_sheet, data_sheet):
return False
self.filepath = filepath
self.equations_df = pd.read_excel(self.filepath, sheet_name=equations_sheet, mangle_dupe_cols=True)
self.df = pd.read_excel(self.filepath, sheet_name=data_sheet, mangle_dupe_cols=True)
if column_mapping_sheet:
self.mappings = pd.read_excel(self.filepath, sheet_name=column_mapping_sheet, mangle_dupe_cols=True)
    def validate_file_inputs(self, filepath, equations_sheet, data_sheet):
        if not filepath or not equations_sheet or not data_sheet:
            raise Exception("Empty upload data inputs. Please provide valid inputs to file upload.")
        return True
def process_equations(self):
self.lhs = self.equations_df['name']
eq_list = self.equations_df['equation']
self.equations = dict()
for variable, equation in zip(self.lhs, eq_list):
self.equations[variable] = Equation(equation, self.df)
self.equations[variable].set_symbols(self.mappings)
self.values[variable] = self.equations[variable].evaluate(self.values)
result_df = pd.DataFrame.from_dict(self.values)
result_df.to_csv("results.csv", index=False)
return self.values
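# Minimal usage sketch (not part of the original module). Because of the relative
# imports above, run it as a module (e.g. `python -m equation_parser.equations`);
# the workbook path and sheet names below are assumptions about your spreadsheet.
if __name__ == "__main__":
    eqs = Equations()
    eqs.upload_data_equations("equations.xlsx", "equations", "values",
                              column_mapping_sheet="col_var_mapping")
    print(eqs.process_equations())  # also writes results.csv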
| 41.781818 | 112 | 0.691036 | [
"MIT"
] | sushmaakoju/parser | src/equation_parser/equations.py | 2,298 | Python |
from win32com import storagecon
import pythoncom, os, win32api
import win32com.test.util
import unittest
class TestEnum(win32com.test.util.TestCase):
def testit(self):
fname, tmp = win32api.GetTempFileName(win32api.GetTempPath(),'stg')
m=storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE
## file, mode, format, attrs (always 0), IID (IStorage or IPropertySetStorage, storage options(only used with STGFMT_DOCFILE)
pss=pythoncom.StgOpenStorageEx(fname, m, storagecon.STGFMT_FILE, 0 , pythoncom.IID_IPropertySetStorage)
### {"Version":2,"reserved":0,"SectorSize":512,"TemplateFile":u'somefilename'})
## FMTID_SummaryInformation FMTID_DocSummaryInformation FMTID_UserDefinedProperties
psuser=pss.Create(pythoncom.FMTID_UserDefinedProperties,
pythoncom.IID_IPropertySetStorage,
storagecon.PROPSETFLAG_DEFAULT,
storagecon.STGM_READWRITE|storagecon.STGM_CREATE|storagecon.STGM_SHARE_EXCLUSIVE) ## its very picky about flag combinations!
psuser.WriteMultiple((3,4),('hey','bubba'))
psuser.WritePropertyNames((3,4),('property3','property4'))
expected_summaries = []
expected_summaries.append( ('property3', 3, pythoncom.VT_BSTR))
expected_summaries.append( ('property4', 4, pythoncom.VT_BSTR))
psuser=None
pssum=pss.Create(pythoncom.FMTID_SummaryInformation,
pythoncom.IID_IPropertySetStorage,
storagecon.PROPSETFLAG_DEFAULT,
storagecon.STGM_READWRITE|storagecon.STGM_CREATE|storagecon.STGM_SHARE_EXCLUSIVE)
pssum.WriteMultiple((storagecon.PIDSI_AUTHOR,storagecon.PIDSI_COMMENTS),('me', 'comment'))
pssum=None
pss=None ## doesn't seem to be a close or release method, and you can't even reopen it from the same process until previous object is gone
pssread=pythoncom.StgOpenStorageEx(fname, storagecon.STGM_READ|storagecon.STGM_SHARE_EXCLUSIVE, storagecon.STGFMT_FILE, 0 , pythoncom.IID_IPropertySetStorage)
found_summaries = []
for psstat in pssread:
ps=pssread.Open(psstat[0],storagecon.STGM_READ|storagecon.STGM_SHARE_EXCLUSIVE)
for p in ps:
p_val = ps.ReadMultiple((p[1],))[0]
if (p[1]==storagecon.PIDSI_AUTHOR and p_val=='me') or \
(p[1]==storagecon.PIDSI_COMMENTS and p_val=='comment'):
pass
else:
self.fail("Uxexpected property %s/%s" % (p, p_val))
ps=None
## FMTID_UserDefinedProperties can't exist without FMTID_DocSummaryInformation, and isn't returned independently from Enum
## also can't be open at same time
if psstat[0]==pythoncom.FMTID_DocSummaryInformation:
ps=pssread.Open(pythoncom.FMTID_UserDefinedProperties,storagecon.STGM_READ|storagecon.STGM_SHARE_EXCLUSIVE)
for p in ps:
found_summaries.append(p)
ps=None
        pssread=None
expected_summaries.sort()
found_summaries.sort()
self.assertEqual(expected_summaries, found_summaries)
if __name__=='__main__':
unittest.main()
| 54.935484 | 167 | 0.644745 | [
"Apache-2.0"
] | Matchoc/python_env | python35/Lib/site-packages/win32com/test/testStorage.py | 3,406 | Python |
"""A simple CLI app to practice grammatical genders of German nouns."""
import argparse
import json
import pathlib
import pandas as pd
class WordList:
"""Data structure to store a pandas dataframe and some structural details.
Args:
path (pathlib.Path or None): The path (without suffix) to a wordlist.
If there is no current list at the path, will create a new list.
If no path is provided the WordList will not be fully initialized and will
require a subsequent call of `load` or `new`.
"""
def __init__(self, path=None):
self.words = None
self.structure = {}
if path is not None:
self.load(path)
def load(self, path: pathlib.Path):
"""Load stored data."""
try:
self.words = pd.read_csv(path.with_suffix(".csv"))
with path.with_suffix(".json").open() as f:
self.structure = json.loads(f.read())
self.words.set_index(self.structure["index"], inplace=True)
except FileNotFoundError as exception:
raise FileNotFoundError(
"No word list found with the specified name."
) from exception
def new(self, language: str = "german", score_inertia: int = 2):
"""Create a new wordlist.
Args:
language (str): The name of a language in the GENDERS dictionary.
score_inertia (int): Determines how resistant scores are to change.
Must be a positive integer. Higher values will require more consecutive
correct answers to reduce the frequency of a specific word.
"""
gender_options = get_languages()
try:
genders = gender_options[language]
except KeyError as exception:
raise ValueError(f"Unknown language: {language}") from exception
columns = ["Word", "Gender", "Correct", "Wrong", "Weight"]
self.structure = {
"language": language,
"genders": genders,
"aliases": self._get_aliases(genders),
"default guesses": score_inertia,
"index": "Word",
"column count": 3,
}
self.words = pd.DataFrame(columns=columns)
self.words.set_index(self.structure["index"], inplace=True)
def save(self, path: pathlib.Path):
"""Saves words to a .csv file and structure to a .json."""
self.words.to_csv(path.with_suffix(".csv"))
with path.with_suffix(".json").open(mode="w") as f:
f.write(json.dumps(self.structure))
def format_gender(self, gender_string: str):
"""Attempts to find a matching gender for gender_string.
Args:
gender_string (str): A gender for the word list or an alias of a gender.
Returns:
The associated gender.
Raises:
ValueError: `gender_string` does not match any gender or alias.
"""
gender_string = gender_string.lower()
if gender_string in self.structure["genders"]:
return gender_string
if gender_string in self.structure["aliases"]:
return self.structure["aliases"][gender_string]
raise ValueError(f"Unknown gender: {gender_string}")
def add(self, gender: str, word: str):
"""Add a new word to the list.
Args:
gender (str): The gender of the word being added.
word (str): The word to add.
Raises:
ValueError: `gender` does not match the current wordlist or the word is
already present in the list.
"""
gender = self.format_gender(gender)
word = word.capitalize()
if gender not in self.structure["genders"]:
raise ValueError(
f"{gender} is not a valid gender for the current wordlist."
)
if word in self.words.index:
raise ValueError(f"{word} is already included.")
n_genders = len(self.structure["genders"])
row = [
gender,
self.structure["default guesses"],
self.structure["default guesses"] * (n_genders - 1),
(n_genders - 1) / n_genders,
]
self.words.loc[word] = row
def get_words(self, n: int, distribution: str = "weighted"):
"""Selects and returns a sample of words and their genders.
Args:
n (int): The number of results wanted.
distribution (str): The sampling method to use. Either `uniform` or
`weighted`.
Yields:
A tuple of strings in the format (word, gender).
"""
if distribution == "uniform":
sample = self.words.sample(n=n)
elif distribution == "weighted":
sample = self.words.sample(n=n, weights="Weight")
else:
raise ValueError(f"Unknown value for distribution: {distribution}")
for row in sample.iterrows():
yield row[0], row[1].Gender
def update_weight(self, word, guess):
"""Update the weighting on a word based on the most recent guess.
Args:
word (str): The word to update. Should be in the index of self.words.
guess (bool): Whether the guess was correct or not.
"""
row = self.words.loc[word]
if guess:
row.Correct += 1
else:
row.Wrong += 1
n_genders = len(self.structure["genders"])
total = row.Correct + row.Wrong
if not total % n_genders:
            # Throw away some data as evenly as possible to allow for change over time.
            # Never throw away the last negative result, to avoid the question being lost.
            if row.Correct:
                wrongs_to_throw = min(row.Wrong - 1, n_genders - 1)
                row.Wrong -= wrongs_to_throw
                row.Correct -= n_genders - wrongs_to_throw
            else:
                row.Wrong -= n_genders
row.Weight = row.Wrong / (row.Correct + row.Wrong)
self.words.loc[word] = row
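        # Worked example (illustrative numbers, assuming the German list with 3
        # genders and the default score_inertia of 2): a new word starts at
        # Correct=2, Wrong=4, Weight=4/6. Three consecutive correct guesses give
        # Correct=5, Wrong=4 (total 9, a multiple of 3), so 2 wrong and 1 correct
        # results are discarded, leaving Correct=4, Wrong=2, Weight=2/6, and the
        # word is sampled less often by get_words.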
@staticmethod
def _get_aliases(genders: dict):
"""Create a dictionary of aliases and the genders they refer to.
May have issues if multiple genders have the same article or first letter.
"""
aliases = {}
for gender, article in genders.items():
aliases[gender[0]] = gender
aliases[article] = gender
return aliases
def force_console_input(
query: str,
allowable,
onfail: str = "Input not recognised, please try again.\n",
case_sensitive=False,
):
"""Get an input from the user matching some string in allowable.
Args:
query (str): The query to issue the user with.
allowable (str or container): The options which the user is allowed to submit.
If this is a string, acceptable answers will be substrings.
For containers acceptable answers will be elements of the container.
Returns:
        The first input that matches an allowable option.
Raises:
IOError: A request to quit was submitted.
"""
if not allowable:
raise ValueError("At least one entry must be allowable.")
submission = input(query)
while True:
if not case_sensitive:
submission = submission.lower()
if submission in ("quit", "exit"):
raise IOError("Exit command received.")
if submission in allowable:
return submission
submission = input(onfail)
def get_languages():
"""Gets the language: genders dictionary."""
with open("genders.json", "r") as f:
return json.loads(f.read())
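# genders.json (shipped with the repository) is expected to map each language to
# a {gender name: article} dict; an illustrative entry could look like:
#   {"german": {"masculine": "der", "feminine": "die", "neuter": "das"}}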
def main():
"""Orchestration function for the CLI."""
args = _parse_args()
path = pathlib.Path("lists", args.words)
try:
words = _load_words(path)
except IOError:
print("Exiting.")
return
if args.quiz_length is not None:
if args.quiz_length == 0:
print("Starting quiz in endless mode. Answer `quit` to end the quiz.")
correct, answered = _quiz_endless(words)
elif args.quiz_length > 0:
print(f"Starting quiz with length {args.quiz_length}...\n")
correct, answered, _ = _quiz(words, args.quiz_length)
else:
raise ValueError(f"Invalid quiz length: {args.quiz_length}.")
print(f"\nYou successfully answered {correct} out of {answered} questions!")
elif args.add_words:
print("Entering word addition mode...")
_add_words(words)
elif args.load_words:
print(f"Importing word file {args.load_words}...")
added, reps = _import_words(words, args.load_words)
print(f"{added} words successfully imported. {reps} duplicates skipped.")
elif args.reset_scores:
print("Resetting scores")
words = WordList()
words.new()
_import_words(words, path.with_suffix(".csv"))
_save_and_exit(words, path)
def _parse_args():
parser = argparse.ArgumentParser(
description="Flashcard app for German grammatical genders."
)
mode = parser.add_mutually_exclusive_group(required=True)
mode.add_argument(
"-q", "--quiz", type=int, help="Start the app in quiz mode.", dest="quiz_length"
)
mode.add_argument(
"-a",
"--add-words",
action="store_true",
help="Start the app in manual word addition mode.",
)
mode.add_argument(
"-l",
"--load-words",
help="Concatenates a prewritten list of words into the saved WordList.",
)
mode.add_argument(
"-r",
"--reset-scores",
action="store_true",
help="Reset all scores in the specified word list.",
)
parser.add_argument(
"-w", "--words", default="main_list", help="The name of the WordList to use."
)
return parser.parse_args()
def _load_words(path):
"""Encapsulates the loading/newfile creation logic."""
try:
words = WordList(path)
print("Words successfully loaded.")
except FileNotFoundError:
print(f"No word list found with given name.")
newfile = force_console_input(
"Would you like to create a new wordlist with the specified name? Y/N: ",
options=["y", "yes", "n", "no"],
)
if newfile[0] == "y":
words = WordList()
language = force_console_input(
query="Which language should be used?\n",
onfail="Language not recognised, please try again or check genders.json\n",
options=get_languages(),
)
words.new(language=language)
print(f"New WordList for language {language} successfully created.")
else:
raise IOError
return words
def _quiz(wordlist, quiz_length):
"""Runs a command line quiz of the specified length."""
pd.options.mode.chained_assignment = None # Suppresses SettingWithCopyWarning
answered, correct = 0, 0
for word, gender in wordlist.get_words(quiz_length):
guess = input(f"What is the gender of {word}? ").lower()
if guess in ("quit", "exit"):
break
answered += 1
try:
guess = wordlist.format_gender(guess)
except ValueError:
print("Unrecognised guess, skipping.\n")
continue
accurate = gender == guess
wordlist.update_weight(word, accurate)
if accurate:
print("Correct!\n")
correct += 1
else:
print(f"Incorrect! The correct gender is {gender}.\n")
return correct, answered, answered == quiz_length
def _quiz_endless(wordlist):
"""Runs quizzes in batches of 20 until quit or exit is answered."""
correct, answered = 0, 0
finished = False
while not finished:
results = _quiz(wordlist, 20)
correct += results[0]
answered += results[1]
finished = not results[2]
return correct, answered
def _add_words(wordlist):
"""CLI for adding words individually to the wordlist."""
print("Type a word with gender eg `m Mann` or `quit` when finished.")
while True:
input_str = input()
if input_str in ("quit", "exit"):
print("Exiting word addition mode...")
break
try:
gender, word = input_str.split()
wordlist.add(gender, word)
except ValueError as e:
print(e)
def _import_words(wordlist, import_path):
"""Loads words from a csv file at import_path into `wordlist`."""
new_words = pd.read_csv(import_path)
words_added = 0
repetitions = 0
for _, row in new_words.iterrows():
try:
wordlist.add(row.Gender, row.Word)
words_added += 1
except ValueError:
repetitions += 1
return words_added, repetitions
def _save_and_exit(wordlist, path):
while True:
try:
wordlist.save(path=path)
# TODO: Can WordList be made into a context manager?
print("WordList successfully saved, goodbye!")
break
except PermissionError:
print("PermissionError! File may be open in another window.")
retry = force_console_input("Try again? Y/N: ", ["y", "yes", "n", "no"])
if retry[0] == "y":
continue
else:
print("Exiting without saving changes.")
if __name__ == "__main__":
main()
| 32.370546 | 91 | 0.587981 | [
"MIT"
] | n-Holmes/deutscheflash | deutscheflash.py | 13,628 | Python |
from django.test import TestCase, override_settings
from api.tests.fixtures import patch_requests
from .. import client
@patch_requests
@override_settings(
VBO_URI_TEMPLATE="http://api/bag/verblijfsobject/{landelijk_id}/")
class ClientTest(TestCase):
def setUp(self):
self.client = client.Client()
def test_get_vbo(self):
vbo = self.client.get_verblijfsobject('0363010000998532')
self.assertIsNotNone(vbo)
self.assertEquals('0363010000998532', vbo.landelijk_id)
def test_get_vbo_follow_pand_relatie(self):
vbo = self.client.get_verblijfsobject('0363010000998532')
panden = self.client.get_panden(vbo)
self.assertIsNotNone(panden)
self.assertEquals(1, len(panden))
self.assertEquals("31", panden[0].pand_status)
def test_get_vbo_follow_beperking_relatie(self):
vbo = self.client.get_verblijfsobject('0363010000758545')
beperkingen = self.client.get_beperkingen(vbo)
self.assertIsNotNone(beperkingen)
self.assertEquals(1, len(beperkingen))
self.assertEquals("HS", beperkingen[0].beperking)
| 32.257143 | 70 | 0.719221 | [
"MPL-2.0"
] | Amsterdam/zwaailicht | web/zwaailicht/api/tests/test_client.py | 1,129 | Python |
from typing import Optional


# Definition for singly-linked list (normally provided by the LeetCode runtime;
# defined here so the module is self-contained).
class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next
class Solution:
def oddEvenList(self, head: Optional[ListNode]) -> Optional[ListNode]:
if head is None:
return None
odd, even = ListNode(), ListNode()
oddTail, evenTail = odd, even
count = 0
while head:
if count % 2 == 0:
evenTail.next = head
evenTail = evenTail.next
else:
oddTail.next = head
oddTail = oddTail.next
head = head.next
count += 1
evenTail.next = odd.next
oddTail.next = None
return even.next
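# Quick self-check when this file is run directly (driver code added for
# illustration; it is not part of the LeetCode submission).
if __name__ == "__main__":
    head = ListNode(1)
    head.next = ListNode(2)
    head.next.next = ListNode(3)
    head.next.next.next = ListNode(4)
    head.next.next.next.next = ListNode(5)
    node = Solution().oddEvenList(head)
    values = []
    while node:
        values.append(node.val)
        node = node.next
    print(values)  # expected: [1, 3, 5, 2, 4]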
| 27.103448 | 74 | 0.493639 | [
"MIT"
] | MayaScarlet/leetcode-python | 328-odd-even-linked-list/328-odd-even-linked-list.py | 786 | Python |
# services/resource/project/utils/enums.py
from enum import Enum
class Status(Enum):
normal = 0
delete = 1
other = 2
class Scope(Enum):
user = 'UserScope'
admin = 'AdminScope'
| 12.625 | 42 | 0.643564 | [
"MIT"
] | spruce-cq/sblog | services/resource/project/utils/enums.py | 202 | Python |
import os
import shutil
#for i in range(8050,8051):
# old=str(i) + '.bin'
# new="../new/"+'%06d.bin' % i
# shutil.move(old,new)
file1 = open('a.txt', 'r')
Lines = file1.readlines()
file2 = open('b.txt', 'r')
Lines2 = file2.readlines()
calib_DIR='./calib/'
img_DIR='./image_2/'
label_DIR='./label_2/'
pcl_DIR='./velodyne/'
# Strips the newline character
for line in Lines2:
line=line.rstrip()
print(line)
pcl_fname=line+'.bin'
img_fname=line+'.png'
txt_fname=line+'.txt'
shutil.move(calib_DIR+txt_fname, "../testing/"+calib_DIR+txt_fname)
#shutil.move(label_DIR+txt_fname, "../testing/"+label_DIR+txt_fname)
#shutil.move(img_DIR+img_fname, "../testing/"+img_DIR+img_fname)
#shutil.move(pcl_DIR+pcl_fname, "../testing/"+pcl_DIR+pcl_fname)
| 26.4 | 72 | 0.655303 | [
"MIT"
] | UILXELA/Cooperative-3D-Object-Detection-Using-Shared-Raw-LIDAR-Data | useful_scripts/split.py | 792 | Python |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_warren_bors_teraud.iff"
result.attribute_template_id = 9
result.stfName("npc_name","warren_bors_teraud")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | 26.235294 | 64 | 0.730942 | [
"MIT"
] | SWGANHServices/GameServer_Legacy | data/scripts/templates/object/mobile/shared_warren_bors_teraud.py | 446 | Python |
#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import print_function
import os
import ntpath
import posixpath
from commoncode import compat
from commoncode import filetype
from commoncode import fileutils
from commoncode.testcase import FileBasedTesting
from commoncode.system import on_windows
"""
Shared archiving test utils.
"""
def check_size(expected_size, location):
assert expected_size == os.stat(location).st_size
def check_files(test_dir, expected):
"""
Walk test_dir.
Check that all dirs are readable.
Check that all files are:
* non-special,
* readable,
* have a posix path that ends with one of the expected tuple paths.
"""
result = []
locs = []
if filetype.is_file(test_dir):
test_dir = fileutils.parent_directory(test_dir)
test_dir_path = fileutils.as_posixpath(test_dir)
for top, _, files in os.walk(test_dir):
for f in files:
location = os.path.join(top, f)
locs.append(location)
path = fileutils.as_posixpath(location)
path = path.replace(test_dir_path, '').strip('/')
result.append(path)
assert sorted(expected) == sorted(result)
for location in locs:
assert filetype.is_file(location)
assert not filetype.is_special(location)
assert filetype.is_readable(location)
def check_no_error(result):
"""
Check that every ExtractEvent in the `result` list has no error or warning.
"""
for r in result:
assert not r.errors
assert not r.warnings
def is_posixpath(location):
"""
Return True if the `location` path is likely a POSIX-like path using POSIX path
    separators (slash or "/") or has no path separator.
    Return False if the `location` path is likely a Windows-like path using backslash
    as path separators (e.g. "\\").
"""
has_slashes = '/' in location
has_backslashes = '\\' in location
# windows paths with drive
if location:
drive, _ = ntpath.splitdrive(location)
if drive:
return False
# a path is always POSIX unless it contains ONLY backslahes
# which is a rough approximation (it could still be posix)
is_posix = True
if has_backslashes and not has_slashes:
is_posix = False
return is_posix
def to_posix(path):
"""
Return a path using the posix path separator given a path that may contain posix
or windows separators, converting \\ to /. NB: this path will still be valid in
the windows explorer (except as a UNC or share name). It will be a valid path
everywhere in Python. It will not be valid for windows command line operations.
"""
is_unicode = isinstance(path, compat.unicode)
ntpath_sep = is_unicode and u'\\' or '\\'
posixpath_sep = is_unicode and u'/' or '/'
if is_posixpath(path):
if on_windows:
return path.replace(ntpath_sep, posixpath_sep)
else:
return path
return path.replace(ntpath_sep, posixpath_sep)
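# Illustrative behaviour of the two helpers above (not part of the original module):
#   is_posixpath('C:\\tmp\\foo')  -> False   (drive letter plus backslashes)
#   is_posixpath('tmp/foo')       -> True
#   is_posixpath('foo')           -> True    (no separator is treated as POSIX)
#   to_posix('tmp\\foo\\bar')     -> 'tmp/foo/bar'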
class BaseArchiveTestCase(FileBasedTesting):
test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
def check_get_extractors(self, test_file, expected, kinds=()):
from extractcode import archive
test_loc = self.get_test_loc(test_file)
if kinds:
extractors = archive.get_extractors(test_loc, kinds)
else:
extractors = archive.get_extractors(test_loc)
# import typecode
# ft = 'TODO' or typecode.contenttype.get_type(test_loc).filetype_file
# mt = 'TODO' or typecode.contenttype.get_type(test_loc).mimetype_file
fe = fileutils.file_extension(test_loc).lower()
em = ', '.join(e.__module__ + '.' + e.__name__ for e in extractors)
msg = ('%(expected)r == %(extractors)r for %(test_file)s\n'
'with fe:%(fe)r, em:%(em)s' % locals())
assert expected == extractors, msg
def assertRaisesInstance(self, excInstance, callableObj, *args, **kwargs):
"""
This assertion accepts an instance instead of a class for refined
exception testing.
"""
kwargs = kwargs or {}
excClass = excInstance.__class__
try:
callableObj(*args, **kwargs)
except excClass as e:
assert str(e).startswith(str(excInstance))
else:
if hasattr(excClass, '__name__'):
excName = excClass.__name__
else:
excName = str(excClass)
raise self.failureException('%s not raised' % excName)
def check_extract(self, test_function, test_file, expected, expected_warnings=None, check_all=False):
"""
Run the extraction `test_function` on `test_file` checking that a map of
        expected paths --> size exists in the extracted target directory.
Does not test the presence of all files unless `check_all` is True.
"""
from extractcode import archive
test_file = self.get_test_loc(test_file)
test_dir = self.get_temp_dir()
warnings = test_function(test_file, test_dir)
if expected_warnings is not None:
assert expected_warnings == warnings
if check_all:
len_test_dir = len(test_dir)
extracted = {path[len_test_dir:]: filetype.get_size(path) for path in fileutils.resource_iter(test_dir, with_dirs=False)}
expected = {os.path.join(test_dir, exp_path): exp_size for exp_path, exp_size in expected.items()}
assert sorted(expected.items()) == sorted(extracted.items())
else:
for exp_path, exp_size in expected.items():
exp_loc = os.path.join(test_dir, exp_path)
msg = '''When extracting: %(test_file)s
With function: %(test_function)r
Failed to find expected path: %(exp_loc)s'''
assert os.path.exists(exp_loc), msg % locals()
if exp_size is not None:
res_size = os.stat(exp_loc).st_size
msg = '''When extracting: %(test_file)s
With function: %(test_function)r
Failed to assert the correct size %(exp_size)d
Got instead: %(res_size)d
for expected path: %(exp_loc)s'''
assert exp_size == res_size, msg % locals()
def collect_extracted_path(self, test_dir):
result = []
td = fileutils.as_posixpath(test_dir)
for t, dirs, files in os.walk(test_dir):
t = fileutils.as_posixpath(t)
for d in dirs:
nd = posixpath.join(t, d).replace(td, '') + '/'
result.append(nd)
for f in files:
nf = posixpath.join(t, f).replace(td, '')
result.append(nf)
result = sorted(result)
return result
def assertExceptionContains(self, text, callableObj, *args, **kwargs):
try:
callableObj(*args, **kwargs)
except Exception as e:
if text not in str(e):
raise self.failureException(
'Exception %(e)r raised, '
'it should contain the text %(text)r '
'and does not' % locals())
else:
raise self.failureException(
'Exception containing %(text)r not raised' % locals())
| 37.927039 | 133 | 0.640715 | [
"Apache-2.0",
"CC0-1.0"
] | adityaviki/scancode-toolk | tests/extractcode/extractcode_assert_utils.py | 8,837 | Python |
import os
import unittest
from tests.base import (
HPSS_ARCHIVE,
TOP_LEVEL,
ZSTASH_PATH,
TestZstash,
compare,
print_starred,
run_cmd,
write_file,
)
class TestUpdate(TestZstash):
"""
Test `zstash --update`.
"""
# x = on, no mark = off, b = both on and off tested
# option | Update | UpdateDryRun | UpdateKeep | UpdateCache | TestZstash.add_files (used in multiple tests)|
# --hpss |x|x|x|x|x|
# --cache | | | |x|b|
# --dry-run | |x| | | |
# --keep | | |x| |b|
# -v | | | | |b|
def helperUpdate(self, test_name, hpss_path, zstash_path=ZSTASH_PATH):
"""
Test `zstash update`.
"""
self.hpss_path = hpss_path
use_hpss = self.setupDirs(test_name)
self.create(use_hpss, zstash_path)
print_starred(
"Running update on the newly created directory, nothing should happen"
)
self.assertWorkspace()
os.chdir(self.test_dir)
cmd = "{}zstash update -v --hpss={}".format(zstash_path, self.hpss_path)
output, err = run_cmd(cmd)
os.chdir(TOP_LEVEL)
self.check_strings(cmd, output + err, ["Nothing to update"], ["ERROR"])
def helperUpdateDryRun(self, test_name, hpss_path, zstash_path=ZSTASH_PATH):
"""
Test `zstash update --dry-run`.
"""
self.hpss_path = hpss_path
use_hpss = self.setupDirs(test_name)
self.create(use_hpss, zstash_path)
print_starred("Testing update with an actual change")
self.assertWorkspace()
if not os.path.exists("{}/dir2".format(self.test_dir)):
os.mkdir("{}/dir2".format(self.test_dir))
write_file("{}/dir2/file2.txt".format(self.test_dir), "file2 stuff")
write_file("{}/dir/file1.txt".format(self.test_dir), "file1 stuff with changes")
os.chdir(self.test_dir)
cmd = "{}zstash update --dry-run --hpss={}".format(zstash_path, self.hpss_path)
output, err = run_cmd(cmd)
os.chdir(TOP_LEVEL)
expected_present = [
"List of files to be updated",
"dir/file1.txt",
"dir2/file2.txt",
]
# Make sure none of the old files or directories are moved.
expected_absent = [
"ERROR",
"file0",
"file_empty",
"empty_dir",
"INFO: Creating new tar archive",
]
self.check_strings(cmd, output + err, expected_present, expected_absent)
def helperUpdateKeep(self, test_name, hpss_path, zstash_path=ZSTASH_PATH):
"""
Test `zstash update --keep`.
"""
self.hpss_path = hpss_path
use_hpss = self.setupDirs(test_name)
# Not keeping the tar from `create`.
self.create(use_hpss, zstash_path)
self.add_files(use_hpss, zstash_path, keep=True)
files = os.listdir("{}/{}".format(self.test_dir, self.cache))
if use_hpss:
expected_files = [
"index.db",
"000003.tar",
"000004.tar",
"000001.tar",
"000002.tar",
]
else:
expected_files = [
"index.db",
"000003.tar",
"000004.tar",
"000000.tar",
"000001.tar",
"000002.tar",
]
if not compare(files, expected_files):
error_message = (
"The zstash cache does not contain expected files.\nIt has: {}".format(
files
)
)
self.stop(error_message)
os.chdir(TOP_LEVEL)
def helperUpdateCache(self, test_name, hpss_path, zstash_path=ZSTASH_PATH):
"""
Test `zstash update --cache`.
"""
self.hpss_path = hpss_path
self.cache = "my_cache"
use_hpss = self.setupDirs(test_name)
self.create(use_hpss, zstash_path, cache=self.cache)
self.add_files(use_hpss, zstash_path, cache=self.cache)
files = os.listdir("{}/{}".format(self.test_dir, self.cache))
if use_hpss:
expected_files = ["index.db"]
else:
expected_files = [
"index.db",
"000003.tar",
"000004.tar",
"000000.tar",
"000001.tar",
"000002.tar",
]
if not compare(files, expected_files):
error_message = (
"The zstash cache does not contain expected files.\nIt has: {}".format(
files
)
)
self.stop(error_message)
def testUpdate(self):
self.helperUpdate("testUpdate", "none")
def testUpdateHPSS(self):
self.conditional_hpss_skip()
self.helperUpdate("testUpdateHPSS", HPSS_ARCHIVE)
def testUpdateDryRun(self):
self.helperUpdateDryRun("testUpdateDryRun", "none")
def testUpdateDryRunHPSS(self):
self.conditional_hpss_skip()
self.helperUpdateDryRun("testUpdateDryRunHPSS", HPSS_ARCHIVE)
def testUpdateKeep(self):
self.helperUpdateKeep("testUpdateKeep", "none")
def testUpdateKeepHPSS(self):
self.conditional_hpss_skip()
self.helperUpdateKeep("testUpdateKeepHPSS", HPSS_ARCHIVE)
def testUpdateCache(self):
self.helperUpdateCache("testUpdateCache", "none")
def testUpdateCacheHPSS(self):
self.conditional_hpss_skip()
self.helperUpdateCache("testUpdateCacheHPSS", HPSS_ARCHIVE)
if __name__ == "__main__":
unittest.main()
| 32.274286 | 112 | 0.561969 | [
"BSD-3-Clause"
] | E3SM-Project/zstash | tests/test_update.py | 5,648 | Python |
# -*- coding: utf-8 -*-
"""
Created on Wed May 9 14:25:47 2018
@author: Steven
"""
import sys
import argparse
from radioxenon_ml.read_in import ml_matrix_composition as mlmc
from radioxenon_ml.solve import iterate
import numpy as np
"""
import radioxenon_ml.read_in.ml_matrix_composition
import radioxenon_ml.solve.iterate
import radioxenon_ml.solve.variance
"""
"""the master file for the radioxenon_ml package"""
parser = argparse.ArgumentParser(description='This is the master file for running the maximum likelihood package.')
parser.add_argument('-o', '--offset',
type=int,
default=84,
help='where to start the file selection from list of test files'
)
args = parser.parse_args(sys.argv[1:])
spectrum_file_location = 'radioxenon_ml/test_files/test'
offset = args.offset
err = 0.01 #acceptable error in normalized activity
scale_array = np.array([1,1,1,1]) #Should have elements equal to the number of isotopes
#scale_array = np.array([0.561,0.584,0.9,0.372,0.489,0.489,1]) #scaling factor for each simulation file
#currently taken from (Czyz, 2017)
n = np.shape(scale_array)[0] #number of simulated spectra
simulation, experiment, totcount = mlmc.form_matrix(spectrum_file_location,scale_array,n,offset); #known issue: requires UTF-8 encoding
#simulation, experiment = mlmc.scale_matrix(simulation_unscaled,experiment_unscaled,)
A,J,K,q=iterate.iterate(simulation, experiment, err)
print("\n_____________________________________\nTotal activity percents = " + str(A*100)) | 43.025641 | 138 | 0.690703 | [
"MIT"
] | sczyz/radioxenon_ml | ml_rxe.py | 1,678 | Python |
""" A class for testing a SSD model on a video file or webcam """
import cv2
import keras
from keras.applications.imagenet_utils import preprocess_input
from keras.backend.tensorflow_backend import set_session
from keras.models import Model
from keras.preprocessing import image
import pickle
import numpy as np
from random import shuffle
from scipy.misc import imread, imresize
from timeit import default_timer as timer
import sys
sys.path.append("..")
from ssd_utils import BBoxUtility
class VideoTest(object):
""" Class for testing a trained SSD model on a video file and show the
result in a window. Class is designed so that one VideoTest object
can be created for a model, and the same object can then be used on
multiple videos and webcams.
Arguments:
class_names: A list of strings, each containing the name of a class.
The first name should be that of the background class
which is not used.
model: An SSD model. It should already be trained for
images similar to the video to test on.
input_shape: The shape that the model expects for its input,
as a tuple, for example (300, 300, 3)
bbox_util: An instance of the BBoxUtility class in ssd_utils.py
The BBoxUtility needs to be instantiated with
the same number of classes as the length of
class_names.
"""
def __init__(self, class_names, model, input_shape):
self.class_names = class_names
self.num_classes = len(class_names)
self.model = model
self.input_shape = input_shape
self.bbox_util = BBoxUtility(self.num_classes)
# Create unique and somewhat visually distinguishable bright
# colors for the different classes.
self.class_colors = []
for i in range(0, self.num_classes):
# This can probably be written in a more elegant manner
hue = 255*i/self.num_classes
col = np.zeros((1,1,3)).astype("uint8")
col[0][0][0] = hue
col[0][0][1] = 128 # Saturation
col[0][0][2] = 255 # Value
cvcol = cv2.cvtColor(col, cv2.COLOR_HSV2BGR)
col = (int(cvcol[0][0][0]), int(cvcol[0][0][1]), int(cvcol[0][0][2]))
self.class_colors.append(col)
def run(self, video_path = 0, start_frame = 0, conf_thresh = 0.6):
""" Runs the test on a video (or webcam)
# Arguments
video_path: A file path to a video to be tested on. Can also be a number,
in which case the webcam with the same number (i.e. 0) is
used instead
start_frame: The number of the first frame of the video to be processed
by the network.
conf_thresh: Threshold of confidence. Any boxes with lower confidence
are not visualized.
"""
vid = cv2.VideoCapture(video_path)
if not vid.isOpened():
raise IOError(("Couldn't open video file or webcam. If you're "
"trying to open a webcam, make sure you video_path is an integer!"))
# Compute aspect ratio of video
vidw = vid.get(cv2.CAP_PROP_FRAME_WIDTH)
vidh = vid.get(cv2.CAP_PROP_FRAME_HEIGHT)
vidar = vidw/vidh
# Skip frames until reaching start_frame
if start_frame > 0:
            vid.set(cv2.CAP_PROP_POS_FRAMES, start_frame)  # seek by frame index, as documented
accum_time = 0
curr_fps = 0
fps = "FPS: ??"
prev_time = timer()
while True:
retval, orig_image = vid.read()
if not retval:
print("Done!")
return
im_size = (self.input_shape[0], self.input_shape[1])
resized = cv2.resize(orig_image, im_size)
rgb = cv2.cvtColor(resized, cv2.COLOR_BGR2RGB)
# Reshape to original aspect ratio for later visualization
# The resized version is used, to visualize what kind of resolution
# the network has to work with.
to_draw = cv2.resize(resized, (int(self.input_shape[0]*vidar), self.input_shape[1]))
# Use model to predict
inputs = [image.img_to_array(rgb)]
tmp_inp = np.array(inputs)
x = preprocess_input(tmp_inp)
y = self.model.predict(x)
# This line creates a new TensorFlow device every time. Is there a
# way to avoid that?
results = self.bbox_util.detection_out(y)
if len(results) > 0 and len(results[0]) > 0:
# Interpret output, only one frame is used
det_label = results[0][:, 0]
det_conf = results[0][:, 1]
det_xmin = results[0][:, 2]
det_ymin = results[0][:, 3]
det_xmax = results[0][:, 4]
det_ymax = results[0][:, 5]
top_indices = [i for i, conf in enumerate(det_conf) if conf >= conf_thresh]
top_conf = det_conf[top_indices]
top_label_indices = det_label[top_indices].tolist()
top_xmin = det_xmin[top_indices]
top_ymin = det_ymin[top_indices]
top_xmax = det_xmax[top_indices]
top_ymax = det_ymax[top_indices]
for i in range(top_conf.shape[0]):
xmin = int(round(top_xmin[i] * to_draw.shape[1]))
ymin = int(round(top_ymin[i] * to_draw.shape[0]))
xmax = int(round(top_xmax[i] * to_draw.shape[1]))
ymax = int(round(top_ymax[i] * to_draw.shape[0]))
# Draw the box on top of the to_draw image
class_num = int(top_label_indices[i])
cv2.rectangle(to_draw, (xmin, ymin), (xmax, ymax),
self.class_colors[class_num], 2)
text = self.class_names[class_num] + " " + ('%.2f' % top_conf[i])
text_top = (xmin, ymin-10)
text_bot = (xmin + 80, ymin + 5)
text_pos = (xmin + 5, ymin)
cv2.rectangle(to_draw, text_top, text_bot, self.class_colors[class_num], -1)
cv2.putText(to_draw, text, text_pos, cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0,0,0), 1)
# Calculate FPS
# This computes FPS for everything, not just the model's execution
# which may or may not be what you want
curr_time = timer()
exec_time = curr_time - prev_time
prev_time = curr_time
accum_time = accum_time + exec_time
curr_fps = curr_fps + 1
if accum_time > 1:
accum_time = accum_time - 1
fps = "FPS: " + str(curr_fps)
curr_fps = 0
# Draw FPS in top left corner
cv2.rectangle(to_draw, (0,0), (50, 17), (255,255,255), -1)
cv2.putText(to_draw, fps, (3,10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0,0,0), 1)
cv2.imshow("SSD result", to_draw)
cv2.waitKey(10)
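# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original file). It only wires the
# class together as described in the docstring above; the caller supplies an
# already-trained SSD `model` and its `class_names` list, both assumptions here.
def _example_run(model, class_names, video_path=0):
    input_shape = (300, 300, 3)  # shape the model expects, per the docstring
    vid_test = VideoTest(class_names, model, input_shape)
    # 0 opens the default webcam; a file path would play a video instead.
    vid_test.run(video_path, start_frame=0, conf_thresh=0.6)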
| 41.625 | 100 | 0.536362 | [
"MIT"
] | hanhejia/SSD | testing_utils/videotest.py | 7,659 | Python |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Time stepping for Navier-Stokes equations."""
import dataclasses
from typing import Callable, Sequence, TypeVar
import jax
from jax_cfd.base import tree_math
PyTreeState = TypeVar("PyTreeState")
TimeStepFn = Callable[[PyTreeState], PyTreeState]
class ExplicitNavierStokesODE:
"""Spatially discretized version of Navier-Stokes.
The equation is given by:
∂u/∂t = explicit_terms(u)
0 = incompressibility_constraint(u)
"""
def __init__(self, explicit_terms, pressure_projection):
self.explicit_terms = explicit_terms
self.pressure_projection = pressure_projection
def explicit_terms(self, state):
"""Explicitly evaluate the ODE."""
raise NotImplementedError
def pressure_projection(self, state):
"""Enforce the incompressibility constraint."""
raise NotImplementedError
@dataclasses.dataclass
class ButcherTableau:
a: Sequence[Sequence[float]]
b: Sequence[float]
# TODO(shoyer): add c, when we support time-dependent equations.
def __post_init__(self):
if len(self.a) + 1 != len(self.b):
raise ValueError("inconsistent Butcher tableau")
def navier_stokes_rk(
tableau: ButcherTableau,
equation: ExplicitNavierStokesODE,
time_step: float,
) -> TimeStepFn:
"""Create a forward Runge-Kutta time-stepper for incompressible Navier-Stokes.
This function implements the reference method (equations 16-21), rather than
the fast projection method, from:
"Fast-Projection Methods for the Incompressible Navier–Stokes Equations"
Fluids 2020, 5, 222; doi:10.3390/fluids5040222
Args:
tableau: Butcher tableau.
equation: equation to use.
time_step: overall time-step size.
Returns:
Function that advances one time-step forward.
"""
# pylint: disable=invalid-name
dt = time_step
F = tree_math.pytree_to_vector_fun(equation.explicit_terms)
P = tree_math.pytree_to_vector_fun(equation.pressure_projection)
a = tableau.a
b = tableau.b
num_steps = len(b)
@tree_math.vector_to_pytree_fun
def step_fn(u0):
u = [None] * num_steps
k = [None] * num_steps
u[0] = u0
k[0] = F(u0)
for i in range(1, num_steps):
u_star = u0 + dt * sum(a[i-1][j] * k[j] for j in range(i) if a[i-1][j])
u[i] = P(u_star)
k[i] = F(u[i])
u_star = u0 + dt * sum(b[j] * k[j] for j in range(num_steps) if b[j])
u_final = P(u_star)
return u_final
return step_fn
def forward_euler(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[], b=[1]),
equation,
time_step),
name="forward_euler",
)
def midpoint_rk2(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[[1/2]], b=[0, 1]),
equation=equation,
time_step=time_step,
),
name="midpoint_rk2",
)
def heun_rk2(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[[1]], b=[1/2, 1/2]),
equation=equation,
time_step=time_step,
),
name="heun_rk2",
)
def classic_rk4(
equation: ExplicitNavierStokesODE, time_step: float,
) -> TimeStepFn:
return jax.named_call(
navier_stokes_rk(
ButcherTableau(a=[[1/2], [0, 1/2], [0, 0, 1]],
b=[1/6, 1/3, 1/3, 1/6]),
equation=equation,
time_step=time_step,
),
name="classic_rk4",
)
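# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative, not part of the original module). The toy
# "equation" below -- linear decay for the explicit terms and an identity
# pressure projection -- is an assumption made purely to show how a tableau-built
# stepper is driven; it is not a Navier-Stokes discretization.
def _example_forward_euler_step():
  import jax.numpy as jnp
  u0 = (jnp.ones(4),)  # toy pytree state with a single velocity-like array
  equation = ExplicitNavierStokesODE(
      explicit_terms=lambda u: (-u[0],),   # du/dt = -u
      pressure_projection=lambda u: u,     # no-op projection
  )
  step = forward_euler(equation, time_step=0.1)
  return step(u0)  # one explicit step: roughly 0.9 everywhere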
| 26.194969 | 80 | 0.677791 | [
"Apache-2.0"
] | google/jax-cfd | jax_cfd/base/time_stepping.py | 4,171 | Python |
import base64
import numpy as np
import cv2
path_save = "./images/"
def base64_cv2(base64_str):
    """Decode a base64-encoded image string to an OpenCV BGR image and save it to disk."""
    img_bytes = base64.b64decode(base64_str)
    # np.fromstring is deprecated for binary data; frombuffer avoids the copy and the warning.
    nparr = np.frombuffer(img_bytes, np.uint8)
    image = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
    saved = cv2.imwrite(path_save + "img_face_1" + ".jpg", image)
    print(saved)  # True if the file was written successfully
    return image
if __name__ == '__main__':
s = "/9j/4AAQSkZJRgABAQAAAQABAAD/2wBDAAIBAQEBAQIBAQECAgICAgQDAgICAgUEBAMEBgUGBgYFBgYGBwkIBgcJBwYGCAsICQoKCgoKBggLDAsKDAkKCgr/2wBDAQICAgICAgUDAwUKBwYHCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgoKCgr/wAARCACBAIEDASIAAhEBAxEB/8QAHwAAAQUBAQEBAQEAAAAAAAAAAAECAwQFBgcICQoL/8QAtRAAAgEDAwIEAwUFBAQAAAF9AQIDAAQRBRIhMUEGE1FhByJxFDKBkaEII0KxwRVS0fAkM2JyggkKFhcYGRolJicoKSo0NTY3ODk6Q0RFRkdISUpTVFVWV1hZWmNkZWZnaGlqc3R1dnd4eXqDhIWGh4iJipKTlJWWl5iZmqKjpKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uHi4+Tl5ufo6erx8vP09fb3+Pn6/8QAHwEAAwEBAQEBAQEBAQAAAAAAAAECAwQFBgcICQoL/8QAtREAAgECBAQDBAcFBAQAAQJ3AAECAxEEBSExBhJBUQdhcRMiMoEIFEKRobHBCSMzUvAVYnLRChYkNOEl8RcYGRomJygpKjU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3eHl6goOEhYaHiImKkpOUlZaXmJmaoqOkpaanqKmqsrO0tba3uLm6wsPExcbHyMnK0tPU1dbX2Nna4uPk5ebn6Onq8vP09fb3+Pn6/9oADAMBAAIRAxEAPwDw6HTNKtNTXUtXvLqGIHe1zfYzLnkYB5OfTjmtyx8cWNvZPJpmhtZ2xP7y8ucebn2U9P1qm0nhK4tZz4Zs572WVtsmoXXzJIxP3lB4B+nFdX4R+C+peJZLbUdfgklWFQRZRnt/ePqa8GpWjA+qo0ZTRH4H0GdrWfxLdSHyJyzvO8mfO/8A1dq8u+MfiWLX9Qj0fSWKxC3fzZlAzFECOOnVm217v8XLuHwv4PuIn0vyLa1G20jQYAJGMn8a+YNe1KbQNCvNXvYdt1cTJHZK3LzysSQq/THSs8PVUpOQ61OUUoFDQ/B9x4m8W2nhkIy2VjAdQ1+YnKwRrggZ6c+lee/tAX0Gr+IrrXYoCjXcix6ZagdIUyi5Hocbvxr3/wAQafL8J/hpb+G711t9c8Qw/wBoa9dSf8srWP5jH7buAR3rwvwN4D8TfHX4pwadpVjOZb2cpZqW+SGEnl2/D7vpXRSrpxdSWyM3QfOqS3Z237C37Jc/xu8cQx31o7aHpbJPf6gv3bifcCI/p2r9U9F8Aad4c0C30/SbNYEiQJ5X/PMAYH51zH7K37O/hz4F/Dy20TTLICUQr57seWb1PrXpGoXUUoaJxsZRwB396+Zx2Iq1qj7I97B4dUY8p4p+0NooPgTVIXQF3t3KH3CmvyC+JfhKWxb7Ax/e3WquxPYfNzX7KfGeNLrRbu3uFBT7LIRn/dNfkV8YtRtYfFlpZ53ZuJ3BPs5FevkD92ovM8zOoyj7N90ed+KvDxsryyhVtyyWUi89zuNcv4j0ww/E29s1LD5I3VQexTFeh61FFNc6MZUDMzSoFI7A5rnPFmleV8W/tjx7hc6dA4A+hFfQnhJJnJ+JbKf+01nK5WWzU5I6gHBrN8SWDXNjd6LMnzLCHQ9xxmuw8WWJSzgkULvjhaF19MtkVheKS0V1Hc7BuuVVG+gGKlP3zOokloef2Ci805LUuGeTdEd394d6+9P+CM15qD2XiHTRcubNrwTMjciN9oGB6dK+DZLdLHU5oYRtG7zIsfw89q+8v+CIWrW0njXxp4NnjzHcwR3tuMfdIA3AVRz1G0tD9EfOt/8An4k/Kit37Hpv/PiP++aKDDnl3OE/Z+/Zrm1zUI9Y1CIw2OStlAwznvuAHHOf0r6XT4P6R4S0O20jR9Ljk1C9cxRIo/1aYy7N9K6n9nfwZDH4RsNSlgEEMdqBc38i/LbpjkKPXGOfeuJ/bb+O+nfBrwJqVr4BBHinV7IxpxuOnWB+US57PJ1x6CvkKq+s1eTufd0ZuhRcz43/AGy/G/hi98dH4WeH/JudL0APLq1+r5E9xnI59BXkPwO+H1t8XfiFc/F3xZ+78PaHC8lt5v3YypB3DPckAfiaxdfsLrxbrFl4C0GOaIXtyJ9Zu2yZZsn7pr1X9oPxbYfAj4UWnwZ8MWUQ1XVQk94qdYox9xcehBzz3UV3yXKo4ePQ44yTTrT3ex4r+0j4pPijxa/h2yvTL9quRc6lKHwAoOLexT/dzlvrX2J/wTh/ZIj0fSX8ceIdMQXEifugBwOSOOOnFfHXwg+FHjD4gfE3To38O3F+iyfa289cb5c/NI2OnoB7V933fx9+Onwt8KQWGjeHLa2toYAkKR2hYnHqcVlia8IuNNK6R1YOjUcXNrVn1FFpljYW/wBhWEcIBwRxXMeLrSO3U3cCHBGNwFfFup/8FGf2hvD17IbzwdZXJXOPMt2Qn8hUFl/wVT17ULxYvGnwtNrF0aSzkL/iVrx6ictj0KUorRn0J8SNl9atBIoMUvyvuHVcf44r8U/j/quoWXxPlsbmNlfTNWubYgevng/y5r9WLT9pLwZ8TbA3vhy9BddshhYncgzzkHpX5u/8FF/htJ4N+NN3r+jKBaeJP9MhkboJFxuH19q9HKJKlNw7nFm1Pnw0J9UeWXXjCRSl6E/eWkjCJ89eGH+FUbnxjNqPiEeJLuUZjt1hhB/iORWNdXsN6BJbSg2jjO7vuNU72CC6vLe0SYBBIuBnvnvX0zaex8vFqSujt/EV5bapK6Iyfwnn3FcN4tu3knAcAGBtq4NbVtdRDfACHcTYyD2Fc/rlvLPqMq/KuZAQCe1YwbdZp7FyjFw1OfvLVpHFyR3xmvqT/gkR47/4Qj9q+x0kSEQaxZvbybjwG5I/PFfNGq2zxWRYEDY+SPWu5/ZI8Uv4X+P/AIa1uG4MQi1eFHYHnlx/jXU5ROCqmkfvp/Ys3980Vj/8LDk/57R/nRUXRyn0d4n+MWjeAvBkelaLZC6mtIlje2EYxcTfwRL65J546V8D/t9/GJvAKX3h7xXfW+peIJ5k1HxFPBgpJKR/o9mh9I+4Hevp2fxjoXwk8Ca3+1D8RoilxpQaPwlpkq5HmuoC3Ei/xEZ4BzjFfnz8O/h54i/ba/aEvtf8W6yttoOk3bX+sapLzCGySS2O1fLULcntOvQ+8lCUvc6dTT/Z50SfwZ4Yu/jx8RwUaMNNLLOPuswyqrnqf9kc0fB74XeJP2o/iJqnxk8Z2rjTrGTF5PO33mYhoLJP+mmAWb0AArpviqn/AAv34g2HwU+HNwLLw7osoFu0owrqrDN3N6MB9xT175r6R8F+EtF8JeEtO8BfDfSJJdJ0ouUkEf7y8mb79xJ6u3OD6dMVNbEzhG32n+Hka0cJGrNS+zHT1Nf4B+AvBnwysm1/WNCt4BNzcSSsAEP9wZ546YFaHxn+MHgyy8Pvdy6AVseSst
2sdujD/ZMhBI9wK5nxjB+0B4jgOi/CbwjYQywAeRd6x8wVsH+E9TXwN+354F8V+CvF8Fh8YbXWvG2r+IfD0pi8QXepvb2+j3IXBhjhX5eCMjPWs8Bg5Y58kpWNsfjvqUeaMb+h7t4z/aE+DU2p7bizMKbTiWIJNHt9Tisybwz8K/GTQ6xp4t50kH+vhTaEz6rX58fsV/CDxb8QvGV5bavqWorpNlp7Jc3QuG+SXnbg5wa+mPhPqPiD4TfENvAur6mt7buf9Afd80691PPWqx2WLCO6lexyYHMp4puMoWPoXw98ARab9T8N6hiN/lj8w/OCf6VzH7Qv7GF58cPAF14U1HV1h1JAbjTLw52284HGf9k9MDvXrnwHu7/xVeNZWcTOY/vKx5TngfhzXtWvfDhtP0ea4nQrlAWwMjgV51CtJS5l0PTq0YVI8k9mfgN8XPgt8Y/gv4rm8JeOPCF3bzpISRHCXjmGc+YhHYjnHWuJvItZt5ftMNtcb1ORHJCymv18/aa1bQxqIh1iytrp4AVWWaIZiGevTmvmrxDovhjxXctHY6HbSz7iHKwKNo9cmvo6WbVHTXMtT53EZXCFS0HofDen+JNTt3802J3hssHyKq6prt5JqQup0kjVyD8wOCfQV9d6t4D+Bnhm/KeItR0+O66yISDj6YNc74t8A/Bjx3Y/ZrW/tgUciJYG2t9cYrphi5SfM4s5Z4F7KaPm+9vUvbKSQxOmCq4fvnvW18CGt7X4l6NfXbDaNXj+Vumd4A/UCtD4rfDjTPAUjT2uoyyQcYEwGR6fWqf7P2jy+Kfizo2nQpJ5EmqR9B0O7P8AOu2nL2kOZHm16TirH7S/25P/AM9LX8z/AI0Vm/8ACubn/npN+dFXys845z/gov8AtIX3xR8R3PwY8K6z5WkaFctHe3aLvE9y3/LNQPvBRtHHQg1wOjfFHVfA3w1tP2ePgho7y63ft5mop5JZp3cceaw6qD/D2q/4N/ZL+Mmp+GdHTwP4burrWfETPMLu5TP2dGOTO2eSWJYj2xX6Rf8ABOr/AIJJeFvgnpUfxC+J8banr9zGHuJ5k3E5GcDPTmvloVIRp8tPWR93LmT5qukevdo8J/Y4/wCCdfjTS9L/AOEp8Xxub7UcT6tPKSfOkPUJxyo7V9V2fwI0rwTZRW9pbOFhUbAVOa+p/wDhF9D0KwWxsNLjSKNcRjZjiuL8Z2VlPC7GFQT90YqJ4eUIuc3q9TrpYv2tlBWj0PBj4b02zuWR0IOMjjj8a8L/AGpfgF8OfidF53ieCJzEjFRNEHQHHavpDxbpcULyjeR3yteU+O2sr6N7aZMhUO4kda4VWnTm3FndCgqj1jdH55+Iv2frHwU02m+C7u0sLFZGfyrKAoW471wGk/s36dqnjm21yy1K/wDttrcrIsnmsUbnkYNfcPiH4faNqt3lLQD229a2fAfwI0KLVIpEsIyxOWIXin9Zry0buOeBo025R0Iv2NfgTeabdzeILqzbZdy8Bv7uOP619AfErwCkXhK4hSABhCRz9K7T4MfD2DTtHitYbNQFbsK674i+As+FZ5DBngj3+7W9HC/u23ocNSq1VSR+GP7b817pfxEm0meYEtOwAz/DzXzP8VvHvjTwro9ho3hfwzc3j6nOs+qtbQkMbVWGYgy9Cf5V9lf8FNfAFxoXxKGqBQoLMcEV8/8AgbW7mwnjEZQvjYu4ZGOta4acYNSavY5MXTlO6Ttc+PI/Ds/i/wCOBsNBtbu3s7vUy5tZXZvIjzna2fxr2n4/fDXwh4Os7LUPCTpBqaoqboJMbm9frXvmufDvwj4keTWbzQlhvjgtc2qKhPryBzXnnif4U6CL77UWuJPLcNH9ok3EV7M81o1I8qVmebHLpUVzuTZ4H8fEvv8AhDrO58QSO12IYU+Ycsc/rXr3/BNr4EJ49+LnhS2hR3vJtclupXhOUit7dASGHbnP5V57+1rpix6PosgDYk1EALjl+SFXH9K/RT/git+zbceEfh/ffG7xTY7L7VXaysIiMCFIxhmAPQsSc16GB5alDU8rGy5JcqPsD/hDbT/ngf8Avmiu1/sFPUUV3eyXc8o+jv2UPg3oqCXxXdaYiK7NDZJtBEMEZ2Io/AZ7da+jpPKtLBI4FHyrgDAFcv8ADbw7BoGhQacLUR+Uu1wON3PWuivJUERxnr0r5bC4eNCF1ufU4+s8TiVfZGFrd2Xg2s+MjGa878cXBHyqeF4yO9dxrUkZtSxHO7GPSuM122WfLMuQTXPipSuz1MHGNjyPxzd5eXII4rx7xjZz3LkwuVBbOQOo9K9t8d6aHmkKxkjpwK848SaLgALGRg9DXlODbufQ4erZJHmK20Vtc7nAJB6kV6N8ILW0mvzLKUK7h1riNf0mSNmmC8YOOe9bn7PE91q/xBtdBnzsdwWUHrTpJe0RWJ1gz7B+Gmh2M9jCygKD0xXReONJs30NoWX5QGH14pnhLwxf6bAj2jlUQD5CvWnfFLVYLLw4zi5jVgpyA2STXvJL6ufJ1K3Nikon5H/8FgfhpG+lrr1na4aNiC47ivz00nbbXiLEQxVgeDzn0r9Vf+Cm8llq/wAPriKXGRGSmeufTFfljotkJNaljVfuuSR75ryI7tHqVEuZHpmhSLqdqscp2/KOlUfF3hu0W2ZwvzY64q34Zj+zOok6ECpfGt5CbJxCx3BeFAqrIppKBh/s8/stWP7Q/wAc9A1LxJZiXw/4TvXvr+KVciWVAWVT7Gv0n/Zn0K2tvh+rW9rHbxzapcSwxRxbVRGc4AH4V84/sVeF5PCPwP1fxpexlJtYdjAzLjgDYpHtmvrz4SaMdK8GaZYOwJjtQz5GCCx3Y/Wvq8BBqij4fH1E8Q7nT/ZE9R+VFXvs8P8Ac/WivWPNPuq12BMDAOag1i1nu7B4LWcRyMflYCltX2yMH4OKkklTZuzwK+Xd7HvvdHM+IPLCYXHPXFcvqvlG2YN1zxjrXVeIYfLjMgA4HFcjqU29SpUDnrmuCqnfU93DPQ5DxZpccg3RHAzzXmvi+wdZTtQkAHkCvUte3SExoM8Vy2taRCsLSTclhnbXLUWmh6dOo4s8Y1jRbq8BgEf04xmui+FXgK88Gazb+MLO3CzxPn5jkEeldBZaPa3moFTECoPGa6E2JtljWDhV5xjioo07aoJYhtWaKfx0/az/AGhtE0GOL4LfCOLV7rbtxe6gIIj68is3Qfjrq3iz4YS698TNIXR9UsI8alp0d35qrIemG7ir2qXiW1q864LA/vF3cEZ7DtXx1+0v8W9e1XxpqnhLw34gezjitpGdbVgQSB/FjqaVWpVg9WOFOi43UEvM4/8Abq+Lg8VtLp1rMgjddqNMcBh6818VWOk21l4luIoZInJOWKHI/Csz4j/8Jf4r8es/iTxdqV0sLnyYnuWVQc+n9K2fD2k/ZWEoQZwMsWyTTpwcVfuctSsnW0OjsowrRlG4XPekTRbzxb4os/DWnxM0tzMquPRMjJ/KkhlECEsPvE5PoK9K/Zo8ORw6rc+LtYtt8u39wX/gA7iurDUnOumZY7E+y
wzfU+gtH0S10/RvDXwu0wKIWmRHSMjiFBzwPcZr6N8KRwFEZY8ZUADHHHFeA/CC6HiHxNceKWRTFDEILE47d2H1r6B8Jgi3QmvsKCskfATk5TbZ0eI/RaKbsb0orqIuj7G1DVI7S4eRyQD0UdqdbanDd2oaOXJIzj0rnfF+ri3EjlgAK4q1+JEmh3xeebdAT857r9K+DeKaqWlsfdxwMZUYyPQ/ENyDbkt0A5FclNEkuZNhxnjmrcni+x1a3TyZVk3c5XoPas+6vYI3KRvwR0qpVFNGtOk4mRq23cWYDg4ArmfFMqRwM/onSug124hC7gT69a43xXqkLWpTBLbDnBrHY7VscrN438OeGZXOpakqFeRIeF/M1yXxC/bp/Z0+FsCweIPiPYXN4w/48bWZXf6cHr7V43+0f+ydrf7THiC3027+IGpaRZ2kxbZY3DL5oHO0kHv0rzDV/wDgkJ+yrpmrt4hvZLxddQCSK5mvZGDsB1OWxRRtUi3LQ9HAZdTx7XO9e3c9q1X/AIKHw+PLC+g+G3gRJEjQp5l2ArAHvj8K+XPjb8Vv+EJ1hdYufD1rLq2sW0hZoTwi45JHr+Nc98Tvg5qPw2kn1HwP8TLqGZJirJIuQwA6DA6fWvmn47aB+0VqmjjVLb4kRwSO5ZblYyWVQfu4OQKqOGhUloz3quXPAUGvY9N9zZ1HWZdV1qPUxCXW4LFgo5j+talrK/mhI2+XHJzXlvwy+JXi+98Uad4L8T6PHczTDEuoxHaHxwTjtmvZ9Q0a0snWW1RmMgwsacnNOcXF2Pi6kYczaViTw7p134h1e10eAjM8gDFj0x1r2rwzpcOlzRaXDfnYjBZAh6+ozXI+CPAMvhzSBrN6P39yd6BhzHxW3ocgstRi+Y5L5OT1r2cHh3TpczWrPlswxvtans09j6b+FS2lpaxQ28aJGMYRRXtHgyUNECW4J4HpXz/8LNVSSKHc3XFe5eDbxRbja3Oa9qhsjw60W5HdbR/fFFUPtx9VorqMuVn0R8QtY3xSor9GzxXkPizxGVLujEBR0Brr/GniASpKofndnGO1ePeNNbmM0pUjHTOa/KcROXNc/WcPCLjY3NI+K+reFtstmnn2zNmWFm5A9vSu/wDD/wASdD8UwLPpl6rttzLGZDuSvnWXXFUKZG474qsniXUdHl/tfw7ceVOh4V2wGHXB/KnSxU0+WxFSgrcyPpi+vPte4hsqvA+bNctrdu9y+1W4Jwea5X4e/H/w/wCJZ10fWHGnaiqjzopW/dy/TNdjcXVvJKoUhi3IwMcdq6lU5nucyb7HEX2nSWd87QPs2gsNvrXmHxlv9YktDJYzlZ1Q4VYwQ31r2rVNNt5pXkfGADmvK/iJb2SR3BVRtweamTai7HTTk4SXI7Hxl8W9C8W+I9QkOpyoDj/Vqh5H4dK8c8e+CtQi037I88IhXqIY8sPzr6o8fW9qb2Rggx7V8+fFbVrfSri4SNcEDc5I4A6dacJ1OWy3O2rmmNUOXndrdTyjSPB+laHdtqsEZEnH7x1G4j+lfRf7OP7OeseLrQfEPxdA0Ol2z77GKQbftj/3h6KPT2qx+yJ+x9q3xguoPiZ8QtOa38MwTbraBn2vqEinOCDyIx9Oa+wdb0qwg0MaVp9ikMFuoS3hiHCKBivqMry6pKCq1lqj4PN80bg4U3fuz5c8e6SILySBEUKjYAQYUewriZIfJ1ILjo/Few/EbQzvmBj/AI68t1+z+yzllXgda9Sorux85CTWp6f8IdVCmKJjnaQOa+hfAt8kkargYr5R+E+sFLyNN3fivpH4eX5NvGCw61tR0Lk+bU9K+0+9FVftY9TRXUZnrPi//WS/7v8AjXkXjHo/+/RRX5NXP1nDdTjLn7g+lQn/AFK/9dBRRWcSp/w2cj4t/wCRjtv+uy/zr6T8H/8AIBtv+uIooropfEccfhLlz/yD5v8Ark38jXlPjv8A48ZfoaKK3l8JcPiR87fEv/j4P+61fN/xY/15/wCvhP8A0IUUVvg/40fUxxn8KXofo58J/wDkifhX/sGxf+ihTtQ+5J9KKK/SqX8JH5vX+16niPxM/wCPif8A368e8Wffk+lFFefU+NmS2RP8Mf8AkKR/71fSnw5/49ovrRRWlE06HotFFFdJB//Z"
base64_cv2(s)
| 438.5 | 9,266 | 0.94589 | [
"Apache-2.0"
] | Cambio-Project/trainticket-fork | ts-avatar-service/base64toimage.py | 9,647 | Python |
# Copyright (c) 2015 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import importutils
from cinder import exception
from cinder.i18n import _
from cinder.i18n import _LE
from cinder.i18n import _LI
from cinder.volume import configuration as config
from cinder.zonemanager import utils as fczm_utils
LOG = logging.getLogger(__name__)
controller_list = ['A', 'B', 'C', 'D']
zone_manager_opts = [
cfg.StrOpt('zone_driver',
default='cinder.zonemanager.drivers.brocade.brcd_fc_zone_driver'
'.BrcdFCZoneDriver',
help='FC Zone Driver responsible for zone management')
]
class FCZoneHelper(object):
"""FC zone helper for Huawei driver."""
def __init__(self, zm, client):
self.zm = zm
self.client = client
def _check_fc_port_and_init(self, wwns, hostid, fabric_map, nsinfos):
"""Check FC port on array and wwn on host is connected to switch.
If no FC port on array is connected to switch or no ini on host is
connected to switch, raise a error.
"""
if not fabric_map:
msg = _('No FC port on array is connected to switch.')
LOG.error(msg)
raise exception.CinderException(msg)
no_wwn_connected_to_switch = True
for wwn in wwns:
formatted_initiator = fczm_utils.get_formatted_wwn(wwn)
for fabric in fabric_map:
nsinfo = nsinfos[fabric]
if formatted_initiator in nsinfo:
no_wwn_connected_to_switch = False
self.client.ensure_fc_initiator_added(wwn, hostid)
break
if no_wwn_connected_to_switch:
msg = _('No wwn on host is connected to switch.')
LOG.error(msg)
raise exception.CinderException(msg)
def build_ini_tgt_map(self, wwns, host_id, port_list, is_add):
fabric_map = self.zm.get_san_context(port_list)
nsinfos = {}
cfgmap_from_fabrics = {}
for fabric in fabric_map:
nsinfos[fabric] = self._get_nameserver_info(fabric)
cfgmap_from_fabric = self._get_active_zone_set(fabric)
cfgmap_from_fabrics[fabric] = cfgmap_from_fabric
self._check_fc_port_and_init(wwns, host_id, fabric_map, nsinfos)
return self._build_ini_tgt_map(wwns, is_add, nsinfos,
cfgmap_from_fabrics)
def _build_ini_tgt_map(self, wwns, need_add_con, nsinfos,
cfgmap_from_fabrics):
tgt_port_wwns = []
init_targ_map_total = {}
fabric_maps = {}
for contr in controller_list:
port_list_from_contr = self.client.get_fc_ports_from_contr(contr)
if port_list_from_contr:
fabric_map = self.zm.get_san_context(port_list_from_contr)
fabric_maps[contr] = fabric_map
for wwn in wwns:
init_targ_map = {}
tmp_port_list = []
tgt_port_for_map = []
tmp_flag = False
need_new_zone = False
for contr in fabric_maps:
(fc_port_for_zone, tmp_flag) = \
self._get_one_fc_port_for_zone(wwn, contr, nsinfos,
cfgmap_from_fabrics,
fabric_maps)
if tmp_flag:
need_new_zone = True
if fc_port_for_zone:
tgt_port_wwns.append(fc_port_for_zone)
if not tmp_flag:
tgt_port_for_map.append(fc_port_for_zone)
if tmp_flag:
tmp_port_list.append(fc_port_for_zone)
init_targ_map[wwn] = tmp_port_list
LOG.debug("tmp_port_list: %s" % tmp_port_list)
init_targ_map_total[wwn] = tgt_port_for_map
if need_new_zone and need_add_con:
LOG.debug("Got init_targ_map to create zone: %s"
% init_targ_map)
self.zm.add_connection(init_targ_map)
tgt_port_wwns = list(set(tgt_port_wwns))
return (tgt_port_wwns, init_targ_map_total)
def _get_fabric_vendor(self):
zone_config = config.Configuration(zone_manager_opts,
'fc-zone-manager')
fabric_driver = zone_config.zone_driver
LOG.debug('Using fabric driver: %s' % fabric_driver)
driver_vendor = None
try:
driver_vendor = fabric_driver.split('.')[3]
except Exception:
msg = _('Get fabric driver vendor error.')
LOG.exception(msg)
raise exception.VolumeBackendAPIException(data=msg)
return driver_vendor
def _get_nameserver_info(self, fabric):
driver_vendor = self._get_fabric_vendor()
if driver_vendor == 'brocade':
nsinfo = self._get_brcd_nsinfo(fabric)
elif driver_vendor == 'cisco':
nsinfo = self._get_cisco_nsinfo(fabric)
else:
msg = ('Unsupported fabric, vendor name: %s.' % driver_vendor)
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return nsinfo
def _get_cisco_config(self, fabric):
fabric_ip = self.zm.driver.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_address')
fabric_user = self.zm.driver.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_user')
fabric_pwd = self.zm.driver.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_password')
fabric_port = self.zm.driver.fabric_configs[fabric].safe_get(
'cisco_fc_fabric_port')
zoning_vsan = self.zm.driver.fabric_configs[fabric].safe_get(
'cisco_zoning_vsan')
return (fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan)
def _get_brcd_nsinfo(self, fabric):
conn = self.zm.driver._get_cli_client(fabric)
try:
nsinfo = conn.get_nameserver_info()
LOG.debug("name server info from fabric: %s", nsinfo)
conn.cleanup()
except exception.BrocadeZoningCliException:
if not conn.is_supported_firmware():
msg = _("Unsupported firmware on switch %s. Make sure "
"switch is running firmware v6.4 or higher."
) % conn.switch_ip
LOG.error(msg)
raise exception.FCZoneDriverException(msg)
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Error getting name server info."))
except Exception:
msg = _("Failed to get name server info.")
LOG.exception(msg)
raise exception.FCZoneDriverException(msg)
return nsinfo
def _get_cisco_nsinfo(self, fabric):
(fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan) = (
self._get_cisco_config(fabric))
try:
conn = importutils.import_object(
self.zm.driver.configuration.cisco_sb_connector,
ipaddress=fabric_ip,
username=fabric_user,
password=fabric_pwd, port=fabric_port,
vsan=zoning_vsan)
nsinfo = conn.get_nameserver_info()
LOG.debug("name server info from fabric: %s",
nsinfo)
conn.cleanup()
except exception.CiscoZoningCliException:
with excutils.save_and_reraise_exception():
LOG.exception(_LE("Error getting show fcns database "
"info."))
except Exception:
msg = ("Failed to get show fcns database info.")
LOG.exception(msg)
raise exception.FCZoneDriverException(msg)
return nsinfo
def _get_one_fc_port_for_zone(self, initiator, contr, nsinfos,
cfgmap_from_fabrics, fabric_maps):
"""Get on FC port per one controller.
task flow:
1. Get all the FC port from the array.
2. Filter out ports belonged to the specific controller
and the status is connected.
3. Filter out ports connected to the fabric configured in cinder.conf.
4. Get active zones set from switch.
5. Find a port according to three cases.
"""
LOG.info(_LI("Get in function _get_one_fc_port_for_zone. "
"Initiator: %s"), initiator)
formatted_initiator = fczm_utils.get_formatted_wwn(initiator)
fabric_map = fabric_maps[contr]
if not fabric_map:
return (None, False)
port_zone_number_map = {}
for fabric in fabric_map:
LOG.info(_LI("Dealing with fabric: %s"), fabric)
nsinfo = nsinfos[fabric]
if formatted_initiator not in nsinfo:
continue
final_port_list_per_fabric = fabric_map[fabric]
cfgmap_from_fabric = cfgmap_from_fabrics[fabric]
zones_members = cfgmap_from_fabric['zones'].values()
for port in final_port_list_per_fabric:
port_zone_number_map[port] = 0
formatted_port = fczm_utils.get_formatted_wwn(port)
for zones_member in zones_members:
if formatted_port in zones_member:
# For the second case use.
if formatted_initiator in zones_member:
# First case: found a port in the same
# zone with the given initiator.
return (port, False)
# For the third case use.
port_zone_number_map[port] += 1
        if not port_zone_number_map:
            return (None, False)
        # Third case: pick the port referenced by the fewest zones.
        temp_list = sorted(port_zone_number_map.items(), key=lambda d: d[1])
return (temp_list[0][0], True)
def _get_active_zone_set(self, fabric):
driver_vendor = self._get_fabric_vendor()
if driver_vendor == 'brocade':
conn = self.zm.driver._get_cli_client(fabric)
cfgmap_from_fabric = self.zm.driver._get_active_zone_set(conn)
conn.cleanup()
elif driver_vendor == 'cisco':
(fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan) = (
self._get_cisco_config(fabric))
cfgmap_from_fabric = self.zm.driver.get_active_zone_set(
fabric_ip, fabric_user, fabric_pwd, fabric_port, zoning_vsan)
else:
msg = ('Unsupported fabric, vendor name: %s.' % driver_vendor)
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
return cfgmap_from_fabric
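# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the driver): the fallback selection rule of
# _get_one_fc_port_for_zone reduced to plain data. When no array port already
# shares a zone with the initiator, the port referenced by the fewest zones is
# chosen and flagged as needing a new zone. The WWPN keys are hypothetical.
def _example_pick_least_zoned_port(port_zone_counts):
    # e.g. port_zone_counts = {'20:00:xx:01': 3, '20:00:xx:02': 0}
    if not port_zone_counts:
        return (None, False)
    least_zoned = sorted(port_zone_counts.items(), key=lambda item: item[1])[0][0]
    return (least_zoned, True)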
| 41.274021 | 79 | 0.604846 | [
"Apache-2.0"
] | Huawei/OpenStack_Driver | Cinder/Mitaka/extend/fc_zone_helper.py | 11,598 | Python |
# This code is licensed under the MIT License (see LICENSE file for details)
import concurrent.futures as futures
import contextlib
import inspect
import json
import logging
import pathlib
import platform
import sys
import time
from zplib import datafile
from zplib.image import threaded_io
from elegant import load_data
from ..util import log_util
from ..util import timer
class DummyIO:
def __init__(self, logger):
self.logger = logger
def write(self, *args, **kws):
self.logger.warning('Trying to write files, but file writing was disabled!')
def wait(self):
return
class TimepointHandler:
IMAGE_COMPRESSION = threaded_io.COMPRESSION.DEFAULT
LOG_LEVEL = logging.INFO
IO_THREADS = 4
MAX_IO_JOBS = 256 # max pending image writes before the threaded IO will block.
def __init__(self, data_dir, log_level=None, scope_host='127.0.0.1', dry_run=False):
"""Setup the basic code to take a single timepoint from a timecourse experiment.
Parameters:
data_dir: directory where the data and metadata-files should be read/written.
            log_level: level from logging library at which to log information to the
logfile in data_dir. (Subclasses can log information with self.logger)
If not specified, fall back to the class attribute LOG_LEVEL. This
allows a subclass to set a default log level, which still can be
over-ridden from the command line.
scope_host: IP address to connect to the scope server. If None, run without
a scope server.
dry_run: if True, do not write any files (including log files; log entries
will be printed to the console).
"""
self.data_dir = pathlib.Path(data_dir).resolve() # get an absolute path
self.experiment_metadata_path = self.data_dir / 'experiment_metadata.json'
with self.experiment_metadata_path.open('r') as f:
self.experiment_metadata = json.load(f)
self.experiment_metadata['node'] = platform.node()
self.positions = self.experiment_metadata['positions'] # dict mapping names to (x,y,z) stage positions
self.skip_positions = set()
annotations = load_data.read_annotations(self.data_dir)
for position in self.positions.keys():
if position in annotations:
position_annotations, timepoint_annotations = annotations[position]
if position_annotations.get('exclude'):
self.skip_positions.add(position)
else:
for annotation in timepoint_annotations.values():
if annotation.get('stage') == 'dead':
self.skip_positions.add(position)
break
if scope_host is not None:
from .. import scope_client
self.scope = scope_client.ScopeClient(scope_host)
if hasattr(self.scope, 'camera'):
self.scope.camera.return_to_default_state()
else:
self.scope = None
self.write_files = not dry_run
self.logger = log_util.get_logger(str(data_dir))
if log_level is None:
log_level = self.LOG_LEVEL
elif isinstance(log_level, str):
log_level = getattr(logging, log_level)
self.logger.setLevel(log_level)
if self.write_files:
self.image_io = threaded_io.ThreadedIO(self.IO_THREADS, self.MAX_IO_JOBS)
handler = logging.FileHandler(str(self.data_dir/'acquisitions.log'))
else:
self.image_io = DummyIO(self.logger)
handler = logging.StreamHandler()
handler.setFormatter(log_util.get_formatter())
self.logger.addHandler(handler)
self._job_thread = None
def heartbeat(self):
print('heartbeat') # write a line to stdout to serve as a heartbeat
@contextlib.contextmanager
def heartbeat_timer(self):
heartbeat_timer = timer.Timer(self.heartbeat, interval=60)
yield
heartbeat_timer.stop()
@contextlib.contextmanager
def debug_timing(self, task):
t0 = time.time()
yield
self.logger.debug(f'{task} complete ({{:.1f}} seconds)', time.time() - t0)
def run_all_positions(self):
for position_name, position_coords in sorted(self.positions.items()):
if position_name not in self.skip_positions:
self.logger.info(f'Acquiring Position {position_name}')
with self.debug_timing(f'Position {position_name}'):
self.run_position(position_name, position_coords)
self.heartbeat()
def run_timepoint(self, scheduled_start):
try:
self.heartbeat()
self.timepoint_prefix = time.strftime('%Y-%m-%dt%H%M')
self.scheduled_start = scheduled_start
self.start_time = time.time()
self._job_futures = []
self.logger.info('Starting timepoint {} ({:.0f} minutes after scheduled)', self.timepoint_prefix,
(self.start_time-self.scheduled_start)/60)
# record the timepoint prefix and timestamp for this timepoint into the
# experiment metadata
self.experiment_metadata.setdefault('timepoints', []).append(self.timepoint_prefix)
self.experiment_metadata.setdefault('timestamps', []).append(self.start_time)
self.logger.info('Configuring timepoint')
with self.debug_timing('Configuration'):
self.configure_timepoint()
self.heartbeat()
self.run_all_positions()
self.finalize_timepoint()
self.heartbeat()
self.end_time = time.time()
self.experiment_metadata.setdefault('durations', []).append(self.end_time - self.start_time)
if self.write_files:
self._write_atomic_json(self.experiment_metadata_path, self.experiment_metadata)
run_again = self.skip_positions != self.positions.keys() # don't run again if we're skipping all the positions
# wait for all queued background jobs to complete.
with self.debug_timing('Image IO'), self.heartbeat_timer():
self.image_io.wait()
if self._job_futures:
# wait for all queued background jobs to complete.
with self.debug_timing('Background jobs'), self.heartbeat_timer():
futures.wait(self._job_futures)
# now get the result() from each future, which will raise any errors encountered
# during the execution.
[f.result() for f in self._job_futures]
self.cleanup()
self.logger.info('Timepoint {} ended ({:.0f} minutes after starting)', self.timepoint_prefix,
(time.time()-self.start_time)/60)
if run_again:
return self.get_next_run_time()
except:
self.logger.error('Exception in timepoint:', exc_info=True)
raise
def add_background_job(self, function, *args, **kws):
"""Add a function with parameters *args and **kws to a queue to be completed
asynchronously with the rest of the timepoint acquisition. This will be
run in a background thread, so make sure that the function acts in a
threadsafe manner. (NB: self.logger *is* thread-safe.)
All queued functions will be waited for completion before the timepoint
ends. Any exceptions will be propagated to the foreground after all
functions queued either finish or raise an exception.
"""
if self._job_thread is None:
self._job_thread = futures.ThreadPoolExecutor(max_workers=1)
self._job_futures.append(self._job_thread.submit(function, *args, **kws))
def _position_metadata(self, position_name):
position_dir = self.data_dir / position_name
metadata_path = position_dir / 'position_metadata.json'
if metadata_path.exists():
with metadata_path.open('r') as f:
position_metadata = json.load(f)
else:
position_metadata = []
return position_dir, metadata_path, position_metadata
def run_position(self, position_name, position_coords):
"""Do everything required for taking a timepoint at a single position
EXCEPT focusing / image acquisition. This includes moving the stage to
the right x,y position, loading and saving metadata, and saving image
data, as generated by acquire_images()"""
timestamp = time.time()
position_dir, metadata_path, position_metadata = self._position_metadata(position_name)
position_dir.mkdir(exist_ok=True)
if self.scope is not None:
with self.debug_timing('Stage positioning'):
self.scope.stage.position = position_coords
        images, image_names, new_metadata = self.acquire_images(position_name, position_dir, position_metadata)
        if new_metadata is None:
            new_metadata = {}
        new_metadata['timestamp'] = timestamp
        new_metadata['timepoint'] = self.timepoint_prefix
        position_metadata.append(new_metadata)
        self.finalize_acquisition(position_name, position_dir, position_metadata)
        image_paths = [position_dir / (self.timepoint_prefix + ' ' + name) for name in image_names]
        if self.write_files:
self.image_io.write(images, image_paths, self.IMAGE_COMPRESSION)
self._write_atomic_json(metadata_path, position_metadata)
def _write_atomic_json(self, out_path, data):
datafile.json_encode_atomic_legible_to_file(data, out_path)
def configure_timepoint(self):
"""Override this method with global configuration for the image acquisitions
(e.g. camera configuration). Member variables 'scope', 'experiment_metadata',
'timepoint_prefix', and 'positions' may be specifically useful."""
pass
def finalize_timepoint(self):
"""Override this method with global finalization after the images have been
acquired for each position. Useful for altering the self.experiment_metadata
dictionary before it is saved out.
"""
pass
def finalize_acquisition(self, position_name, position_dir, position_metadata):
"""Called after acquiring images for a single postiion.
Parameters:
position_name: name of the position in the experiment metadata file.
position_dir: pathlib.Path object representing the directory where
position-specific data files and outputs are written. Useful for
reading previous image data.
position_metadata: list of all the stored position metadata from the
previous timepoints, in chronological order. This includes data
from the latest timepoint, accessible as: position_metadata[-1].
"""
pass
def cleanup(self):
"""Override this method with any global cleanup/finalization tasks
that may be necessary."""
pass
def get_next_run_time(self):
"""Override this method to return when the next timepoint run should be
scheduled. Returning None means no future runs will be scheduled."""
return None
def acquire_images(self, position_name, position_dir, position_metadata):
"""Override this method in a subclass to define the image-acquisition sequence.
All most subclasses will need to do is return the following as a tuple:
(images, image_names, new_metadata), where:
images is a list of the acquired images
image_names is a list of the generic names for each of these images
(not timepoint- or position-specific; e.g. 'GFP.png' or some such)
new_metadata is a dictionary of timepoint-specific information, such
as the latest focal plane z-position or similar. This will be
made available to future acquisition runs via the 'position_metadata'
argument described below.
The images and metadata will be written out by the superclass, and
must not be written by the overriding subclass.
Optionally, subclasses may choose to enter 'position_name' into the
self.skip_positions set to indicate that in the future this position
should not be acquired. (E.g. the worm is dead.)
Parameters:
position_name: identifier for this image-acquisition position. Useful
for adding this position to the skip_positions set.
position_dir: pathlib.Path object representing the directory where
position-specific data files and outputs should be written. Useful
only if additional data needs to be read in or out during
acquisition. (E.g. a background model or similar.)
position_metadata: list of all the stored position metadata from the
previous timepoints, in chronological order. In particular, this
dictionary is guaranteed to contain 'timestamp' which is the
time.time() at which that acquisition was started. Other values
(such as the latest focal plane) stored by previous acquisition
runs will also be available. The most recent metadata will be in
position_metadata[-1].
"""
raise NotImplementedError()
@classmethod
def main(cls, timepoint_dir=None, **cls_init_args):
"""Main method to run a timepoint.
Parse sys.argv to find an (optional) scheduled_start time as a positional
argument. Any arguments that contain an '=' will be assumed to be
python variable definitions to pass to the class init method. (Leading
'-' or '--' will be stripped, and internal '-'s will be converted to '_'.)
e.g. this allows the following usage: ./acquire.py --dry-run=True --log-level=logging.DEBUG
Parameters:
timepoint_dir: location of timepoint directory. If not specified, default
to the parent dir of the file that defines the class that this
method is called on.
**cls_init_args: dict of arguments to pass to the class init method.
"""
if timepoint_dir is None:
timepoint_dir = pathlib.Path(inspect.getfile(cls)).parent
scheduled_start = None
for arg in sys.argv[1:]:
if arg.count('='):
while arg.startswith('-'):
arg = arg[1:]
arg = arg.replace('-', '_')
# execute the argument in a restricted namespace containing only 'logging', and store the
# result in the args to pass to the class.
exec(arg, dict(logging=logging), cls_init_args)
elif scheduled_start is None:
scheduled_start = float(arg)
else:
raise ValueError('More than one schedule start time provided')
if scheduled_start is None:
scheduled_start = time.time()
handler = cls(timepoint_dir, **cls_init_args)
next_run_time = handler.run_timepoint(scheduled_start)
if next_run_time:
print('next run:{}'.format(next_run_time))
| 47.837423 | 122 | 0.648092 | [
"MIT"
] | drew-sinha/rpc-scope | scope/timecourse/base_handler.py | 15,595 | Python |
from logging import warning
from os import path
from typing import Optional, List
from lxml.html import HtmlElement
from manga_py.http import Http
from .params import ProviderParams
class Base(ProviderParams):
_storage = None
_params = None
_image_params = None
_http_kwargs = None
__http = None
__arguments = None
chapter_id = 0
quiet = False
original_url = None
def __init__(self):
self._storage = {
'cookies': {},
'main_content': None,
'chapters': [],
'current_chapter': 0,
'current_file': 0,
'proxies': {},
'domain_uri': None,
}
self._params = {
'destination': 'Manga',
'cf-protect': False,
}
self._image_params = {
'crop': (0, 0, 0, 0),
# 'crop': (left, upper, right, lower)
'auto_crop': False,
# 'auto_crop': True,
}
self._http_kwargs = {}
def _archive_type(self) -> str:
arc_type = 'zip'
if self._params['cbz']:
arc_type = 'cbz'
return arc_type
def get_url(self):
return self._params['url']
def _build_http_params(self, params):
if params is None:
params = {}
params.setdefault('allow_webp', not self._params.get('disallow_webp', None))
params.setdefault('referer', self._storage.get('referer', self.domain))
params.setdefault('user_agent', self._get_user_agent())
params.setdefault('proxies', self._storage.get('proxies', None))
params.setdefault('cookies', self._storage.get('cookies', None))
params.setdefault('kwargs', self._http_kwargs)
return params
def http(self, new=False, params=None) -> Http:
http_params = self._build_http_params(params)
if new:
http = Http(**http_params)
return http
elif not self.__http:
self.__http = Http(**http_params)
return self.__http
def http_get(self, url: str, headers: dict = None, cookies: dict = None):
return self.http().get(url=url, headers=headers, cookies=cookies)
def http_post(self, url: str, headers: dict = None, cookies: dict = None, data=()):
return self.http().post(url=url, headers=headers, cookies=cookies, data=data)
def _get_user_agent(self):
ua_storage = self._storage.get('user_agent', None)
ua_params = self._params.get('user_agent', None)
if self._params.get('cf_scrape', False):
return ua_storage
return ua_params
@classmethod
def __normalize_chapters(cls, n, element):
if isinstance(element, HtmlElement):
return n(element.get('href'))
if isinstance(element, str):
return n(element)
return element
def _prepare_chapters(self, chapters):
n = self.http().normalize_uri
items = []
if chapters and len(chapters):
for i in chapters:
url = self.__normalize_chapters(n, i)
items.append(url)
else:
warning('Chapters list empty. Check %s' % self.get_url())
return items
def get_current_file(self):
return self._storage['files'][self._storage['current_file']]
def book_meta(self) -> dict:
return {}
def _image_name(self, idx, filename):
if idx is None:
idx = self._storage['current_file']
fn, extension = path.splitext(filename)
_path = '{:0>3}_{}'.format(idx, fn)
if self._params['rename_pages']:
_path = '{:0>3}'.format(idx)
return _path + extension
def chapter_for_json(self) -> str:
return self.chapter
def put_info_json(self, meta):
# manga_name, url, directory
pass
def _fill_arguments(self, arguments: List[str]):
know_args = [
'login',
'password',
'language',
'translator',
]
if self.__arguments is None:
self.__arguments = {}
for arg in arguments:
key, value = arg.split('=', 1) # type: str, str
if key in know_args:
self.__arguments[key] = value
def arg(self, key: str) -> Optional[str]:
if self.__arguments is None:
return None
return self.__arguments.get(key)
def allow_auto_change_url(self):
return True
| 29.748344 | 87 | 0.573241 | [
"MIT"
] | Ever4engel/manga-py | manga_py/base_classes/base.py | 4,492 | Python |
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'aa!b!ug6opqr*_f60k&%orwoqus_ecvlgjtsn0y)c)1o7-_at&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'myApp'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'myProject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'myProject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
] | 25.517857 | 91 | 0.689293 | [
"MIT"
] | anthonyc1/django-materialize-boilerplate | myProject/settings.py | 2,858 | Python |
# coding: utf-8
"""
Genomic Data Store Service
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from libica.openapi.libgds.configuration import Configuration
class FolderUpdateRequest(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'metadata': 'object',
'acl': 'list[str]'
}
attribute_map = {
'metadata': 'metadata',
'acl': 'acl'
}
def __init__(self, metadata=None, acl=None, local_vars_configuration=None): # noqa: E501
"""FolderUpdateRequest - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._metadata = None
self._acl = None
self.discriminator = None
if metadata is not None:
self.metadata = metadata
if acl is not None:
self.acl = acl
@property
def metadata(self):
"""Gets the metadata of this FolderUpdateRequest. # noqa: E501
Metadata about this folder and its contents # noqa: E501
:return: The metadata of this FolderUpdateRequest. # noqa: E501
:rtype: object
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this FolderUpdateRequest.
Metadata about this folder and its contents # noqa: E501
:param metadata: The metadata of this FolderUpdateRequest. # noqa: E501
:type: object
"""
self._metadata = metadata
@property
def acl(self):
"""Gets the acl of this FolderUpdateRequest. # noqa: E501
Optional array to replace the acl on the resource. # noqa: E501
:return: The acl of this FolderUpdateRequest. # noqa: E501
:rtype: list[str]
"""
return self._acl
@acl.setter
def acl(self, acl):
"""Sets the acl of this FolderUpdateRequest.
Optional array to replace the acl on the resource. # noqa: E501
:param acl: The acl of this FolderUpdateRequest. # noqa: E501
:type: list[str]
"""
self._acl = acl
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, FolderUpdateRequest):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, FolderUpdateRequest):
return True
return self.to_dict() != other.to_dict()
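# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the generated code): building an update
# request and dumping it to a plain dict before handing it to a folders API
# call. The metadata payload and ACL entry are assumptions for demonstration.
def _example_folder_update_request():
    request = FolderUpdateRequest(
        metadata={'project': 'demo'},
        acl=['cid:example-client-id'],
    )
    return request.to_dict()  # {'metadata': {'project': 'demo'}, 'acl': ['cid:example-client-id']}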
| 28.503311 | 124 | 0.58434 | [
"MIT"
] | umccr-illumina/libica | libica/openapi/libgds/models/folder_update_request.py | 4,304 | Python |
import numpy as np
from mendeleev import element as md_element
from basisopt import api, data
from basisopt.exceptions import PropertyNotAvailable
from basisopt.basis import even_temper_expansion
from basisopt.basis.guesses import null_guess
from .preconditioners import unit
from .strategies import Strategy
_INITIAL_GUESS = (0.3, 2.0, 8)
class EvenTemperedStrategy(Strategy):
""" Implements a strategy for an even tempered basis set, where each angular
momentum shell is described by three parameters: (c, x, n)
Each exponent in that shell is then given by
y_k = c*(x**k) for k=0,...,n
--------------------------- ALGORITHM ----------------------------
Evaluate: energy (can change to any RMSE-compatible property)
Loss: root-mean-square error
Guess: null, uses _INITIAL_GUESS above
Pre-conditioner: None
Initialisation:
- Find minimum no. of shells needed
- max_l >= min_l
- generate initial parameters for each shell
First run:
- optimize parameters for each shell once, sequentially
Next shell in list not marked finished:
- re-optimise
- below threshold or n=max_n: mark finished
- above threshold: increment n
Repeat until all shells are marked finished.
Uses iteration, limited by two parameters:
max_n: max number of exponents in shell
target: threshold for objective function
------------------------------------------------------------------
Additional attributes:
shells (list): list of (c, x, n) parameter tuples
shell_done (list): list of flags for whether shell is finished (0) or not (1)
target (float): threshold for optimization delta
max_n (int): maximum number of primitives in shell expansion
max_l (int): maximum angular momentum shell to do;
if -1, does minimal configuration
first_run (bool): setting to True restarts optimization from beginning
last_objective (var): last value of objective function
"""
def __init__(self, eval_type='energy', target=1e-5, max_n=18, max_l=-1):
Strategy.__init__(self, eval_type=eval_type, pre=unit)
self.name = 'EvenTemper'
self.shells = []
self.shell_done = []
self.last_objective = 0
self.target = target
self.guess = null_guess
self.guess_params = {}
self.max_n = max_n
self.max_l = max_l
self.first_run = True
def set_basis_shells(self, basis, element):
"""Expands parameters into a basis set"""
basis[element] = even_temper_expansion(self.shells)
def initialise(self, basis, element):
if self.max_l < 0:
el = md_element(element.title())
l_list = [l for (n, l) in el.ec.conf.keys()]
min_l = len(set(l_list))
self.max_l = max(min_l, self.max_l)
self.shells = [_INITIAL_GUESS] * self.max_l
self.shell_done = [1] * self.max_l
self.set_basis_shells(basis, element)
self.last_objective = 0
def get_active(self, basis, element):
(c, x, _) = self.shells[self._step]
return np.array([c, x])
def set_active(self, values, basis, element):
(c, x, n) = self.shells[self._step]
c = max(values[0], 1e-5)
x = max(values[1], 1.01)
self.shells[self._step] = (c, x, n)
self.set_basis_shells(basis, element)
def next(self, basis, element, objective):
delta_objective = np.abs(self.last_objective - objective)
self.last_objective = objective
carry_on = True
if self.first_run:
self._step = self._step + 1
if self._step == self.max_l:
self.first_run = False
self._step = 0
(c, x, n) = self.shells[self._step]
self.shells[self._step] = (c, x, min(n+1, self.max_n))
else:
if delta_objective < self.target:
self.shell_done[self._step] = 0
self._step = (self._step + 1) % self.max_l
(c, x, n) = self.shells[self._step]
if n == self.max_n:
self.shell_done[self._step] = 0
elif self.shell_done[self._step] != 0:
self.shells[self._step] = (c, x, n+1)
carry_on = np.sum(self.shell_done) != 0
return carry_on
| 37.758065 | 89 | 0.566852 | ["MIT"] | robashaw/basisopt | basisopt/opt/eventemper.py | 4,682 | Python |
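A minimal, self-contained sketch of the even-tempered rule described in the EvenTemperedStrategy docstring above (y_k = c*x**k for k = 0..n); it does not touch the basisopt API, and the (c, x, n) values are simply the module's _INITIAL_GUESS defaults.
import numpy as np

def even_temper_shell(c, x, n):
    # One shell's exponents under the even-tempered rule quoted in the docstring above.
    return np.array([c * x**k for k in range(n + 1)])

print(even_temper_shell(0.3, 2.0, 8))  # 9 exponents, from 0.3 up to 0.3 * 2**8 = 76.8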
import os
import sys
sys.path.append(os.getcwd())
import numpy as np
import torch
import flow
from utils import cdfDiscreteLogitstic, cdfMixDiscreteLogistic
from utils import logDiscreteLogistic, logMixDiscreteLogistic
nbins = 4096
_bins = torch.arange(-nbins // 2, nbins // 2).reshape(-1, 1, 1, 1, 1)
decimal = flow.ScalingNshifting(256, -128)
def test_disLogisticCDF():
logscale = torch.tensor(
[[[[-3.6826, -3.0157, -3.6032],
[-3.7063, -3.0269, -3.5338],
[-3.5311, -2.9907, -3.3516],
[-3.9300, -3.3121, -3.8110]],
[[-3.1022, -3.0692, -3.2039],
[-2.9466, -3.0006, -3.2969],
[-2.7636, -2.5691, -2.9628],
[-3.3657, -3.2948, -3.5318]],
[[-3.9748, -3.0670, -3.2399],
[-3.9312, -3.0055, -3.1729],
[-3.8588, -2.9139, -3.1794],
[-4.1534, -3.2404, -3.5665]]]]
)
mean = torch.tensor(
[[[[ 0.0191, 0.0459, 0.0131],
[-0.0059, 0.0254, -0.0100],
[ 0.0359, 0.0406, 0.0242],
[ 0.0331, 0.0438, 0.0255]],
[[ 0.0214, 0.0502, 0.0622],
[ 0.0371, 0.0368, 0.0517],
[ 0.0217, 0.0855, 0.0874],
[ 0.0144, 0.0475, 0.0470]],
[[-0.0602, -0.0791, -0.0784],
[-0.0443, -0.0765, -0.0701],
[-0.0654, -0.0709, -0.0788],
[-0.0608, -0.0721, -0.0688]]]]
)
bins = _bins - 1 + torch.round(decimal.forward_(mean))
cdf = cdfDiscreteLogitstic(bins, mean, logscale, decimal=decimal).detach().numpy()
pList = []
for i in range(bins.shape[0]):
logp = logDiscreteLogistic(bins[i: i + 1], mean, logscale, decimal=decimal).detach().numpy()
pList.append(np.exp(logp).reshape(mean.shape))
pList = np.array(pList)
_cdf = np.cumsum(pList, 0)
assert np.allclose(cdf, _cdf)
def test_mixDixLogisticCDF():
mean = torch.tensor(
[[[[-0.2414, 0.2089, -0.0209, -0.1279]],
[[ 0.7791, 0.1031, 0.0940, 0.1678]],
[[ 0.0095, 0.0391, -0.0318, -0.2183]]],
[[[-0.1466, 0.2090, -0.0594, -0.0837]],
[[ 0.8711, 0.0540, 0.0940, 0.0859]],
[[-0.0683, -0.0204, -0.0340, -0.0587]]],
[[[-0.1994, -0.0442, -0.0307, -0.0823]],
[[ 1.0158, 0.0636, 0.0832, 0.0717]],
[[-0.1863, -0.0177, -0.0293, -0.0708]]],
[[[-0.3517, 0.1062, -0.0362, -0.1661]],
[[ 0.6567, 0.1452, 0.0294, 0.0864]],
[[-0.1384, -0.0171, -0.0195, -0.0710]]],
[[[-0.3158, 0.2068, 0.1114, -0.1251]],
[[ 0.5600, 0.1987, 0.1891, 0.1754]],
[[-0.2758, -0.1032, -0.0435, -0.1156]]]])
logscale = torch.tensor(
[[[[-3.1292, -4.0168, -3.2886, -2.5948]],
[[-2.8226, -2.3489, -2.8613, -2.3892]],
[[-3.3502, -3.4929, -2.9572, -2.7060]]],
[[[-3.4556, -4.0166, -2.7471, -3.1203]],
[[-2.6906, -3.6062, -2.8620, -3.0673]],
[[-3.2775, -3.3661, -3.2897, -4.0553]]],
[[[-3.4652, -3.3828, -3.3053, -3.6945]],
[[-2.7657, -2.9172, -3.4067, -3.7734]],
[[-3.4817, -3.0397, -2.8021, -3.1398]]],
[[[-2.7246, -3.7798, -4.1237, -2.8605]],
[[-3.0524, -2.6628, -2.4833, -3.0913]],
[[-4.0249, -3.8364, -3.7608, -2.7111]]],
[[[-3.5460, -4.0208, -2.9837, -3.1288]],
[[-3.2062, -2.1702, -2.2238, -2.6122]],
[[-3.1754, -3.0892, -2.3359, -2.4321]]]])
mixing = torch.tensor(
[[[[ 1.3161, 0.8664, 1.7648, -0.7598, -0.8658],
[-3.7472, -3.6553, 5.2783, 0.2242, -3.6304],
[-0.7378, 0.2730, 1.8044, 0.7450, -1.6218],
[-0.8105, 1.8833, 1.8243, -0.7879, -1.1211]]],
[[[ 1.3952, -0.8232, -1.0135, 1.8041, 0.9846],
[-0.4372, 1.1296, 1.5473, -0.0661, -0.5995],
[-0.5167, 1.5559, 1.2607, -0.3227, -0.8687],
[-0.6226, 1.5024, 1.4221, 1.4741, -0.4409]]],
[[[ 1.3045, 1.8551, 0.1755, -0.6253, -1.2045],
[-0.9858, 1.5529, -0.6332, 1.4569, -1.1089],
[-0.5954, 1.2305, 1.4068, 0.7919, -0.3811],
[-0.2997, 0.6804, 2.0660, 1.1353, -0.9155]]]])
bins = _bins - 1 + torch.round(decimal.forward_(mean.permute([1, 2, 3, 0])) * mixing).sum(-1).reshape(1, *mean.shape[1:])
cdf = cdfMixDiscreteLogistic(bins, mean, logscale, mixing, decimal=decimal)
pList = []
for i in range(bins.shape[0]):
logp = logMixDiscreteLogistic(bins[i: i + 1], mean, logscale, mixing, decimal=decimal).detach().numpy()
pList.append(np.exp(logp).reshape(logp.shape[1:]))
pList = np.array(pList)
_cdf = np.cumsum(pList, 0)
assert np.allclose(cdf, _cdf)
if __name__ == "__main__":
test_disLogisticCDF()
    test_mixDixLogisticCDF()
| 36.19084 | 125 | 0.494832 | ["Apache-2.0"] | li012589/NeuralWavelet | test/test_cdf.py | 4,741 | Python |
# Copyright 2019 TerraPower, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
import shutil
import string
import armi
from armi import runLog
from armi.utils import pathTools
def _changeDirectory(destination):
if os.path.exists(destination):
os.chdir(destination)
else:
raise IOError(
"Cannot change directory to non-existent location: {}".format(destination)
)
class DirectoryChanger(object):
"""
Utility to change directory.
Parameters
----------
destination : str
Path of directory to change into
filesToMove : list of str, optional
Filenames to bring from the CWD into the destination
filesToRetrieve : list of str, optional
Filenames to bring back from the destination to the cwd
dumpOnException : bool, optional
Flag to tell system to retrieve the entire directory if an exception
is raised within a the context manager.
Use with 'with' statements to execute code in a different dir, guaranteeing a clean
return to the original directory
>>> with DirectoryChanger('C:\\whatever')
... pass
"""
def __init__(
self, destination, filesToMove=None, filesToRetrieve=None, dumpOnException=True
):
"""Establish the new and return directories"""
self.initial = pathTools.armiAbsPath(os.getcwd())
self.destination = None
if destination is not None:
self.destination = pathTools.armiAbsPath(destination)
self._filesToMove = filesToMove or []
self._filesToRetrieve = filesToRetrieve or []
self._dumpOnException = dumpOnException
def __enter__(self):
"""At the inception of a with command, navigate to a new directory if one is supplied."""
runLog.debug("Changing directory to {}".format(self.destination))
self.moveFiles()
self.open()
return self
def __exit__(self, exc_type, exc_value, traceback):
"""At the termination of a with command, navigate back to the original directory."""
runLog.debug("Returning to directory {}".format(self.initial))
if exc_type is not None and self._dumpOnException:
runLog.info(
"An exception was raised within a DirectoryChanger. "
"Retrieving entire folder for debugging."
)
self._retrieveEntireFolder()
else:
self.retrieveFiles()
self.close()
def __repr__(self):
"""Print the initial and destination paths"""
return "<{} {} to {}>".format(
self.__class__.__name__, self.initial, self.destination
)
def open(self):
"""
User requested open, used to stalling the close from a with statement.
This method has been made for old uses of :code:`os.chdir()` and is not
recommended. Please use the with statements
"""
if self.destination:
_changeDirectory(self.destination)
def close(self):
"""User requested close."""
if self.initial != os.getcwd():
_changeDirectory(self.initial)
def moveFiles(self):
initialPath = self.initial
destinationPath = self.destination
self._transferFiles(initialPath, destinationPath, self._filesToMove)
def retrieveFiles(self):
"""Retrieve any desired files."""
initialPath = self.destination
destinationPath = self.initial
fileList = self._filesToRetrieve
self._transferFiles(initialPath, destinationPath, fileList)
def _retrieveEntireFolder(self):
"""Retrieve all files."""
initialPath = self.destination
destinationPath = self.initial
folderName = os.path.split(self.destination)[1]
destinationPath = os.path.join(destinationPath, f"dump-{folderName}")
fileList = os.listdir(self.destination)
self._transferFiles(initialPath, destinationPath, fileList)
@staticmethod
def _transferFiles(initialPath, destinationPath, fileList):
"""
Transfer files into or out of the directory.
.. warning:: On Windows the max number of characters in a path is 260.
If you exceed this you will see FileNotFound errors here.
"""
if not fileList:
return
if not os.path.exists(destinationPath):
os.mkdir(destinationPath)
for ff in fileList:
if isinstance(ff, tuple):
# allow renames in transit
fromName, destName = ff
else:
fromName, destName = ff, ff
fromPath = os.path.join(initialPath, fromName)
toPath = os.path.join(destinationPath, destName)
runLog.extra("Copying {} to {}".format(fromPath, toPath))
shutil.copy(fromPath, toPath)
class TemporaryDirectoryChanger(DirectoryChanger):
"""
Create temporary directory, changes into it, and if there is no error/exception
generated when using a :code:`with` statement, it deletes the directory.
Notes
-----
If there is an error/exception generated while in a :code:`with` statement, the
temporary directory contents will be copied to the original directory and then the
temporary directory will be deleted.
"""
_home = armi.context.FAST_PATH
def __init__(
self, root=None, filesToMove=None, filesToRetrieve=None, dumpOnException=True
):
DirectoryChanger.__init__(
self, root, filesToMove, filesToRetrieve, dumpOnException
)
root = root or TemporaryDirectoryChanger._home
if not os.path.exists(root):
os.makedirs(root)
self.initial = os.path.abspath(os.getcwd())
self.destination = TemporaryDirectoryChanger.GetRandomDirectory(root)
while os.path.exists(self.destination):
self.destination = TemporaryDirectoryChanger.GetRandomDirectory(root)
@classmethod
def GetRandomDirectory(cls, root):
return os.path.join(
root,
"temp-"
+ "".join(
random.choice(string.ascii_letters + string.digits) for _ in range(10)
),
)
def __enter__(self):
os.mkdir(self.destination)
return DirectoryChanger.__enter__(self)
def __exit__(self, exc_type, exc_value, traceback):
DirectoryChanger.__exit__(self, exc_type, exc_value, traceback)
shutil.rmtree(self.destination)
class ForcedCreationDirectoryChanger(DirectoryChanger):
"""
Creates the directory tree necessary to reach your desired destination
Attributes
----------
clean : bool
if True and the directory exists, clear all contents on entry.
"""
def __init__(
self,
destination,
filesToMove=None,
filesToRetrieve=None,
dumpOnException=True,
clean=False,
):
DirectoryChanger.__init__(
self, destination, filesToMove, filesToRetrieve, dumpOnException
)
self.clean = clean
def __enter__(self):
if not os.path.exists(self.destination):
runLog.debug(f"Creating destination folder {self.destination}")
try:
os.makedirs(self.destination)
except OSError:
# even though we checked exists, this still fails
# sometimes when multiple MPI nodes try
# to make the dirs due to I/O delays
runLog.debug(f"Failed to make destination folder")
else:
runLog.debug(f"Destination folder already exists: {self.destination}")
DirectoryChanger.__enter__(self)
if self.clean:
shutil.rmtree(".", ignore_errors=True)
return self
def directoryChangerFactory():
if armi.MPI_SIZE > 1:
from .directoryChangersMpi import MpiDirectoryChanger
return MpiDirectoryChanger
else:
return DirectoryChanger
| 33.766798 | 97 | 0.64626 | ["Apache-2.0"] | sammiller11235/armi | armi/utils/directoryChangers.py | 8,543 | Python |
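A short usage sketch for the directory changers above (illustrative only: it assumes the armi package is importable and that the listed paths and files exist; both are hypothetical).
# Hypothetical paths and filenames, for illustration only.
from armi.utils.directoryChangers import DirectoryChanger, TemporaryDirectoryChanger

# Work inside another directory, copying an input file in and a result back out.
with DirectoryChanger("/path/to/workdir", filesToMove=["input.yaml"], filesToRetrieve=["result.csv"]):
    pass  # do work relative to /path/to/workdir

# Throw-away work in a temporary directory that is deleted again on a clean exit.
with TemporaryDirectoryChanger():
    pass  # scratch work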
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
import os
import zipfile
from dataclasses import dataclass
from io import BytesIO
from typing import Iterable
from pants.backend.python.subsystems.setuptools import PythonDistributionFieldSet
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.pex import Pex, PexRequest, PexRequirements
from pants.backend.python.util_rules.pex import rules as pex_rules
from pants.backend.python.util_rules.python_sources import PythonSourceFiles
from pants.build_graph.address import Address
from pants.core.goals.package import BuiltPackage, PackageFieldSet
from pants.core.util_rules.source_files import SourceFiles
from pants.engine.addresses import Addresses
from pants.engine.fs import (
EMPTY_SNAPSHOT,
Digest,
DigestContents,
DigestSubset,
MergeDigests,
PathGlobs,
Snapshot,
)
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import TransitiveTargets, TransitiveTargetsRequest
from pants.util.docutil import doc_url
from pants.util.meta import frozen_after_init
logger = logging.getLogger(__name__)
@frozen_after_init
@dataclass(unsafe_hash=True)
class LocalDistsPexRequest:
"""Request to build the local dists from the dependency closure of a set of addresses."""
addresses: Addresses
interpreter_constraints: InterpreterConstraints
# The result will return these with the sources provided by the dists subtracted out.
# This will help the caller prevent sources from appearing twice on sys.path.
sources: PythonSourceFiles
def __init__(
self,
addresses: Iterable[Address],
*,
interpreter_constraints: InterpreterConstraints = InterpreterConstraints(),
sources: PythonSourceFiles = PythonSourceFiles(
SourceFiles(EMPTY_SNAPSHOT, tuple()), tuple()
),
) -> None:
self.addresses = Addresses(addresses)
self.interpreter_constraints = interpreter_constraints
self.sources = sources
@dataclass(frozen=True)
class LocalDistsPex:
"""A PEX file containing locally-built dists.
Can be consumed from another PEX, e.g., by adding to PEX_PATH.
Lists the files provided by the dists on sys.path, so they can be subtracted from
sources digests, to prevent the same file ending up on sys.path twice.
"""
pex: Pex
# The sources from the request, but with any files provided by the local dists subtracted out.
remaining_sources: PythonSourceFiles
@rule(desc="Building local distributions")
async def build_local_dists(
request: LocalDistsPexRequest,
) -> LocalDistsPex:
transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest(request.addresses))
applicable_targets = [
tgt for tgt in transitive_targets.closure if PythonDistributionFieldSet.is_applicable(tgt)
]
python_dist_field_sets = [
PythonDistributionFieldSet.create(target) for target in applicable_targets
]
dists = await MultiGet(
[Get(BuiltPackage, PackageFieldSet, field_set) for field_set in python_dist_field_sets]
)
# The primary use-case of the "local dists" feature is to support consuming native extensions
# as wheels without having to publish them first.
# It doesn't seem very useful to consume locally-built sdists, and it makes it hard to
# reason about possible sys.path collisions between the in-repo sources and whatever the
# sdist will place on the sys.path when it's installed.
# So for now we simply ignore sdists, with a warning if necessary.
provided_files = set()
wheels = []
all_contents = await MultiGet(Get(DigestContents, Digest, dist.digest) for dist in dists)
for dist, contents, tgt in zip(dists, all_contents, applicable_targets):
artifacts = set((a.relpath or "") for a in dist.artifacts)
# A given local dist might build a wheel and an sdist (and maybe other artifacts -
# we don't know what setup command was run...)
# As long as there is a wheel, we can ignore the other artifacts.
wheel = next((art for art in artifacts if art.endswith(".whl")), None)
if wheel:
wheel_content = next(content for content in contents if content.path == wheel)
wheels.append(wheel)
buf = BytesIO()
buf.write(wheel_content.content)
buf.seek(0)
with zipfile.ZipFile(buf) as zf:
provided_files.update(zf.namelist())
else:
logger.warning(
f"Encountered a dependency on the {tgt.alias} target at {tgt.address.spec}, but "
"this target does not produce a Python wheel artifact. Therefore this target's "
"code will be used directly from sources, without a distribution being built, "
"and therefore any native extensions in it will not be built.\n\n"
f"See {doc_url('python-distributions')} for details on how to set up a {tgt.alias} "
"target to produce a wheel."
)
dists_digest = await Get(Digest, MergeDigests([dist.digest for dist in dists]))
wheels_digest = await Get(Digest, DigestSubset(dists_digest, PathGlobs(["**/*.whl"])))
dists_pex = await Get(
Pex,
PexRequest(
output_filename="local_dists.pex",
requirements=PexRequirements(wheels),
interpreter_constraints=request.interpreter_constraints,
additional_inputs=wheels_digest,
internal_only=True,
),
)
# We check source roots in reverse lexicographic order,
# so we'll find the innermost root that matches.
source_roots = list(reversed(sorted(request.sources.source_roots)))
remaining_sources = set(request.sources.source_files.files)
unrooted_files_set = set(request.sources.source_files.unrooted_files)
for source in request.sources.source_files.files:
if source not in unrooted_files_set:
for source_root in source_roots:
if (
source.startswith(source_root)
and os.path.relpath(source, source_root) in provided_files
):
remaining_sources.remove(source)
remaining_sources_snapshot = await Get(
Snapshot,
DigestSubset(
request.sources.source_files.snapshot.digest, PathGlobs(sorted(remaining_sources))
),
)
subtracted_sources = PythonSourceFiles(
SourceFiles(remaining_sources_snapshot, request.sources.source_files.unrooted_files),
request.sources.source_roots,
)
return LocalDistsPex(dists_pex, subtracted_sources)
def rules():
return (*collect_rules(), *pex_rules())
| 40.045977 | 100 | 0.708668 | ["Apache-2.0"] | chebbyChefNEQ/pants | src/python/pants/backend/python/util_rules/local_dists.py | 6,968 | Python |
from fastai.data.all import IntToFloatTensor
from fastai.vision.learner import *
from fastai.vision.augment import *
from fastai.vision.core import PILImageBW, PILImage
from fastai.vision.data import *
from preprocessing.transforms import *
# from preprocessing.dicom import *
from preprocessing.misc import *
def get_item_tfms(run_params):
item_tfms = []
# if run_params['HIST_CLIPPING']:
# item_tfms.append(XRayPreprocess(PIL_cls=PILImageBW, cut_min=run_params['HIST_CLIPPING_CUT_MIN'], cut_max=run_params['HIST_CLIPPING_CUT_MAX'], np_input=len(item_tfms) > 0, np_output=True))
if run_params["KNEE_LOCALIZER"]:
item_tfms.append(
KneeLocalizer(
run_params["KNEE_SVM_MODEL_PATH"],
PIL_cls=PILImageBW,
resize=run_params["RESIZE"],
np_input=len(item_tfms) > 0,
np_output=True,
)
)
else:
item_tfms.append(
Resize(
run_params["RESIZE"], method=ResizeMethod.Pad, pad_mode=PadMode.Zeros
)
)
if run_params["BACKGROUND_PREPROCESS"]:
item_tfms.append(
BackgroundPreprocess(
PIL_cls=PILImageBW, np_input=len(item_tfms) > 0, np_output=True
)
)
# item_tfms.append(RandomResizedCrop(RANDOM_RESIZE_CROP))
# Histogram scaling DICOM on the fly
if run_params["CLAHE_SCALED"]:
item_tfms.append(
CLAHE_Transform(
PIL_cls=PILImageBW,
grayscale=not run_params["SELF_SUPERVISED"],
np_input=len(item_tfms) > 0,
np_output=False,
)
)
elif run_params["HIST_SCALED"]:
if run_params["HIST_SCALED_SELF"]:
bins = None
else:
# bins = init_bins(fnames=L(list(final_df['Original'].values)), n_samples=100)
all_valid_raw_preprocess = pd.concat(
[pd.Series(unlabel_all_df.index), label_df["Raw_preprocess"]]
)
bins = init_bins(
fnames=L(list(all_valid_raw_preprocess.values)),
n_samples=100,
isDCM=False,
)
# item_tfms.append(HistScaled(bins))
item_tfms.append(HistScaled_all(bins))
return item_tfms
def get_batch_tfms(run_params):
label_tfms = [
IntToFloatTensor(div=2 ** 16 - 1),
*aug_transforms(
pad_mode=PadMode.Zeros,
mult=1.0,
do_flip=True,
flip_vert=False,
max_rotate=90.0,
min_zoom=0.9,
max_zoom=1.2,
max_lighting=0.4,
max_warp=0.4,
p_affine=0.9,
p_lighting=0.9,
mode="bilinear",
align_corners=True,
),
RandomResizedCropGPU(
run_params["RANDOM_RESIZE_CROP"], min_scale=run_params["RANDOM_MIN_SCALE"]
),
# Normalize() # Issue with CPU vs GPU interaction
]
unlabel_tfms = [[IntToFloatTensor(div=2 ** 16 - 1)]]
if run_params["SSL"] == run_params["SSL_FIX_MATCH"]:
weak_transform = [
IntToFloatTensor(div=1),
RandomResizedCropGPU(
run_params["RANDOM_RESIZE_CROP"],
min_scale=run_params["RANDOM_MIN_SCALE"],
),
Flip(),
# Normalize()
]
unlabel_tfms.append(weak_transform)
strong_transform = [
IntToFloatTensor(div=1),
RandomResizedCropGPU(
run_params["RANDOM_RESIZE_CROP"],
min_scale=run_params["RANDOM_MIN_SCALE"],
),
Flip(),
Rotate(180),
Brightness(),
Contrast(),
RandomErasing(),
# Normalize()
]
unlabel_tfms.append(strong_transform)
elif run_params["SSL"] == run_params["SSL_MIX_MATCH"]:
unlabel_transform = [
IntToFloatTensor(div=2 ** 16 - 1),
RandomResizedCropGPU(
run_params["RANDOM_RESIZE_CROP"],
min_scale=run_params["RANDOM_MIN_SCALE"],
),
Flip(),
Rotate(180),
Brightness(),
Contrast(),
# Normalize()
]
unlabel_tfms.append(unlabel_transform)
return label_tfms, unlabel_tfms
| 30.303448 | 197 | 0.561447 | ["MIT"] | lluissalord/radiology_ai | train/tfms.py | 4,394 | Python |
import sys,os
try:
import argparse
except ImportError:
print("""ERROR: Could not import argparse
Either use python2.7 or later (perhaps in a strange location such as
/bgsys/tools/python2.7.5-gnu-20130730/bin/hostpython) or install from
PyPI (https://pypi.python.org/pypi/argparse/).""")
sys.exit(1)
def mkdir_p(path):
    try:
        os.makedirs(path)
    except OSError:
        # The directory may already exist; os.errno is unavailable on Python 3.7+,
        # so check the path directly instead of comparing against os.errno.EEXIST.
        if not os.path.isdir(path):
            raise
def main():
parser = argparse.ArgumentParser(description='Configure High-performance Geometric Multigrid (HPGMG)')
parser.add_argument('--arch', help='Name of this configuration', default=None)
parser.add_argument('--petsc-dir', help='PETSC_DIR', default=os.environ.get('PETSC_DIR',''))
parser.add_argument('--petsc-arch', help='PETSC_ARCH', default=os.environ.get('PETSC_ARCH',''))
parser.add_argument('--with-hpm', help='libHPM profiling library on Blue Gene ("1" or "/path/to/libmpihpm.a /path/to/libbgpm.a")')
cf = parser.add_argument_group('Compilers and flags')
cf.add_argument('--CC', help='Path to C compiler', default=os.environ.get('CC',''))
cf.add_argument('--CFLAGS', help='Flags for C compiler', default=os.environ.get('CFLAGS',''))
cf.add_argument('--CPPFLAGS', help='Flags for C preprocessor', default=os.environ.get('CPPFLAGS',''))
cf.add_argument('--LDFLAGS', help='Flags to pass to linker', default=os.environ.get('LDFLAGS',''))
cf.add_argument('--LDLIBS', help='Libraries to pass to linker', default=os.environ.get('LDLIBS',''))
fe = parser.add_argument_group('Finite Element options')
fe.add_argument('--fe', action='store_true', dest='fe', help='Build the Finite-Element solver')
fv = parser.add_argument_group('Finite Volume options')
fv.add_argument('--no-fv', action='store_false', dest='fv', help='Do not build the Finite-Volume solver')
fv.add_argument('--no-fv-mpi', action='store_false', dest='fv_mpi', help='Use MPI')
fv.add_argument('--fv-cycle', help='Multigrid cycle type', choices=['V','F','U'], default='F')
fv.add_argument('--no-fv-subcomm', action='store_false', dest='fv_subcomm', help='Build a subcommunicator for each level in the MG v-cycle to minimize the scope of MPI_AllReduce()')
fv.add_argument('--fv-coarse-solver', help='Use BiCGStab as a bottom (coarse grid) solver', choices=['bicgstab','cabicgstab','cg','cacg'], default='bicgstab')
fv.add_argument('--fv-smoother', help='Multigrid smoother', choices=['cheby','gsrb','jacobi','l1jacobi'], default='gsrb')
args = parser.parse_args()
if args.arch is None:
args.arch = args.petsc_arch
if not args.arch:
args.arch = 'build'
mkdir_p(args.arch)
configure(args)
def configure(args):
open(os.path.join(args.arch,'Makefile'), 'w').write(makefile(args))
reconfname = os.path.join(args.arch,'reconfigure-%s.py' % args.arch)
open(reconfname, 'w').write('\n'.join([
'#!'+sys.executable,
'import os,sys',
'from argparse import Namespace',
"sys.path.insert(0, os.path.abspath('.'))",
'import hpgmgconf',
'hpgmgconf.configure(%r)' % args,
]))
os.chmod(reconfname,0o755)
print('Configuration complete in: %s' % os.path.realpath(args.arch))
print('To build: make -j3 -C %s' % args.arch)
def makefile(args):
if args.CC:
CC = args.CC
else:
if args.petsc_dir:
CC = '$(PCC)'
else:
CC = 'mpicc'
m = ['HPGMG_ARCH = %s' % args.arch,
'HPGMG_CC = %s' % CC,
'HPGMG_CFLAGS = %s' % (args.CFLAGS if args.CFLAGS else ('$(PCC_FLAGS) ' if args.petsc_dir else '')),
'HPGMG_CPPFLAGS = %s' % (('$(CCPPFLAGS) ' if args.petsc_dir else '') + args.CPPFLAGS),
'HPGMG_LDFLAGS = %s' % args.LDFLAGS,
'HPGMG_LDLIBS = %s' % args.LDLIBS,
'PETSC_DIR = %s' % args.petsc_dir,
'PETSC_ARCH = %s' % args.petsc_arch,
'PYTHON = %s' % sys.executable,
'SRCDIR = %s' % os.path.abspath(os.path.dirname(__name__)),]
if args.with_hpm:
m.append('CONFIG_HPM = y')
hpm_lib = args.with_hpm
try:
hpm_lib = int(hpm_lib)
except:
pass
if not isinstance(hpm_lib,str): # ALCF location
hpm_lib = '/soft/perftools/hpctw/lib/libmpihpm.a /bgsys/drivers/ppcfloor/bgpm/lib/libbgpm.a'
for p in hpm_lib.split():
assert os.path.exists(p), "HPM path '%s' not found" % p
m.append('HPGMG_LDLIBS += ' + hpm_lib)
m.append('HPGMG_CPPFLAGS += -DUSE_HPM=1')
if args.fv:
m.append('CONFIG_FV = y')
if args.fe and args.petsc_dir:
m.append('CONFIG_FE = y')
m.append('CONFIG_FV_CPPFLAGS = ' + hpgmg_fv_cflags(args))
if args.petsc_dir:
found = False
for variables_path in [os.path.join('lib', 'petsc', 'conf', 'variables'),
os.path.join('lib', 'petsc-conf', 'variables'),
os.path.join('conf', 'variables')]:
if os.path.exists(os.path.join(args.petsc_dir,variables_path)):
m.append('include $(PETSC_DIR)/' + variables_path)
found = True
if not found:
raise RuntimeError('Could not find PETSc variables file in PETSC_DIR=%s' % (args.petsc_dir,))
m.append('include $(SRCDIR)/base.mk\n')
return '\n'.join(m)
def hpgmg_fv_cflags(args):
defines = []
if args.fv_mpi:
defines.append('USE_MPI')
defines.append('USE_%s' % args.fv_coarse_solver.upper())
if args.fv_subcomm:
defines.append('USE_SUBCOMM')
defines.append('USE_%sCYCLES' % args.fv_cycle.upper())
defines.append('USE_%s' % args.fv_smoother.upper())
#defines.append('STENCIL_FUSE_DINV') # generally only good on compute-intensive architectures with good compilers
#defines.append('STENCIL_FUSE_BC')
return ' '.join('-D%s=1'%d for d in defines)
| 48.176 | 185 | 0.622551 | ["Unlicense"] | kyushick/cdruntime | examples/hpgmg/hpgmgconf.py | 6,022 | Python |
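For reference, a small sketch of the finite-volume preprocessor flags produced by hpgmg_fv_cflags() with the parser defaults above (it assumes the script is importable as hpgmgconf, the name used by the generated reconfigure script).
from argparse import Namespace
import hpgmgconf  # assumes the file above is on the path under this name

args = Namespace(fv_mpi=True, fv_coarse_solver='bicgstab', fv_subcomm=True,
                 fv_cycle='F', fv_smoother='gsrb')
print(hpgmgconf.hpgmg_fv_cflags(args))
# -DUSE_MPI=1 -DUSE_BICGSTAB=1 -DUSE_SUBCOMM=1 -DUSE_FCYCLES=1 -DUSE_GSRB=1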
from typing import Any
from typing import Mapping
from pandas.core.frame import DataFrame
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType
from cishouseholds.pipeline.config import get_config
sessions = {
"s": (
SparkSession.builder.config("spark.executor.memory", "1g")
.config("spark.executor.cores", 1)
.config("spark.dynamicAllocation.enabled", "true")
.config("spark.dynamicAllocation.maxExecutors", 3)
.config("spark.sql.shuffle.partitions", 12)
.config("spark.ui.showConsoleProgress", "false")
.config("spark.sql.legacy.allowCreatingManagedTableUsingNonemptyLocation", "true")
.config("spark.shuffle.service.enabled", "true")
.config("spark.sql.crossJoin.enabled", "true")
.appName("cishouseholds")
.enableHiveSupport()
.getOrCreate()
),
"m": (
SparkSession.builder.config("spark.executor.memory", "6g")
.config("spark.executor.cores", 3)
.config("spark.dynamicAllocation.enabled", "true")
.config("spark.dynamicAllocation.maxExecutors", 3)
.config("spark.sql.shuffle.partitions", 18)
.config("spark.ui.showConsoleProgress", "false")
.config("spark.sql.legacy.allowCreatingManagedTableUsingNonemptyLocation", "true")
.config("spark.shuffle.service.enabled", "true")
.config("spark.debug.maxToStringFields", 2000)
.config("spark.sql.crossJoin.enabled", "true")
.appName("cishouseholds")
.enableHiveSupport()
.getOrCreate()
),
"l": (
SparkSession.builder.config("spark.executor.memory", "10g")
.config("spark.yarn.executor.memoryOverhead", "1g")
.config("spark.executor.cores", 5)
.config("spark.dynamicAllocation.enabled", "true")
.config("spark.dynamicAllocation.maxExecutors", 5)
.config("spark.sql.shuffle.partitions", 200)
.config("spark.ui.showConsoleProgress", "false")
.config("spark.sql.legacy.allowCreatingManagedTableUsingNonemptyLocation", "true")
.config("spark.shuffle.service.enabled", "true")
.config("spark.sql.crossJoin.enabled", "true")
.appName("cishouseholds")
.enableHiveSupport()
.getOrCreate()
),
"xl": (
SparkSession.builder.config("spark.executor.memory", "20g")
.config("spark.yarn.executor.memoryOverhead", "2g")
.config("spark.executor.cores", 5)
.config("spark.dynamicAllocation.enabled", "true")
.config("spark.dynamicAllocation.maxExecutors", 12)
.config("spark.sql.shuffle.partitions", 240)
.config("spark.shuffle.service.enabled", "true")
.config("spark.ui.showConsoleProgress", "false")
.config("spark.sql.legacy.allowCreatingManagedTableUsingNonemptyLocation", "true")
.config("spark.shuffle.service.enabled", "true")
.config("spark.sql.crossJoin.enabled", "true")
.appName("cishouseholds")
.enableHiveSupport()
.getOrCreate()
),
}
def convert_cerberus_schema_to_pyspark(schema: Mapping[str, Any]) -> StructType:
"""
Convert a cerberus validation schema to a pyspark schema.
Assumes that schema is not nested.
The following are required in spark schema:
* `nullable` is False by default
* `metadata` is an empty dict by default
* `name` is the name of the field
"""
fields = [
{"metadata": {}, "name": name, "nullable": True, **values}
for name, values in schema.items()
if isinstance(values, dict)
]
return StructType.fromJson({"fields": fields, "type": "struct"})
def get_or_create_spark_session() -> SparkSession:
"""
Create a spark_session, hiding console progress and enabling HIVE table overwrite.
Session size is configured via pipeline config.
"""
config = get_config()
session_size = config.get("pyspark_session_size", "m")
spark_session = sessions[session_size]
return spark_session
def column_to_list(df: DataFrame, column_name: str):
"""Fast collection of all records in a column to a standard list."""
return [row[column_name] for row in df.collect()]
| 39.271028 | 90 | 0.6604 | ["MIT"] | ONS-SST/cis_households | cishouseholds/pyspark_utils.py | 4,202 | Python |
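A brief sketch of the kind of flat schema convert_cerberus_schema_to_pyspark() expects (field names here are hypothetical; it assumes pyspark is installed and the module is importable under the path shown above).
from cishouseholds.pyspark_utils import convert_cerberus_schema_to_pyspark

schema = {
    "participant_id": {"type": "string", "nullable": False},
    "visit_date": {"type": "string"},
    "age": {"type": "integer"},
}
struct = convert_cerberus_schema_to_pyspark(schema)
print(struct.fieldNames())  # ['participant_id', 'visit_date', 'age']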
from flask import Flask, render_template, request
import model
from model import get_headlines
app = Flask(__name__)
@app.route('/', methods=['GET', 'POST'])
def index():
if request.method == 'POST':
url = request.form['url']
predict = model.predict(url)
value = predict[1]
clickbait = predict[2]
text = predict[3]
article_title = predict[0]
model.update(value)
model.update(clickbait)
return render_template('index.html',
value = value,
clickbait = clickbait,
text = text,
article_title=article_title,
url=url)
else:
return render_template('index.html')
@app.route('/about')
def about():
return render_template('about.html')
@app.route('/feed')
def feed():
headlines = get_headlines()
return render_template('feed.html',headlines = headlines)
@app.route('/trends')
def trends():
return render_template('trends.html',
num_fake = model.get_data("FAKE"),
num_real = model.get_data("REAL"),
num_clickbait = model.get_data("CLICKBAIT"), num_notclickbait = model.get_data("NOT CLICKBAIT"))
app.run(host='0.0.0.0', port=8080, debug=True)
| 5.229381 | 121 | 0.377526 | [
"MIT"
] | asaxena2019/FakeNewsify-Article-Credibility-Checker | app.py | 2,029 | Python |
from keras.models import load_model
from glob import glob
from metrics import auc, precision, recall, f1
def save_json(model, path):
model_json = model.to_json()
with open(path, "w") as json_file:
json_file.write(model_json)
def save_weights(model, path):
model.save_weights(path)
def resave_model(model_path, save_path):
model = load_model(model_path, custom_objects={"auc": auc,
"precision": precision,
"recall": recall,
"f1": f1})
save_json(model, save_path + '/model.json')
save_weights(model, save_path + '/model.h5')
if __name__ == '__main__':
model_folders = glob('./model/saved_models/*')
for model_folder in model_folders:
models = sorted(glob(model_folder + '/*.hdf5'))
last_model = models[-1]
resave_model(last_model, model_folder)
model_name = model_folder[model_folder.rfind('/') + 1:]
print('Model {} resaved!'.format(model_name))
| 33.90625 | 74 | 0.588018 | ["MIT"] | eugene-vasilev/Automatic-Tool-Annotation-for-CATARACT-Surgery | learning/model/keras_model_resave.py | 1,085 | Python |
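The model.json / model.h5 pair written by resave_model() above can be loaded back with the standard Keras calls; a minimal sketch (the saved_models folder name is hypothetical).
from keras.models import model_from_json

with open('./model/saved_models/example_model/model.json') as json_file:
    model = model_from_json(json_file.read())
model.load_weights('./model/saved_models/example_model/model.h5')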
from panda3d.core import *
from direct.distributed.PyDatagram import PyDatagram
from OTPInternalRepository import OTPInternalRepository
from direct.directnotify import DirectNotifyGlobal
from game.OtpDoGlobals import *
from realtime.types import *
from direct.distributed.AIZoneData import AIZoneDataStore
from game.TimeManagerAI import TimeManagerAI
from game.EstateManagerAI import EstateManagerAI
from game.TTHoodAI import TTHoodAI
from game.DDHoodAI import DDHoodAI
from game.DGHoodAI import DGHoodAI
from game.MMHoodAI import MMHoodAI
class AIRepository(OTPInternalRepository):
notify = DirectNotifyGlobal.directNotify.newCategory('AIRepository')
notify.setInfo(True)
GameGlobalsId = OTP_DO_ID_TOONTOWN
def __init__(self, baseChannel, serverId, districtName, dcFileNames):
OTPInternalRepository.__init__(self, baseChannel, serverId, dcFileNames=dcFileNames, dcSuffix='AI')
self.zoneDataStore = AIZoneDataStore()
self.districtName = districtName
self.districtPopulation = 0
self.districtId = self.ourChannel
self.hoods = []
self.zoneAllocator = UniqueIdAllocator(61000, 1 << 20)
def getGameDoId(self):
return self.GameGlobalsId
def getAvatarIdFromSender(self):
return self.getMsgSender() & 0xFFFFFFFF
def getAccountIdFromSender(self):
return (self.getMsgSender() >> 32) & 0xFFFFFFFF
def getZoneDataStore(self):
return self.zoneDataStore
def getAvatarExitEvent(self, avId):
return 'distObjDelete-%d' % avId
def allocateZone(self):
return self.zoneAllocator.allocate()
def deallocateZone(self, zoneId):
self.zoneAllocator.free(zoneId)
def handleConnected(self):
OTPInternalRepository.handleConnected(self)
# register the AI on the state server...
dg = PyDatagram()
dg.addServerHeader(self.serverId, self.ourChannel, STATESERVER_ADD_SHARD)
dg.addString(self.districtName)
dg.addUint32(self.districtPopulation)
self.send(dg)
# add a post remove to remove the shard from the state server
# when we disconnect from the message director...
dg = PyDatagram()
dg.addServerHeader(self.serverId, self.ourChannel, STATESERVER_REMOVE_SHARD)
self.addPostRemove(dg)
# create the AI globals...
self.createGlobals()
self.createZones()
def createGlobals(self):
self.timeManager = TimeManagerAI(self)
self.timeManager.generateWithRequired(OTP_ZONE_ID_OLD_QUIET_ZONE)
self.estateManager = EstateManagerAI(self)
self.estateManager.generateWithRequired(OTP_ZONE_ID_OLD_QUIET_ZONE)
def createZones(self):
if simbase.config.GetBool('want-toontown-central', False):
self.hoods.append(TTHoodAI(self))
if simbase.config.GetBool('want-donalds-dock', False):
self.hoods.append(DDHoodAI(self))
if simbase.config.GetBool('want-daisys-garden', False):
self.hoods.append(DGHoodAI(self))
if simbase.config.GetBool('want-minnies-melody-land', False):
self.hoods.append(MMHoodAI(self))
for hood in self.hoods:
hood.createObjects()
| 33.71875 | 107 | 0.713315 | [
"BSD-3-Clause"
] | AnythingTechPro/toontown-otp-original | game/AIRepository.py | 3,237 | Python |