id (stringlengths 1-265) | text (stringlengths 6-5.19M) | dataset_id (stringclasses 7 values) |
---|---|---|
18835 | <reponame>aws-samples/aws-cdk-for-emr-on-eks
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
from aws_cdk import aws_ec2 as ec2, aws_eks as eks, core, aws_emrcontainers as emrc, aws_iam as iam, aws_s3 as s3, custom_resources as custom, aws_acmpca as acmpca, aws_emr as emr
"""
This stack deploys the following:
- EMR Studio
"""
class StudioLiveStack(core.Stack):
def __init__(self, scope: core.Construct, construct_id: str, vpc: ec2.IVpc, **kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
# Create S3 bucket for Studio
bucket = s3.Bucket(self, "StudioBucket",
encryption=s3.BucketEncryption.S3_MANAGED,
block_public_access=s3.BlockPublicAccess.BLOCK_ALL,
versioned = True
)
# Create security groups
eng_sg = ec2.SecurityGroup(self, "EngineSecurityGroup",
vpc=vpc,
description="EMR Studio Engine",
allow_all_outbound=True
)
core.Tags.of(eng_sg).add("for-use-with-amazon-emr-managed-policies", "true")
ws_sg = ec2.SecurityGroup(self, "WorkspaceSecurityGroup",
vpc=vpc,
description="EMR Studio Workspace",
allow_all_outbound=False
)
core.Tags.of(ws_sg).add("for-use-with-amazon-emr-managed-policies", "true")
ws_sg.add_egress_rule(ec2.Peer.any_ipv4(), ec2.Port.tcp(443), "allow egress on port 443")
ws_sg.add_egress_rule(eng_sg, ec2.Port.tcp(18888), "allow egress on port 18888 to eng")
eng_sg.add_ingress_rule(ws_sg, ec2.Port.tcp(18888), "allow ingress on port 18888 from ws")
# Create Studio roles
role = iam.Role(self, "StudioRole",
assumed_by=iam.ServicePrincipal("elasticmapreduce.amazonaws.com"),
managed_policies=[
iam.ManagedPolicy.from_aws_managed_policy_name("AmazonS3FullAccess")
]
)
role.add_to_policy(iam.PolicyStatement(
resources=["*"],
actions=["ec2:AuthorizeSecurityGroupEgress",
"ec2:AuthorizeSecurityGroupIngress",
"ec2:CreateSecurityGroup",
"ec2:CreateTags",
"ec2:DescribeSecurityGroups",
"ec2:RevokeSecurityGroupEgress",
"ec2:RevokeSecurityGroupIngress",
"ec2:CreateNetworkInterface",
"ec2:CreateNetworkInterfacePermission",
"ec2:DeleteNetworkInterface",
"ec2:DeleteNetworkInterfacePermission",
"ec2:DescribeNetworkInterfaces",
"ec2:ModifyNetworkInterfaceAttribute",
"ec2:DescribeTags",
"ec2:DescribeInstances",
"ec2:DescribeSubnets",
"ec2:DescribeVpcs",
"elasticmapreduce:ListInstances",
"elasticmapreduce:DescribeCluster",
"elasticmapreduce:ListSteps"],
effect=iam.Effect.ALLOW
))
core.Tags.of(role).add("for-use-with-amazon-emr-managed-policies", "true")
user_role = iam.Role(self, "StudioUserRole",
assumed_by=iam.ServicePrincipal("elasticmapreduce.amazonaws.com")
)
        core.Tags.of(user_role).add("for-use-with-amazon-emr-managed-policies", "true")
user_role.add_to_policy(iam.PolicyStatement(
actions=["elasticmapreduce:CreateEditor",
"elasticmapreduce:DescribeEditor",
"elasticmapreduce:ListEditors",
"elasticmapreduce:StartEditor",
"elasticmapreduce:StopEditor",
"elasticmapreduce:DeleteEditor",
"elasticmapreduce:OpenEditorInConsole",
"elasticmapreduce:AttachEditor",
"elasticmapreduce:DetachEditor",
"elasticmapreduce:CreateRepository",
"elasticmapreduce:DescribeRepository",
"elasticmapreduce:DeleteRepository",
"elasticmapreduce:ListRepositories",
"elasticmapreduce:LinkRepository",
"elasticmapreduce:UnlinkRepository",
"elasticmapreduce:DescribeCluster",
"elasticmapreduce:ListInstanceGroups",
"elasticmapreduce:ListBootstrapActions",
"elasticmapreduce:ListClusters",
"elasticmapreduce:ListSteps",
"elasticmapreduce:CreatePersistentAppUI",
"elasticmapreduce:DescribePersistentAppUI",
"elasticmapreduce:GetPersistentAppUIPresignedURL",
"secretsmanager:CreateSecret",
"secretsmanager:ListSecrets",
"secretsmanager:TagResource",
"emr-containers:DescribeVirtualCluster",
"emr-containers:ListVirtualClusters",
"emr-containers:DescribeManagedEndpoint",
"emr-containers:ListManagedEndpoints",
"emr-containers:CreateAccessTokenForManagedEndpoint",
"emr-containers:DescribeJobRun",
"emr-containers:ListJobRuns"],
resources=["*"],
effect=iam.Effect.ALLOW
))
user_role.add_to_policy(iam.PolicyStatement(
resources=["*"],
actions=["servicecatalog:DescribeProduct",
"servicecatalog:DescribeProductView",
"servicecatalog:DescribeProvisioningParameters",
"servicecatalog:ProvisionProduct",
"servicecatalog:SearchProducts",
"servicecatalog:UpdateProvisionedProduct",
"servicecatalog:ListProvisioningArtifacts",
"servicecatalog:DescribeRecord",
"cloudformation:DescribeStackResources"],
effect=iam.Effect.ALLOW
))
user_role.add_to_policy(iam.PolicyStatement(
resources=["*"],
actions=["elasticmapreduce:RunJobFlow"],
effect=iam.Effect.ALLOW
))
user_role.add_to_policy(iam.PolicyStatement(
resources=[role.role_arn,
f"arn:aws:iam::{self.account}:role/EMR_DefaultRole",
f"arn:aws:iam::{self.account}:role/EMR_EC2_DefaultRole"],
actions=["iam:PassRole"],
effect=iam.Effect.ALLOW
))
user_role.add_to_policy(iam.PolicyStatement(
resources=["arn:aws:s3:::*"],
actions=["s3:ListAllMyBuckets",
"s3:ListBucket",
"s3:GetBucketLocation"],
effect=iam.Effect.ALLOW
))
user_role.add_to_policy(iam.PolicyStatement(
resources=[f"arn:aws:s3:::{bucket.bucket_name}/*",
f"arn:aws:s3:::aws-logs-{self.account}-{self.region}/elasticmapreduce/*"],
actions=["s3:GetObject"],
effect=iam.Effect.ALLOW
))
policy_document = {
"Version": "2012-10-17T00:00:00.000Z",
"Statement": [
{
"Action": [
"elasticmapreduce:CreateEditor",
"elasticmapreduce:DescribeEditor",
"elasticmapreduce:ListEditors",
"elasticmapreduce:StartEditor",
"elasticmapreduce:StopEditor",
"elasticmapreduce:DeleteEditor",
"elasticmapreduce:OpenEditorInConsole",
"elasticmapreduce:AttachEditor",
"elasticmapreduce:DetachEditor",
"elasticmapreduce:CreateRepository",
"elasticmapreduce:DescribeRepository",
"elasticmapreduce:DeleteRepository",
"elasticmapreduce:ListRepositories",
"elasticmapreduce:LinkRepository",
"elasticmapreduce:UnlinkRepository",
"elasticmapreduce:DescribeCluster",
"elasticmapreduce:ListInstanceGroups",
"elasticmapreduce:ListBootstrapActions",
"elasticmapreduce:ListClusters",
"elasticmapreduce:ListSteps",
"elasticmapreduce:CreatePersistentAppUI",
"elasticmapreduce:DescribePersistentAppUI",
"elasticmapreduce:GetPersistentAppUIPresignedURL",
"secretsmanager:CreateSecret",
"secretsmanager:ListSecrets",
"emr-containers:DescribeVirtualCluster",
"emr-containers:ListVirtualClusters",
"emr-containers:DescribeManagedEndpoint",
"emr-containers:ListManagedEndpoints",
"emr-containers:CreateAccessTokenForManagedEndpoint",
"emr-containers:DescribeJobRun",
"emr-containers:ListJobRuns"
],
"Resource": "*",
"Effect": "Allow",
"Sid": "AllowBasicActions"
},
{
"Action": [
"servicecatalog:DescribeProduct",
"servicecatalog:DescribeProductView",
"servicecatalog:DescribeProvisioningParameters",
"servicecatalog:ProvisionProduct",
"servicecatalog:SearchProducts",
"servicecatalog:UpdateProvisionedProduct",
"servicecatalog:ListProvisioningArtifacts",
"servicecatalog:DescribeRecord",
"cloudformation:DescribeStackResources"
],
"Resource": "*",
"Effect": "Allow",
"Sid": "AllowIntermediateActions"
},
{
"Action": [
"elasticmapreduce:RunJobFlow"
],
"Resource": "*",
"Effect": "Allow",
"Sid": "AllowAdvancedActions"
},
{
"Action": "iam:PassRole",
"Resource": [
role.role_arn,
f"arn:aws:iam::{self.account}:role/EMR_DefaultRole",
f"arn:aws:iam::{self.account}:role/EMR_EC2_DefaultRole"
],
"Effect": "Allow",
"Sid": "PassRolePermission"
},
{
"Action": [
"s3:ListAllMyBuckets",
"s3:ListBucket",
"s3:GetBucketLocation"
],
"Resource": "arn:aws:s3:::*",
"Effect": "Allow",
"Sid": "S3ListPermission"
},
{
"Action": [
"s3:GetObject"
],
"Resource": [
f"arn:aws:s3:::{bucket.bucket_name}/*",
f"arn:aws:s3:::aws-logs-{self.account}-{self.region}/elasticmapreduce/*"
],
"Effect": "Allow",
"Sid": "S3GetObjectPermission"
}
]
}
custom_policy_document = iam.PolicyDocument.from_json(policy_document)
new_managed_policy = iam.ManagedPolicy(self, "LBControlPolicy",
document=custom_policy_document
)
# Set up Studio
studio = emr.CfnStudio(self, "MyEmrStudio",
auth_mode = "SSO", default_s3_location = f"s3://{bucket.bucket_name}/studio/",
engine_security_group_id = eng_sg.security_group_id,
name = "MyEmrEksStudio",
service_role = role.role_arn,
subnet_ids = [n.subnet_id for n in vpc.private_subnets],
user_role = user_role.role_arn,
vpc_id = vpc.vpc_id,
workspace_security_group_id = ws_sg.security_group_id,
description=None,
tags=None)
core.CfnOutput(
self, "StudioUrl",
value=studio.attr_url
)
# Create session mapping
studiosm = emr.CfnStudioSessionMapping(self, "MyStudioSM",
identity_name = self.node.try_get_context("username"),
identity_type = "USER",
session_policy_arn = new_managed_policy.managed_policy_arn,
studio_id = studio.attr_studio_id) | StarcoderdataPython |
3268372 | # -*- coding: utf-8 -*-
"""
Created on Sun May 28 09:02:02 2017
@author: andi
"""
def storageRequirements( N ):
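    # Each recursion level adds ceil(N/2) of the previous count, so e.g.
    # storageRequirements(8) == 8 + 4 + 2 + 1 == 15.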
if N < 2:
return N
else:
return N + storageRequirements( (N+1)//2 ) | StarcoderdataPython |
3242083 | <filename>api/serializers.py
# api/serializers.py
from rest_framework import serializers
from .models import BucketList
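# A hedged usage sketch for the serializer defined below (the field values
# are illustrative, not taken from the project):
#
#   serializer = BucketListSerializer(bucketlist)
#   serializer.data
#   # -> {'id': 1, 'name': 'Learn Django', 'owner': 'alice',
#   #     'date_created': '2020-01-01T00:00:00Z', 'date_modified': ...}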
class BucketListSerializer(serializers.ModelSerializer):
"""Serialize Models to JSON"""
owner = serializers.ReadOnlyField(source='owner.username')
class Meta:
"""Meta class mapping model fields to serializer fields"""
model = BucketList
fields = ('id', 'name', 'owner','date_created', 'date_modified')
read_only_fields = ('date_created', 'date_modified') | StarcoderdataPython |
3222395 | import warnings
from collections import namedtuple
import numpy as np
import h5py
from ecogdata.channel_map import ChannelMap
from ecogdata.trigger_fun import process_trigger
from .file2data import FileLoader
gain = {
'2t-as daq v1' : 10,
'2t-as daq v2' : 10
}
pitch_lookup = {
'actv_64' : 0.4,
'active_1008ch_sp_v2' : (0.3214, 0.25) # pitch is dx, dy
}
DAQunmix = namedtuple('DAQunmix', ['col', 'row', 'extra_col', 'extra_row'])
active_headstages = ('zif26 to 2x uhdmi',
'zif26 to 2x 20 pins harwin to 2x uhdmi',
'zif to 50mil',
'zif51_p4-50_demux-14c20r',)
def load_active(exp_path, name, electrode, daq, headstage, bnc=(), trigger_idx=0, **load_kwargs):
"""
Parameters
----------
exp_path: str
Path for experiment recordings
name: str
Name of the recording to load
electrode:
Electrode tag
daq:
DAQ equipment tag
headstage:
Headstage equipment tag
bnc: int or sequence
Columns in the acquired data corresponding to BNC inputs
trigger_idx: int
If there are BNC columns, then this one corresponds to a timestamp trigger.
**load_kwargs: dict
Other arguments for the FileLoader type
Returns
-------
dataset: Bunch
Bunch containing ".data" (a DataSource), ".chan_map" (a ChannelMap), and many other metadata attributes.
"""
loader = ActiveLoader(exp_path, name, electrode, daq, headstage, bnc=bnc, trigger_idx=trigger_idx, **load_kwargs)
return loader.create_dataset()
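# A minimal, hypothetical call (paths are illustrative; the equipment tags
# must be one of the combinations handled by get_daq_unmix below):
#
#   dataset = load_active('/data/experiments', 'rec_001',
#                         electrode='actv_64',
#                         daq='2t-as daq v2',
#                         headstage='zif26 to 2x uhdmi',
#                         bnc=(0,), trigger_idx=0)
#   dataset.data, dataset.chan_map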
def get_daq_unmix(daq, headstage, electrode, row_order=()):
daq = daq.lower()
headstage = headstage.lower()
electrode = electrode.lower()
row_order = list(map(int, row_order))
# e.g. penn data 4/28/2016
if (daq == '2t-as daq v2') and (headstage == 'zif26 to 2x uhdmi') and \
(electrode == 'actv_64'):
col_order = [2, 1, 5, 8, 7, 6, 9, 0, 4, 3]
if not len(row_order):
row_order = [0, 1, 2, 3, 7, 4, 6, 5]
col = [col_order.index(i) for i in range(len(col_order))]
row = [row_order.index(i) for i in range(len(row_order))]
# diagnostic channels are last 2 columns
extra_col = col[-2:]
col = col[:-2]
unmix = DAQunmix(np.array(col[::-1]), np.array(row), extra_col, ())
# e.g. duke data winter/spring 2016
elif (daq == '2t-as daq v1') and \
(headstage == 'zif26 to 2x 20 pins harwin to 2x uhdmi') and \
(electrode == 'actv_64'):
col_order = [7, 9, 8, 2, 4, 5, 1, 0, 3, 6]
col = [col_order.index(i) for i in range(len(col_order))]
extra_col = [1, 4]
for c in extra_col:
col.remove(c)
col = np.array(col)
# This is Ken's original order
if not len(row_order):
row_order = [6, 5, 1, 0, 2, 3, 7, 4]
row = [row_order.index(i) for i in range(len(row_order))]
# This is Ken's 2nd order (sequential)
#row = range(8)
# this is Ken's 3rd order (skip 3)
#row = list( (np.arange(8) * 3) % 8 )
unmix = DAQunmix(col[::-1], row, extra_col, ())
# e.g. duke data from 4/26/2016
elif (daq == '2t-as daq v1') and (headstage == 'zif26 to 2x uhdmi') and \
(electrode == 'actv_64'):
col_order = list( np.array([6, 9, 8, 7, 10, 1, 5, 4, 3, 2]) - 1 )
if not len(row_order):
row_order = list( np.array([1, 2, 3, 4, 8, 5, 6, 7]) - 1 )
col = [col_order.index(i) for i in range(len(col_order))]
extra_col = col[-2:]
col = col[:-2]
row = [row_order.index(i) for i in range(len(row_order))]
unmix = DAQunmix(np.array(col[::-1]), np.array(row), extra_col, ())
elif (daq == '2t-as daq v1') and (headstage == 'zif to 50mil') and \
(electrode == 'cardiac v1'):
col_order = np.array([12, 14, 17, 19, 5, 11, 13, 16, 18,
20, 2, 4, 7, 9, 15, 10, 8, 6, 3, 1]) - 1
if not len(row_order):
row_order = np.array([16, 1, 6, 8, 4, 20, 2, 12, 14, 17, 9,
22, 21, 10, 13, 18, 3, 19, 7, 11, 15, 5]) - 1
# reorder to my convention
col = [list(col_order).index(i) for i in range(len(col_order))]
# remove floating and ref channels
extra_col = [4, 14]
col.remove(4)
col.remove(14)
row = [list(row_order).index(i) for i in range(len(row_order))]
unmix = DAQunmix(np.array(col[::-1]), np.array(row), extra_col, ())
elif (daq == '2t-as daq v2') and (headstage == 'zif51_p4-50_demux-14c20r') \
and (electrode == 'active_1008ch_sp_v2'):
col_order = np.array([8, 7, 11, 14, 13, 12, -1, 1, 5,
4, 3, 2, 6, 10, 9, 28, 27, 22, 16, 18,
20, -1, 15, 23, 21, 19, 17, 25, 24, 26]) - 1
col = [list(col_order).index(i) for i in np.sort( col_order[col_order>=0] )]
if not len(row_order):
row_order = np.array([8, 6, 2, 4, 18, 14, 16, 1, 3, 10, 12, 5, 7, 11,
9, 17, 15, 13, 26, 24, 20, 22, 36, 32, 34, 19,
21, 28, 30, 23, 25, 29, 27, 35, 33, 31]) - 1
row = [list(row_order).index(i) for i in range(len(row_order))]
extra_col = np.where(col_order < 0)[0]
unmix = DAQunmix(np.array(col[::-1]), np.array(row), extra_col, ())
elif daq.lower() == 'passthru':
unmix = DAQunmix(slice(None), slice(None), (), ())
else:
err = ['Combination unknown:',
'DAQ {0}'.format(daq),
'Headstage {0}'.format(headstage),
'Electrode {0}'.format(electrode)]
raise NotImplementedError('\n'.join(err))
return unmix
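# For example (a sketch; the exact indices depend on the equipment combination):
#
#   unmix = get_daq_unmix('2t-as daq v2', 'zif26 to 2x uhdmi', 'actv_64')
#   unmix.col, unmix.row   # permutations applied to acquired columns/rows
#   unmix.extra_col        # diagnostic columns excluded from the electrode map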
class ActiveLoader(FileLoader):
transpose_array = True
permissible_types = ['.mat', '.h5', '.hdf']
def __init__(self, experiment_path, recording, electrode, daq, headstage, bnc=(), **kwargs):
self.daq_type = daq
self.headstage_type = headstage
self.bnc_columns = bnc
self.scale_to_uv = 1e6 / gain.get(self.daq_type, 1.0)
super(ActiveLoader, self).__init__(experiment_path, recording, electrode, **kwargs)
with h5py.File(self.data_file, 'r') as h5file:
shape = h5file['data'].shape
num_row = int(h5file['numRow'][()])
num_chan = int(h5file['numChan'][()])
total_channels = num_row * num_chan
# if this is a downsample file, check for an extracted BNC array
source_has_bnc = 'bnc' in h5file
self.transpose_array = (shape[1] == total_channels)
if bnc:
if source_has_bnc:
self.aligned_arrays = ['bnc']
else:
bnc_channels = np.concatenate([np.arange(bnc * num_row, (bnc + 1) * num_row) for bnc in bnc])
self.aligned_arrays = [('bnc', bnc_channels)]
def create_downsample_file(self, data_file, resample_rate, downsamp_file, **kwargs):
# The parent method creates a channel-compatible source file with anti-aliased downsamples in the channel
# array. For active electrode data with all external channels (e.g. logic levels) packed into the main data
# array, a side effect is that the external channels will be anti-alias filtered as well.
# However, the new source file will have a separate "bnc" array that is downsampled w/o filtering.
new_file = super(ActiveLoader, self).create_downsample_file(data_file, resample_rate, downsamp_file, **kwargs)
# add in the other metadata -- note that this assumes that create_downsample creates a mapped file,
# which may change
with h5py.File(data_file, 'r') as f1, h5py.File(new_file, 'r+') as f2:
samp_rate = f1['Fs'][()]
samp_rate[:] = resample_rate
f2['Fs'] = samp_rate
for k in f1.keys():
if k not in (self.data_array, 'Fs', 'bnc'):
try:
f2[k] = f1[k][()]
except AttributeError:
pass
# shorten this to the extracted BNC array
self.aligned_arrays = ['bnc']
return new_file
def make_channel_map(self):
unmix = get_daq_unmix(self.daq_type, self.headstage_type, self.electrode)
with h5py.File(self.data_file, 'r') as h5file:
nrow = int(h5file['numRow'][()])
ncol = int(h5file['numCol'][()])
pitch = pitch_lookup.get(self.electrode, 1.0)
# go through channels,
# if channel is data, put down the array matrix location
# else, put down a disconnected channel
data_rows = list(unmix.row)
data_cols = list(unmix.col)
# data_chans = np.array(data_cols) * nrow + np.array(data_rows)
electrode_chans = []
chan_map = []
other_chans = []
for c in range(nrow * ncol):
col = c // nrow
row = c % nrow
if col in data_cols:
arow = data_rows.index(row)
acol = data_cols.index(col)
chan_map.append(arow * len(data_cols) + acol)
electrode_chans.append(c)
else:
other_chans.append(c)
nr = len(unmix.row)
nc = len(unmix.col)
cm = ChannelMap(chan_map, (nr, nc), pitch=pitch, col_major=False)
return cm, electrode_chans, other_chans, []
def find_trigger_signals(self, data_file):
bnc_columns = self.bnc_columns
if not bnc_columns:
return (), ()
# If trigger index is an integer, proceed. If not and it evaluates false, then skip
if not isinstance(self.trigger_idx, int) and not self.trigger_idx:
return (), ()
if not np.iterable(bnc_columns):
bnc_columns = (bnc_columns,)
trigger_idx = self.trigger_idx
if np.iterable(trigger_idx):
trigger_idx = trigger_idx[0]
with h5py.File(data_file, 'r') as h5file:
nrow = int(h5file['numRow'][()])
# if this is a downsample file, it should be the case that a BNC array has been extracted and downsampled
# without filtering
if 'bnc' in h5file:
bnc_data = h5file['bnc'][:].reshape(len(bnc_columns), nrow, -1)
else:
bnc_channels = np.concatenate([np.arange(bnc * nrow, (bnc + 1) * nrow) for bnc in bnc_columns])
if self.transpose_array:
bnc_data = h5file['data'][:, bnc_channels].T
else:
bnc_data = h5file['data'][bnc_channels, :]
bnc_data = bnc_data.reshape(len(bnc_columns), nrow, -1)
try:
trigger_signal = bnc_data[trigger_idx]
pos_edge = process_trigger(trigger_signal)[0]
except (IndexError, ValueError) as e:
tb = e.__traceback__
msg = 'Trigger channels were specified but do not exist'
if self.raise_on_glitch:
raise Exception(msg).with_traceback(tb)
else:
                warnings.warn(msg, RuntimeWarning)
                trigger_signal, pos_edge = (), ()
return trigger_signal, pos_edge
# def slice_style(cmd_str):
# if cmd_str.find('skip') >= 0:
# cmd1, cmd2 = cmd_str.split('-')
# if cmd2 == 'odd':
# return slice(None, None, 2)
# if cmd2 == 'even':
# return slice(1, None, 2)
# else:
# n = int(cmd1.replace('skip', ''))
# idx = list(map(int, cmd2.split(',')))
# select = np.setdiff1d(np.arange(n), np.array(idx))
# return select
# elif cmd_str.find('all') >= 0:
# return slice(None)
# else:
# raise NotImplementedError('slicing not known')
#
#
# def rawload_active(
# exp_path, test, gain, shm=False,
# bnc=(), unmix=None, row_cmd=''
# ):
# # splits the raw TDMS file into channel data and BNC data
#
# try:
# raw_load = load_bunch(os.path.join(exp_path, test + '.h5'), '/')
# except IOError:
# raw_load = load_bunch(os.path.join(exp_path, test + '.mat'), '/')
#
# try:
# Fs = raw_load.Fs
# except:
# Fs = raw_load.fs
#
# shape = raw_load.data.shape
# if shape[1] < shape[0]:
# raw_load.data = raw_load.data.transpose()
# nrow, ncol_load = list(map(int, (raw_load.numRow, raw_load.numCol)))
# nchan = int(raw_load.numChan)
# if raw_load.data.shape[0] < nchan * nrow:
# # each row of data needs to be demuxed as (nsamp, nrow)
# # since rows are serially sampled in every pass
# demux = raw_load.data.reshape(nchan, -1, nrow).transpose(0, 2, 1)
# else:
# demux = raw_load.data.reshape(nchan, nrow, -1)
#
# del raw_load['data']
# if unmix is None:
# extra = range(ncol_load, nchan)
# unmix = DAQunmix(slice(0, ncol_load), slice(None), extra, ())
# col_slice = unmix.col
# row_slice = unmix.row
# extra_col = unmix.extra_col
# extra_row = unmix.extra_row # currently unused
#
# # get BNC channels (triggers and stims, etc) and any extra channels
# bnc = list(map(int, bnc))
# bnc_chans = demux[bnc].copy() if len(bnc) else ()
# extra_chans = demux[extra_col].copy() if len(extra_col) else ()
#
# # get electrode channels
# cdata = demux[col_slice]
# del demux
# while gc.collect():
# pass
# cdata = cdata[:, row_slice, :]
# f = row_cmd.find('avg')
# if f >= 0:
# n_avg = int(row_cmd[f + 3:])
# # reshape the data into (n_col, n_row', n_avg, n_pts)
# nrow = nrow / n_avg
# shp = list(cdata.shape)
# shp[1] = nrow
# shp.insert(2, n_avg)
# cdata = cdata.reshape(shp).mean(-2)
# else:
# nrow = cdata.shape[1]
# if shm:
# data = shared_copy(cdata)
# else:
# data = cdata.copy()
# del cdata
# while gc.collect():
# pass
# data.shape = (-1, data.shape[-1])
# data /= gain
# ncol = data.shape[0] / nrow
# try:
# info = tdms_info(raw_load.info)
# except AttributeError:
# info = None
# return data, bnc_chans, extra_chans, Fs, (nrow, ncol), info
#
#
# def load_active(exp_path, name, electrode, daq, headstage,
# bandpass=(), notches=(), trigger=0,
# snip_transient=True, units='uV', save=False,
# row_order=(), bnc=(), **load_kws
# ):
# """
# Load a variety of active-electrode data formats.
#
# * exp_path, name: the path and recording file name (without extension)
# * electrode: name of electrode used
# * daq: data-acquisition system (see below)
# * other parameters straightforward
#
# The DAQ label identifies a particular electrode-indexing scheme. In
# principle columns and rows can be permuted in any order, and the DAQ
# label is specific to a single order for a given electrode.
#
# """
#
# unmix = get_daq_unmix(daq, headstage, electrode, row_order=row_order)
# data, bnc_chans, extra_chans, Fs, eshape, info = rawload_active(
# exp_path, name, gain[daq.lower()],
# shm=True, unmix=unmix, bnc=bnc, **load_kws
# )
#
# # get triggers
# if len(bnc_chans):
# pos_edge, trig = process_trigger(bnc_chans[int(trigger)])
# # re-mux the BNC channels
# #bnc_chans = bnc_chans.transpose(0, 1, 2)
# #bnc_chans = bnc_chans.reshape(bnc_chans.shape[0], -1)
# else:
# pos_edge = ()
# trig = None
#
# # deal with extra chans
# if len(extra_chans):
# extra_chans = extra_chans.reshape(extra_chans.shape[0], -1)
#
# # get electrode channel map
# ii, jj = np.mgrid[:eshape[0], :eshape[1]]
# # channels are ordered in column-major (i.e. rows count serially)
# chan_map = ut.mat_to_flat(
# eshape, ii.ravel('F'), jj.ravel('F'), col_major=False
# )
# # describe this order in row-major fashion
# chan_map = ut.ChannelMap(chan_map, eshape, col_major=False,
# pitch=pitch_lookup.get(electrode, 1))
#
# if units.lower() != 'v':
# convert_scale(data, 'v', units)
#
# # do highpass filtering for stationarity
# if bandpass:
# # remove DC from rows
# if bandpass[0] > 0:
# data -= data.mean(1)[:,None]
# ft.filter_array(
# data,
# design_kwargs=dict(lo=bandpass[0], hi=bandpass[1], Fs=Fs),
# filt_kwargs=dict(filtfilt=True)
# )
#
# if notches:
# ft.notch_all(
# data, Fs, lines=notches, inplace=True, filtfilt=True
# )
#
#
# if snip_transient:
# if isinstance(snip_transient, bool):
# snip_len = int( Fs * 5 )
# else:
# snip_len = int( Fs * snip_transient )
# if len(pos_edge):
# pos_edge -= snip_len
# pos_edge = pos_edge[pos_edge > 0]
# trig = trig[...,snip_len:].copy()
# if len(bnc_chans):
# f = bnc_chans.shape[-1] / data.shape[-1]
# bnc_chans = bnc_chans[...,snip_len*f:].copy()
# if len(extra_chans):
# f = extra_chans.shape[-1] / data.shape[-1]
# extra_chans = extra_chans[...,snip_len*f:].copy()
#
# data = data[...,snip_len:].copy()
#
# dset = ut.Bunch()
# dset.pos_edge = pos_edge
# dset.data = data
# dset.extra_chans = extra_chans
# dset.bnc = bnc_chans
# dset.chan_map = chan_map
# dset.Fs = Fs
# while not os.path.split(exp_path)[1]:
# exp_path = os.path.split(exp_path)[0]
# dset.name = '.'.join( [os.path.split(exp_path)[1], name] )
# dset.bandpass = bandpass
# dset.trig = trig
# dset.transient_snipped = snip_transient
# dset.units = units
# dset.notches = notches
# dset.info = info
#
# return dset
| StarcoderdataPython |
60938 | <reponame>armijoalb/M-ster-Ciencias-de-Datos-UGR<gh_stars>0
import cv2 as cv
import numpy as np
from functions import loadImages
import time
class LBP:
def __init__(self):
self.window_width = 64
self.window_heigth = 128
self.block_width = 16
self.block_heigth = 16
self.desp_x = 8
self.desp_y = 8
self.potencias_2 = np.array([2**i for i in range(7,-1,-1)])
def checkPixel(self,pixel_value,image,x,y):
value = 0
try:
if(image[y][x] >= pixel_value):
value=1
except:
pass
return value
def computeLBPpixel(self,center_x,center_y,block):
positions = [ [center_y-1,i] for i in range(center_x-1,center_x+2)]
positions.append([center_y,center_x+1])
positions.extend([[center_y+1,i] for i in range(center_x+1,center_x-2,-1)])
positions.append([center_y,center_x-1])
pixel_value = block[center_y][center_x]
positions = np.array(positions)
values = [block[y,x] for y,x in positions]
code = np.where(values>=pixel_value,1,0)
# print(code)
# print(self.potencias_2)
lbp_value = np.dot(code,self.potencias_2)
return lbp_value
def computeLBPblock(self,ini_x,ini_y,image):
return np.array([self.computeLBPpixel(x,y,image)
for y in range(ini_y,ini_y+self.block_heigth)
                         for x in range(ini_x,ini_x+self.block_width)],dtype=np.float64)
def computeLBPWindow(self,image,ini_x=1,ini_y=1):
size_y, size_x = [self.window_heigth,self.window_width]
        # TODO: restrict the starting positions to only those values where y,x + 16 < size_y,size_x
pos_iniciales = [[y,x] for y in range(ini_y,size_y,self.desp_y) for x in range(ini_x,size_x,self.desp_x)
if (x+self.block_width) <= (size_x+ini_x) and (y+self.block_heigth) <= (size_y+ini_y)]
lbp_hist = [self.computeLBPblock(x,y,image) for y,x in pos_iniciales]
lbp_hist = np.array([np.array(block_hist) for block_hist in lbp_hist]).flatten()
return lbp_hist
def compute(self,image):
gray_image = cv.cvtColor(image,cv.COLOR_RGB2GRAY)
bigger_image = np.pad(gray_image,1,'constant',constant_values = 255)
        return np.array(self.computeLBPWindow(bigger_image), dtype=np.float64)
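# Example usage on a single 64x128 detection window (the file name is
# illustrative); the result is the per-pixel LBP code of every 16x16 block,
# concatenated into one feature vector:
#
#   lbp = LBP()
#   window = cv.imread('pedestrian_window.png', cv.IMREAD_COLOR)
#   features = lbp.compute(window)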
# image = cv.imread('ECI.Practica/data/train/pedestrians/AnnotationsPos_0.000000_crop_000010a_0.png',cv.IMREAD_COLOR)
# gray = cv.cvtColor(image,cv.COLOR_RGB2GRAY)
# bigger = np.pad(gray,1,'constant',constant_values=0)
# tiempo en cargar los datos 3314.53182387352 segundos.
# start = time.time()
# d,c = loadImages(LBP())
# stop = time.time()
# print(stop-start)
# np.savez_compressed('lbp_data_good',d)
# np.savez_compressed('lbp_clases_good',c) | StarcoderdataPython |
157268 | <filename>setup.py
from distutils.core import setup
setup(name = 'RedditWallpaperScraper',
version = '0.0.1dev',
packages = 'reddit-wallpaper-scraper',
long_description=open('README.md').read()
)
| StarcoderdataPython |
144134 | <filename>topicnet/cooking_machine/cubes/controller_cube.py
"""
Allows to add `ControllerAgent` (with unknown parameters) to the model, which enables user to
change `tau` during the `_fit` method.
`parameters` is a dict with four fields:
Fields
------
reg_name: str
The name of regularizer. We want to change the tau coefficient of it during training
Note that only one of ("reg_name", "regularizer") should be provided
regularizer: artm.regularizer.Regularizer
Regularizer object (if we want to add non-existing regularizer to the model)
Note that only one of ("reg_name", "regularizer") should be provided
score_to_track: str
The name of metric which we will track.
We assume that if that metric is 'sort of decreasing', then everything is OK
and we are allowed to change tau coefficient further; otherwise we revert back
to the last "safe" value and stop
More formal definition of "sort of decreasing": if we divide a curve into two parts like so:
#####################################
#. . . .. . . . .. . .. . . ... . #
#%. . . . . . . .. . . . . . . ..#
#:t . . . . . . . . . . . . . . . .#
# t: . . . . . . . . . . . . . . ...#
#. %. . . . . . . . . . . . . . . .#
#. :t. . . . . . . . . . . . . . .#
#.. ;; . . . . . . . . . . . . ..#
# ..t.. . . . . . . . . . . . . .#
#. . :t .. . . . . . . . . . . . ..#
#. .. t: . . . . . . . . . . . . . .#
#. ..S: . . . . . . . . . . . . ..#
#. . . .:;: . . . . . . . . . . . .#
#. . . . :;; . . . . . . . . . . .#
#. . . . .. :%. nmmMMmmn . .#
# . . . . .tt%.ztttt"' '""ttttttt#
#. . . . . . '"' . . . . . . . . #
#####################################
| | |
| left part | |
global minimum |
| right part |
then the right part is no higher than 5% of global minimum
(you can change 5% if you like by adjusting `fraction_threshold`
 in the `is_score_out_of_control` function)
If score_to_track is None, then `ControllerAgent` will never stop
(useful for e.g. decaying coefficients)
tau_converter: str or callable
Notably, def-style functions and lambda functions are allowed
If it is function, then it should accept four arguments:
`(initial_tau, prev_tau, cur_iter, user_value)`
For example:
>> lambda initial_tau, prev_tau, cur_iter, user_value:
>> initial_tau if cur_iter % 2 == 0 else 0
(Note that experiment description might display lambda functions incorrectly;
Try to keep them to a single line or use def-style functions instead)
>> def func(initial_tau, prev_tau, cur_iter, user_value):
>> relu_grower = user_value * (cur_iter - 8) if cur_iter > 8 else 0
>> return 0 if cur_iter % 2 else relu_grower
If it is a string, then it should be an expression consisting of numbers, operations
and variables (four are allowed: `initial_tau, prev_tau, cur_iter, user_value`)
For example:
`>> "initial_tau * ((cur_iter + 1) % 2)"`
or
`>> "prev_tau * user_value"`
user_value_grid: list of numeric
Values for user_value variable
When writing `tau_converter`, you can use user_value variable.
For example:
>> tau_converter: "prev_tau * user_value"
>> user_value_grid: [1, 0.99, 0.95, 0.90, 0.80, 0.5]
(I know that tau should decay exponentially, but I'm unsure of exact half-life)
>> tau_converter: "prev_tau + user_value"
>> user_value_grid: [50, 100, 150, 200, 250]
(I know that tau should increase linearly, but I'm unsure of exact speed)
>> def func(initial_tau, prev_tau, cur_iter, user_value):
>> new_tau = 50 * (cur_iter - user_value) if cur_iter > user_value else 0
>> return new_tau
>> tau_converter: func
>> user_value_grid: [10, 15, 20, 25, 30]
(Tau should start with zero, then increase linearly. I don't know when to start this process)
max_iter: numeric
Optional (default value is `num_iter` specified for cube)
Agent will stop changing tau after `max_iters` iterations
`max_iters` could be `float("NaN")` and `float("inf")` values:
that way agent will continue operating even outside this `RegularizationControllerCube`
""" # noqa: W291
from .base_cube import BaseCube
from ..rel_toolbox_lite import count_vocab_size, handle_regularizer
import numexpr as ne
import warnings
from dill.source import getsource
from copy import deepcopy
import numpy as np
W_HALT_CONTROL = "Process of dynamically changing tau was stopped at {} iteration"
W_MAX_ITERS = "Maximum number of iterations is exceeded; turning off"
def is_score_out_of_control(model, score_name, fraction_threshold=0.05):
"""
Returns True if score isn't 'sort of decreasing' anymore.
See docstring for RegularizationControllerCube for details
Parameters
----------
model : TopicModel
score_name : str or None
fraction_threshold : float
Returns
-------
bool
"""
if score_name not in model.scores: # case of None is handled here as well
return False
vals = model.scores[score_name]
if len(vals) == 0:
return False
idxmin = np.argmin(vals)
    if idxmin == len(vals) - 1:  # score is monotonically decreasing
return False
maxval = max(vals[idxmin:])
minval = vals[idxmin]
    answer = (maxval - minval) / abs(minval) > fraction_threshold
if answer:
msg = (f"Score {score_name} is too high: during training the value {maxval}"
f" passed a treshold of {(1 + fraction_threshold) * minval}"
f" (estimate is based on {idxmin} iteration)")
warnings.warn(msg)
return answer
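# For instance, with the default fraction_threshold=0.05 and synthetic traces:
#   vals = [10, 6, 3, 2, 2.05]  ->  still "sort of decreasing" (2.05 <= 1.05 * 2)
#   vals = [10, 6, 3, 2, 2.20]  ->  out of control (2.20 > 1.05 * 2)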
class ControllerAgent:
"""
Allows to change `tau` during the `_fit` method.
Each `TopicModel` has a `.callbacks` attribute.
This is a list consisting of various `ControllerAgent`s.
Each agent is described by:
* reg_name: the name of regularizer having `tau` which needs to be changed
* score_to_track: score providing control of the callback execution
* tau_converter: function or string describing how to get new `tau` from old `tau`
* local_dict: dictionary containing values of several variables,
most notably, `user_value`
* is_working:
if True, agent will attempt to change tau until something breaks.
if False, agent will assume that something had been broken and will
revert to the last known safe value (without trying to change anything further)
See top-level docstring for details.
"""
def __init__(self, reg_name, score_to_track, tau_converter, max_iters, local_dict=dict()):
"""
Parameters
----------
reg_name : str
score_to_track : str, list of str or None
tau_converter : callable or str
local_dict : dict
max_iters : int or float
Agent will stop changing tau after `max_iters` iterations
`max_iters` could be `float("NaN")` and `float("inf")` values:
that way agent will continue operating even outside this `RegularizationControllerCube`
"""
self.reg_name = reg_name
self.tau_converter = tau_converter
if isinstance(score_to_track, list):
self.score_to_track = score_to_track
elif isinstance(score_to_track, str):
self.score_to_track = [score_to_track]
else:
self.score_to_track = []
self.is_working = True
self.local_dict = local_dict
self.tau_history = []
self.max_iters = max_iters
def _convert_tau(self):
""" """
if isinstance(self.tau_converter, str):
new_tau = ne.evaluate(self.tau_converter, local_dict=self.local_dict)
# numexpr returns np.ndarray (which is a scalar in our case)
new_tau = float(new_tau)
else:
new_tau = self.tau_converter(**self.local_dict)
return new_tau
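    # e.g. with tau_converter "prev_tau * user_value" and
    # local_dict {"prev_tau": 10.0, "user_value": 0.5}, _convert_tau() -> 5.0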
def _find_safe_tau(self):
""" """
if len(self.tau_history) < 2:
warnings.warn("Reverting tau to 0")
safe_tau = 0
else:
safe_tau = self.tau_history[-2]
return safe_tau
def invoke(self, model, cur_iter):
"""
Attempts to change tau if `is_working == True`. Otherwise, keeps to the last safe value.
Parameters
----------
model : TopicModel
cur_iter : int
Note that zero means "cube just started", not "the model is brand new"
"""
current_tau = model.regularizers[self.reg_name].tau
self.tau_history.append(current_tau)
self.local_dict["prev_tau"] = current_tau
self.local_dict["cur_iter"] = cur_iter
if "initial_tau" not in self.local_dict:
self.local_dict["initial_tau"] = current_tau
if self.is_working and len(self.tau_history) > self.max_iters:
warnings.warn(W_MAX_ITERS)
self.is_working = False
if self.is_working:
should_stop = any(
is_score_out_of_control(model, score) for score in self.score_to_track
)
if should_stop:
warnings.warn(W_HALT_CONTROL.format(len(self.tau_history)))
self.is_working = False
model.regularizers[self.reg_name].tau = self._find_safe_tau()
else:
model.regularizers[self.reg_name].tau = self._convert_tau()
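# A hedged sketch of constructing an agent by hand (the cube below normally
# does this itself; the regularizer and score names are illustrative):
#
#   agent = ControllerAgent(reg_name='smooth_phi',
#                           score_to_track='PerplexityScore@all',
#                           tau_converter='prev_tau * user_value',
#                           max_iters=20,
#                           local_dict={'user_value': 0.95})
#   topic_model.callbacks.append(agent)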
class RegularizationControllerCube(BaseCube):
def __init__(self, num_iter: int, parameters,
reg_search='grid', use_relative_coefficients: bool = True, strategy=None,
tracked_score_function=None, verbose: bool = False, separate_thread: bool = True):
"""
Initialize stage. Checks params and update internal attributes.
Parameters
----------
num_iter : int
number of iterations or method
parameters : list[dict] or dict
regularizers params
each dict should contain the following fields:
("reg_name" or "regularizer"),
"score_to_track" (optional),
"tau_converter",
"user_value_grid"
See top-level docstring for details.
Examples:
>> {"regularizer": artm.regularizers.<...>,
>> "score_to_track": "PerplexityScore@all",
>> "tau_converter": "prev_tau * user_value",
>> "user_value_grid": [0.5, 1, 2]}
-----------
>> {"reg_name": "decorrelator_for_ngramms",
>> "score_to_track": None,
>> "tau_converter": (
>> lambda initial_tau, prev_tau, cur_iter, user_value:
>> initial_tau * (cur_iter % 2) + user_value
>> )
>> "user_value_grid": [0, 1]}
reg_search : str
"grid", "pair", "add" or "mul".
"pair" for elementwise grid search in the case of several regularizers
"grid" for the fullgrid search in the case of several regularizers
"add" and "mul" for the ariphmetic and geometric progression
respectively for PerplexityStrategy
(Default value = "grid")
use_relative_coefficients : bool
forces the regularizer coefficient to be in relative form
i.e. normalized over collection properties
strategy : BaseStrategy
optimization approach (Default value = None)
tracked_score_function : str ot callable
optimizable function for strategy (Default value = None)
verbose : bool
visualization flag (Default value = False)
""" # noqa: W291
super().__init__(num_iter=num_iter, action='reg_controller',
reg_search=reg_search, strategy=strategy, verbose=verbose,
tracked_score_function=tracked_score_function,
separate_thread=separate_thread)
self._relative = use_relative_coefficients
self.data_stats = None
self.raw_parameters = parameters
if isinstance(parameters, dict):
parameters = [parameters]
self._convert_parameters(parameters)
def _convert_parameters(self, all_parameters):
"""
Parameters
----------
all_parameters : list of dict
"""
for params_dict in all_parameters:
assert ("reg_name" in params_dict) != ("regularizer" in params_dict)
if "regularizer" in params_dict:
assert params_dict["regularizer"].tau is not None
self.parameters = [
{
"object": {
"reg_name": params_dict.get("reg_name", None),
"regularizer": params_dict.get("regularizer", None),
"score_to_track": params_dict.get("score_to_track", None),
"tau_converter": params_dict["tau_converter"],
"local_dict": {"user_value": None},
"max_iters": params_dict.get("max_iters", self.num_iter)
},
"field": "callback",
"values": params_dict.get('user_value_grid', [0])
}
for params_dict in all_parameters
]
def apply(self, topic_model, one_model_parameter, dictionary=None, model_id=None):
"""
Applies regularizers and controller agents to model
Parameters
----------
topic_model : TopicModel
one_model_parameter : list or tuple
dictionary : Dictionary
(Default value = None)
model_id : str
(Default value = None)
Returns
-------
TopicModel
"""
new_model = topic_model.clone(model_id)
new_model.parent_model_id = topic_model.model_id
modalities = dict()
if self._relative:
modalities = new_model.class_ids
if self.data_stats is None:
self.data_stats = count_vocab_size(dictionary, modalities)
for (agent_blueprint_template, field_name, current_user_value) in one_model_parameter:
agent_blueprint = dict(agent_blueprint_template)
if agent_blueprint["reg_name"] is None:
regularizer = agent_blueprint["regularizer"]
new_regularizer = deepcopy(regularizer)
handle_regularizer(
self._relative,
new_model,
new_regularizer,
self.data_stats,
)
agent_blueprint["reg_name"] = new_regularizer.name
else:
if agent_blueprint['reg_name'] not in new_model.regularizers.data:
error_msg = (f"Regularizer {agent_blueprint['reg_name']} does not exist. "
f"Cannot be modified.")
raise ValueError(error_msg)
agent_blueprint['local_dict']['user_value'] = current_user_value
# ControllerAgent needs only reg_name in constructor
agent_blueprint.pop("regularizer")
agent = ControllerAgent(**agent_blueprint)
new_model.callbacks.append(agent)
return new_model
def get_jsonable_from_parameters(self):
""" """
jsonable_parameters = []
for one_model_parameters in self.raw_parameters:
one_jsonable = dict(one_model_parameters)
converter = one_model_parameters['tau_converter']
if not isinstance(converter, str):
try:
# not always works, but this is not important
one_jsonable["tau_converter"] = str(getsource(converter))
except (TypeError, OSError):
# OSError: may arise if working in Jupyter Notebook
one_jsonable["tau_converter"] = "<NOT AVAILABLE>"
jsonable_parameters.append(one_jsonable)
return jsonable_parameters
| StarcoderdataPython |
1769374 | <gh_stars>1-10
# -*- coding: utf-8 -*-
import factory
from faker import Factory
from factory.fuzzy import FuzzyChoice
from faker.providers import misc, lorem
from apps.accounts.models.choices import Platform
from apps.accounts.models.phone_device import PhoneDevice
faker = Factory.create()
faker.add_provider(misc)
faker.add_provider(lorem)
class PhoneDeviceFactory(factory.django.DjangoModelFactory):
token = factory.LazyFunction(faker.sha256)
platform = FuzzyChoice(choices=Platform.values())
model_name = factory.LazyFunction(faker.word)
class Meta:
model = PhoneDevice
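# Typical use in a test (a sketch; assumes a configured Django test database):
#
#   device = PhoneDeviceFactory()               # persisted with fuzzed fields
#   devices = PhoneDeviceFactory.create_batch(3)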
| StarcoderdataPython |
1654275 | <gh_stars>10-100
from ecommercetools.transactions.transactions import get_transactions
| StarcoderdataPython |
27599 | <filename>ImGen.py
#!/usr/bin/env python
#BSD 3-Clause License
#Copyright (c) 2017, <NAME>
#############################################
# CHANGE THESE VARS AS NEEDED
size = 10 #size of squares in mils
invert = False #Color invert the image
image_name = "test.png" #name of the image, can be BMP, PNG or JPG
#############################################
from PIL import Image
import numpy as np
im = Image.open(image_name)
im.load()
im = im.convert('1')
pixels = list(im.getdata())
width, height = im.size
pixels = [pixels[i * width:(i + 1) * width] for i in range(height)]
print(height, width)
def format_csv(i,x,y):
cv.write("\""+str(i)+"\",")
cv.write("\""+str(x*size)+"\",")
cv.write("\""+str((height - y - 1) *size)+"\",")
cv.write("\"\"\n")
with open(image_name[:-3]+"csv", 'w') as cv:
cv.write("\"Index\",\"X (mil)\",\"Y (mil)\",\"Arc Angle (Neg = CW)\"\n")
cv.write("\"0\",\"0\",\"0\",\"\"\n")
i = 1
comp = 0
if (invert): comp = 255
for y in range (0,height):
#print pixels[:][y] #For Debugging
for x in range (0,width):
if (pixels[y][x] == comp):
format_csv(i,x,y)
i+=1
format_csv(i,x,y-1)
i+=1
format_csv(i,x+1,y-1)
i+=1
format_csv(i,x+1,y)
i+=1
format_csv(i,x,y)
i+=1
cv.write("\""+str(i)+"\",")
cv.write("\"0\",\"0\",\"\"\n")
i+=1
| StarcoderdataPython |
97597 | <reponame>loobinsk/customer_project<gh_stars>0
from time import sleep
from django.core.management.base import BaseCommand, CommandError
# from products.tasks import domain_check
# class Command(BaseCommand):
# help = 'Check few site'
#
# # def add_arguments(self, parser):
# # parser.add_argument('poll_ids', nargs='+', type=int)
#
# def handle(self, *args, **options):
# no_delay = True
# for i in range(100):
# res = domain_check(no_delay)
# if res == -1:
# sleep(30)
#
# self.stdout.write(self.style.SUCCESS(f'Updated sites: {res}'))
# pass
| StarcoderdataPython |
3224946 | <gh_stars>1-10
# -*- coding: utf-8 -*-
# (C) 2013-2015 <NAME>
#
# This file is part of 'open-tamil' package tests
#
# setup the paths
from opentamiltests import *
import tamil.utf8 as utf8
from tamil.tscii import TSCII
import codecs
if PYTHON3:
class long(int):
pass
class NumeralStringLimitTests(unittest.TestCase):
def test_case_basic(self):
self.assertEqual(u"புள்ளி மூன்று மூன்று",tamil.numeral.num2tamilstr('0.33'))
self.assertEqual(u"புள்ளி ஒன்பது எட்டு ஏழு ஆறு",tamil.numeral.num2tamilstr('0.9876'))
def test_case_american(self):
self.assertEqual(u"புள்ளி மூன்று மூன்று",tamil.numeral.num2tamilstr_american('0.33'))
self.assertEqual(u"புள்ளி ஒன்பது எட்டு ஏழு ஆறு",tamil.numeral.num2tamilstr_american('0.9876'))
class NumeralTestAmerican(unittest.TestCase):
def runTest(self,var,nos):
for numerStr,num in zip(var,nos):
print('Testing ---> ',num)
self.assertEqual( numerStr, tamil.numeral.num2tamilstr_american( num ), num )
return
def test_friend_of_rama( self ):
ramanujan = 1729
gometra = tamil.numeral.num2tamilstr( ramanujan )
expected = u"ஓர் ஆயிரத்து எழுநூற்று இருபத்தொன்பது"
self.assertEqual( gometra, expected )
def test_units( self ):
units = (u'பூஜ்ஜியம்', u'ஒன்று', u'இரண்டு', u'மூன்று', u'நான்கு', u'ஐந்து', u'ஆறு', u'ஏழு', u'எட்டு', u'ஒன்பது', u'பத்து') # 0-10
self.runTest( units, range(0,11) )
return
def test_basic_pulli(self):
numerals = (u'புள்ளி ஐந்து', u'ஒன்று புள்ளி ஐந்து', u'இரண்டு புள்ளி ஐந்து', u'மூன்று புள்ளி ஐந்து', u'நான்கு புள்ளி ஐந்து', u'ஐந்து புள்ளி ஐந்து', u'ஆறு புள்ளி ஐந்து', u'ஏழு புள்ளி ஐந்து', u'எட்டு புள்ளி ஐந்து', u'ஒன்பது புள்ளி ஐந்து', u'பத்து புள்ளி ஐந்து')
numbers = [i+0.5 for i in range(0,11)]
self.runTest( numerals, numbers )
return
def test_teens( self ):
teens = (u'பதினொன்று ', u'பனிரண்டு ', u'பதிமூன்று ', u'பதினான்கு ', u'பதினைந்து ',u'பதினாறு ', u'பதினேழு ', u'பதினெட்டு ', u'பத்தொன்பது ') # 11-19
self.runTest( teens, range(11,20) )
return
def test_tens ( self ):
tens = (u'பத்து', u'இருபது', u'முப்பது', u'நாற்பது', u'ஐம்பது',u'அறுபது', u'எழுபது', u'எண்பது', u'தொன்னூறு') # 10-90
self.runTest( tens, range(10,100,10) )
return
def test_100s( self ):
hundreds = ( u'நூறு', u'இருநூறு ', u'முன்னூறு ', u'நாநூறு ',u'ஐநூறு ', u'அறுநூறு ', u'எழுநூறு ', u'எண்ணூறு ', u'தொள்ளாயிரம் ') #100 - 900
self.runTest( hundreds, range(100,1000,100) )
return
def test_max( self ):
maxno = long(1e15 - 1)
expected = u'தொள்ளாயிரத்து தொன்னூற்றொன்பது டிரில்லியன் தொள்ளாயிரத்து தொன்னூற்றொன்பது பில்லியன் தொள்ளாயிரத்து தொன்னூற்றொன்பது மில்லியன் தொள்ளாயிரத்து தொன்னூற்றொன்பது ஆயிரத்து தொள்ளாயிரத்து தொன்னூற்றொன்பது'
self.assertEqual( tamil.numeral.num2tamilstr_american( maxno ), expected )
return
def test_numerals(self):
var = {0:u"பூஜ்ஜியம்",
long(1e7):u"பத்து மில்லியன்",
long(1e9-1):u"தொள்ளாயிரத்து தொன்னூற்றொன்பது மில்லியன் தொள்ளாயிரத்து தொன்னூற்றொன்பது ஆயிரத்து தொள்ளாயிரத்து தொன்னூற்றொன்பது",
3060:u"மூன்று ஆயிரத்து அறுபது",
1:u"ஒன்று",
2:u"இரண்டு",
3:u"மூன்று",
5:u"ஐந்து",
10:u"பத்து",
11:u"பதினொன்று ",
17:u"பதினேழு ",
19:u"பத்தொன்பது ",
20:u"இருபது",
21:u"இருபத்தொன்று",
1051:u"ஓர் ஆயிரத்து ஐம்பத்தொன்று",
100000:u"நூறு ஆயிரம்",
100001:u"நூறு ஆயிரத்து ஒன்று",
10011:u"பத்து ஆயிரத்து பதினொன்று ",
49:u"நாற்பத்தொன்பது",
50:u"ஐம்பது",
55:u"ஐம்பத்தைந்து",
1000001:u"ஒரு மில்லியன் ஒன்று",
90:u"தொன்னூறு",
99:u"தொன்னூற்றொன்பது",
100:u"நூறு",
101:u"நூற்றி ஒன்று",
1000:u"ஓர் ஆயிரம்",
111:u"நூற்றி பதினொன்று ",
1000000000000:u"ஒரு டிரில்லியன்",
1011:u"ஓர் ஆயிரத்து பதினொன்று "}
for k,actual_v in var.items():
v = tamil.numeral.num2tamilstr_american(k)
print('verifying => # %d'%k)
self.assertEqual(v,actual_v,k)
return
class NumeralTest(unittest.TestCase):
def runTest(self,var,nos):
for numerStr,num in zip(var,nos):
print('Testing ---> ',num)
self.assertEqual( numerStr, tamil.numeral.num2tamilstr( num ), num )
return
def test_units( self ):
units = (u'பூஜ்ஜியம்', u'ஒன்று', u'இரண்டு', u'மூன்று', u'நான்கு', u'ஐந்து', u'ஆறு', u'ஏழு', u'எட்டு', u'ஒன்பது', u'பத்து') # 0-10
self.runTest( units, range(0,11) )
return
def test_teens( self ):
teens = (u'பதினொன்று ', u'பனிரண்டு ', u'பதிமூன்று ', u'பதினான்கு ', u'பதினைந்து ',u'பதினாறு ', u'பதினேழு ', u'பதினெட்டு ', u'பத்தொன்பது ') # 11-19
self.runTest( teens, range(11,20) )
return
def test_tens ( self ):
tens = (u'பத்து', u'இருபது', u'முப்பது', u'நாற்பது', u'ஐம்பது',u'அறுபது', u'எழுபது', u'எண்பது', u'தொன்னூறு') # 10-90
self.runTest( tens, range(10,100,10) )
return
def test_100s( self ):
hundreds = ( u'நூறு', u'இருநூறு ', u'முன்னூறு ', u'நாநூறு ',u'ஐநூறு ', u'அறுநூறு ', u'எழுநூறு ', u'எண்ணூறு ', u'தொள்ளாயிரம் ') #100 - 900
self.runTest( hundreds, range(100,1000,100) )
return
def test_max( self ):
maxno = long(1e12 - 1 )
expected = u'தொன்னூற்றொன்பது ஆயிரத்து தொள்ளாயிரத்து தொன்னூற்றொன்பது கோடியே தொன்னூற்றொன்பது இலட்சத்து தொன்னூற்றொன்பது ஆயிரத்து தொள்ளாயிரத்து தொன்னூற்றொன்பது'
self.assertEqual( tamil.numeral.num2tamilstr( maxno ), expected )
return
def test_numerals(self):
var = {0:u"பூஜ்ஜியம்",
3060:u"மூன்று ஆயிரத்து அறுபது",
1:u"ஒன்று",
2:u"இரண்டு",
3:u"மூன்று",
5:u"ஐந்து",
10:u"பத்து",
11:u"பதினொன்று ",
17:u"பதினேழு ",
19:u"பத்தொன்பது ",
20:u"இருபது",
21:u"இருபத்தொன்று",
1051:u"ஓர் ஆயிரத்து ஐம்பத்தொன்று",
100000:u"ஒரு இலட்சம்",
100001:u"ஒரு இலட்சத்து ஒன்று",
10011:u"பத்து ஆயிரத்து பதினொன்று ",
49:u"நாற்பத்தொன்பது",
50:u"ஐம்பது",
55:u"ஐம்பத்தைந்து",
1000001:u"பத்து இலட்சத்து ஒன்று",
90:u"தொன்னூறு",
99:u"தொன்னூற்றொன்பது",
100:u"நூறு",
101:u"நூற்றி ஒன்று",
1000:u"ஓர் ஆயிரம்",
111:u"நூற்றி பதினொன்று ",
1000000000000:u"ஒரு இலட்சம் கோடி ",
1011:u"ஓர் ஆயிரத்து பதினொன்று "}
for k,actual_v in var.items():
v = tamil.numeral.num2tamilstr(k)
print('verifying => # %d'%k)
self.assertEqual(v,actual_v,k)
return
class NumeralNegTest(unittest.TestCase):
def runTest(self,var,nos):
for numerStr,num in zip(var,nos):
print('Testing ---> ',num)
print('NumerString',numerStr)
self.maxDiff = None
self.assertEqual( numerStr, tamil.numeral.num2tamilstr( num ), num )
return
def test_100s( self ):
hundreds = ( u'- நூறு', u'- இருநூறு ', u'- முன்னூறு ', u'- நாநூறு ',u'- ஐநூறு ', u'- அறுநூறு ', u'- எழுநூறு ', u'- எண்ணூறு ', u'- தொள்ளாயிரம் ') #100 - 900
self.runTest( hundreds, range(-100,-1000,-100) )
return
def test_USA(self):
ramanujan = -1729
gometra = tamil.numeral.num2tamilstr( ramanujan )
expected = u"- ஓர் ஆயிரத்து எழுநூற்று இருபத்தொன்பது"
self.assertEqual( gometra, expected )
def test_3LKPLUS1(self):
x1 = 3e5 + 1
actual = tamil.numeral.num2tamilstr( x1 )
expected = u'மூன்று இலட்சத்து ஒன்று'
self.assertEqual( actual, expected )
def test_PI(self):
if PYTHON3:
print("Python3 has different rounding")
return
pie = 3.1415
expected = u'மூன்று புள்ளி ஒன்று நான்கு ஒன்று ஐந்து'
actual = tamil.numeral.num2tamilstr(pie)
actual_USA = tamil.numeral.num2tamilstr_american(pie)
self.assertEqual(actual,expected)
self.assertEqual(actual_USA,expected)
def test_PI_million(self):
pie = 3e6 + 0.1415
expected = u'மூன்று மில்லியன் புள்ளி ஒன்று நான்கு ஒன்று'
actual_USA = tamil.numeral.num2tamilstr_american(pie)
self.assertEqual(actual_USA[0:len(expected)],expected)
def test_PI_lakshalu(self):
pie = 3e5+0.1415
expected = u'மூன்று இலட்சம் புள்ளி ஒன்று நான்கு ஒன்று ஐந்து'
actual_IN = tamil.numeral.num2tamilstr(pie)
self.assertEqual(actual_IN[0:len(expected)],expected)
    @unittest.skipIf( PYTHON3, "Python3 has different rounding")
def test_INFRAC(self):
if PYTHON3:
print("Python3 has different rounding")
return
exp2 = u'ஓர் ஆயிரத்து ஒன்று புள்ளி நான்கு ஐந்து'
actual_IN2 = tamil.numeral.num2tamilstr(1001+0.45)
self.assertEqual(actual_IN2,exp2)
exp2 = u'ஓர் ஆயிரம் புள்ளி நான்கு ஐந்து'
actual_IN2 = tamil.numeral.num2tamilstr(1000+0.45)
self.assertEqual(actual_IN2,exp2)
def test_VITHIVILAKKU(self):
if PYTHON2_6:
# exception API is different in Python 2.6
return
with self.assertRaises(Exception):
tamil.numeral.num2tamilstr( complex(5,6) )
with self.assertRaises(Exception):
tamil.numeral.num2tamilstr( 'mannagatti' )
if __name__ == '__main__':
unittest.main()
| StarcoderdataPython |
1643812 | <filename>sdk/python/pulumi_kubernetes_ingress_nginx/_inputs.py<gh_stars>1-10
# coding=utf-8
# *** WARNING: this file was generated by Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
import pulumi_kubernetes
__all__ = [
'AutoscalingBehaviorScalingPolicyArgs',
'AutoscalingBehaviorScalingArgs',
'AutoscalingBehaviorArgs',
'AutoscalingTemplatePodsMetricArgs',
'AutoscalingTemplatePodsTargetArgs',
'AutoscalingTemplatePodsArgs',
'AutoscalingTemplateArgs',
'AutoscalingArgs',
'ContollerAdmissionWebhooksArgs',
'ControllerAdmissionWebhooksCreateSecretJobArgs',
'ControllerAdmissionWebhooksPatchWebhbookJobArgs',
'ControllerAdmissionWebhooksPatchArgs',
'ControllerAdmissionWebhooksServiceArgs',
'ControllerCustomTemplateArgs',
'ControllerDefaultBackendServiceArgs',
'ControllerDefaultBackendArgs',
'ControllerHostPortPortsArgs',
'ControllerHostPortArgs',
'ControllerImageArgs',
'ControllerIngressClassResourceArgs',
'ControllerMetricsPrometheusRulesArgs',
'ControllerMetricsServiceMonitorArgs',
'ControllerMetricsServiceArgs',
'ControllerMetricsArgs',
'ControllerPodSecurityPolicyArgs',
'ControllerPortArgs',
'ControllerPublishServiceArgs',
'ControllerRBACArgs',
'ControllerRollingUpdateArgs',
'ControllerScopeArgs',
'ControllerServiceAccountArgs',
'ControllerServiceInternalArgs',
'ControllerServiceNodePortsArgs',
'ControllerServiceArgs',
'ControllerTcpArgs',
'ControllerUdpArgs',
'ControllerUpdateStrategyArgs',
'ControllerArgs',
'KedaScaledObjectArgs',
'KedaTriggerArgs',
'KedaArgs',
'ReleaseArgs',
'RepositoryOptsArgs',
]
@pulumi.input_type
class AutoscalingBehaviorScalingPolicyArgs:
def __init__(__self__, *,
period_seconds: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None,
value: Optional[pulumi.Input[int]] = None):
if period_seconds is not None:
pulumi.set(__self__, "period_seconds", period_seconds)
if type is not None:
pulumi.set(__self__, "type", type)
if value is not None:
pulumi.set(__self__, "value", value)
@property
@pulumi.getter(name="periodSeconds")
def period_seconds(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "period_seconds")
@period_seconds.setter
def period_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "period_seconds", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@property
@pulumi.getter
def value(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "value")
@value.setter
def value(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "value", value)
@pulumi.input_type
class AutoscalingBehaviorScalingArgs:
def __init__(__self__, *,
policies: Optional[pulumi.Input[Sequence[pulumi.Input['AutoscalingBehaviorScalingPolicyArgs']]]] = None,
stabilization_window_seconds: Optional[pulumi.Input[int]] = None):
if policies is not None:
pulumi.set(__self__, "policies", policies)
if stabilization_window_seconds is not None:
pulumi.set(__self__, "stabilization_window_seconds", stabilization_window_seconds)
@property
@pulumi.getter
def policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutoscalingBehaviorScalingPolicyArgs']]]]:
return pulumi.get(self, "policies")
@policies.setter
def policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutoscalingBehaviorScalingPolicyArgs']]]]):
pulumi.set(self, "policies", value)
@property
@pulumi.getter(name="stabilizationWindowSeconds")
def stabilization_window_seconds(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "stabilization_window_seconds")
@stabilization_window_seconds.setter
def stabilization_window_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "stabilization_window_seconds", value)
@pulumi.input_type
class AutoscalingBehaviorArgs:
def __init__(__self__, *,
scale_down: Optional[pulumi.Input['AutoscalingBehaviorScalingArgs']] = None,
scale_up: Optional[pulumi.Input['AutoscalingBehaviorScalingArgs']] = None):
if scale_down is not None:
pulumi.set(__self__, "scale_down", scale_down)
if scale_up is not None:
pulumi.set(__self__, "scale_up", scale_up)
@property
@pulumi.getter(name="scaleDown")
def scale_down(self) -> Optional[pulumi.Input['AutoscalingBehaviorScalingArgs']]:
return pulumi.get(self, "scale_down")
@scale_down.setter
def scale_down(self, value: Optional[pulumi.Input['AutoscalingBehaviorScalingArgs']]):
pulumi.set(self, "scale_down", value)
@property
@pulumi.getter(name="scaleUp")
def scale_up(self) -> Optional[pulumi.Input['AutoscalingBehaviorScalingArgs']]:
return pulumi.get(self, "scale_up")
@scale_up.setter
def scale_up(self, value: Optional[pulumi.Input['AutoscalingBehaviorScalingArgs']]):
pulumi.set(self, "scale_up", value)
@pulumi.input_type
class AutoscalingTemplatePodsMetricArgs:
def __init__(__self__, *,
name: Optional[pulumi.Input[str]] = None):
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class AutoscalingTemplatePodsTargetArgs:
def __init__(__self__, *,
average_value: Optional[pulumi.Input[str]] = None,
type: Optional[pulumi.Input[str]] = None):
if average_value is not None:
pulumi.set(__self__, "average_value", average_value)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="averageValue")
def average_value(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "average_value")
@average_value.setter
def average_value(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "average_value", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class AutoscalingTemplatePodsArgs:
def __init__(__self__, *,
metric: Optional[pulumi.Input['AutoscalingTemplatePodsMetricArgs']] = None,
target: Optional[pulumi.Input['AutoscalingTemplatePodsTargetArgs']] = None):
if metric is not None:
pulumi.set(__self__, "metric", metric)
if target is not None:
pulumi.set(__self__, "target", target)
@property
@pulumi.getter
def metric(self) -> Optional[pulumi.Input['AutoscalingTemplatePodsMetricArgs']]:
return pulumi.get(self, "metric")
@metric.setter
def metric(self, value: Optional[pulumi.Input['AutoscalingTemplatePodsMetricArgs']]):
pulumi.set(self, "metric", value)
@property
@pulumi.getter
def target(self) -> Optional[pulumi.Input['AutoscalingTemplatePodsTargetArgs']]:
return pulumi.get(self, "target")
@target.setter
def target(self, value: Optional[pulumi.Input['AutoscalingTemplatePodsTargetArgs']]):
pulumi.set(self, "target", value)
@pulumi.input_type
class AutoscalingTemplateArgs:
def __init__(__self__, *,
pods: Optional[pulumi.Input['AutoscalingTemplatePodsArgs']] = None,
type: Optional[pulumi.Input[str]] = None):
if pods is not None:
pulumi.set(__self__, "pods", pods)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def pods(self) -> Optional[pulumi.Input['AutoscalingTemplatePodsArgs']]:
return pulumi.get(self, "pods")
@pods.setter
def pods(self, value: Optional[pulumi.Input['AutoscalingTemplatePodsArgs']]):
pulumi.set(self, "pods", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class AutoscalingArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
controller_autoscaling_behavior: Optional[pulumi.Input['AutoscalingBehaviorArgs']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
max_replicas: Optional[pulumi.Input[int]] = None,
min_replicas: Optional[pulumi.Input[int]] = None,
target_cpu_utilization_percentage: Optional[pulumi.Input[int]] = None,
target_memory_utilization_percentage: Optional[pulumi.Input[int]] = None):
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if controller_autoscaling_behavior is not None:
pulumi.set(__self__, "controller_autoscaling_behavior", controller_autoscaling_behavior)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if max_replicas is not None:
pulumi.set(__self__, "max_replicas", max_replicas)
if min_replicas is not None:
pulumi.set(__self__, "min_replicas", min_replicas)
if target_cpu_utilization_percentage is not None:
pulumi.set(__self__, "target_cpu_utilization_percentage", target_cpu_utilization_percentage)
if target_memory_utilization_percentage is not None:
pulumi.set(__self__, "target_memory_utilization_percentage", target_memory_utilization_percentage)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="controllerAutoscalingBehavior")
def controller_autoscaling_behavior(self) -> Optional[pulumi.Input['AutoscalingBehaviorArgs']]:
return pulumi.get(self, "controller_autoscaling_behavior")
@controller_autoscaling_behavior.setter
def controller_autoscaling_behavior(self, value: Optional[pulumi.Input['AutoscalingBehaviorArgs']]):
pulumi.set(self, "controller_autoscaling_behavior", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="maxReplicas")
def max_replicas(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "max_replicas")
@max_replicas.setter
def max_replicas(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_replicas", value)
@property
@pulumi.getter(name="minReplicas")
def min_replicas(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "min_replicas")
@min_replicas.setter
def min_replicas(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_replicas", value)
@property
@pulumi.getter(name="targetCPUUtilizationPercentage")
def target_cpu_utilization_percentage(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "target_cpu_utilization_percentage")
@target_cpu_utilization_percentage.setter
def target_cpu_utilization_percentage(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "target_cpu_utilization_percentage", value)
@property
@pulumi.getter(name="targetMemoryUtilizationPercentage")
def target_memory_utilization_percentage(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "target_memory_utilization_percentage")
@target_memory_utilization_percentage.setter
def target_memory_utilization_percentage(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "target_memory_utilization_percentage", value)
@pulumi.input_type
class ControllerAdmissionWebhooksArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
certificate: Optional[pulumi.Input[str]] = None,
create_secret_job: Optional[pulumi.Input['ControllerAdmissionWebhooksCreateSecretJobArgs']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
existing_psp: Optional[pulumi.Input[str]] = None,
failure_policy: Optional[pulumi.Input[str]] = None,
key: Optional[pulumi.Input[str]] = None,
namespace_selector: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
object_selector: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
patch: Optional[pulumi.Input['ControllerAdmissionWebhooksPatchArgs']] = None,
                 patch_webhook_job: Optional[pulumi.Input['ControllerAdmissionWebhooksPatchWebhookJobArgs']] = None,
port: Optional[pulumi.Input[int]] = None,
service: Optional[pulumi.Input['ControllerAdmissionWebhooksServiceArgs']] = None,
timeout_seconds: Optional[pulumi.Input[int]] = None):
"""
:param pulumi.Input[str] existing_psp: Use an existing PSP instead of creating one.
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if certificate is not None:
pulumi.set(__self__, "certificate", certificate)
if create_secret_job is not None:
pulumi.set(__self__, "create_secret_job", create_secret_job)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if existing_psp is not None:
pulumi.set(__self__, "existing_psp", existing_psp)
if failure_policy is not None:
pulumi.set(__self__, "failure_policy", failure_policy)
if key is not None:
pulumi.set(__self__, "key", key)
if namespace_selector is not None:
pulumi.set(__self__, "namespace_selector", namespace_selector)
if object_selector is not None:
pulumi.set(__self__, "object_selector", object_selector)
if patch is not None:
pulumi.set(__self__, "patch", patch)
if patch_webhook_job is not None:
pulumi.set(__self__, "patch_webhook_job", patch_webhook_job)
if port is not None:
pulumi.set(__self__, "port", port)
if service is not None:
pulumi.set(__self__, "service", service)
if timeout_seconds is not None:
pulumi.set(__self__, "timeout_seconds", timeout_seconds)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter
def certificate(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "certificate")
@certificate.setter
def certificate(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "certificate", value)
@property
@pulumi.getter(name="createSecretJob")
def create_secret_job(self) -> Optional[pulumi.Input['ControllerAdmissionWebhooksCreateSecretJobArgs']]:
return pulumi.get(self, "create_secret_job")
@create_secret_job.setter
def create_secret_job(self, value: Optional[pulumi.Input['ControllerAdmissionWebhooksCreateSecretJobArgs']]):
pulumi.set(self, "create_secret_job", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="existingPsp")
def existing_psp(self) -> Optional[pulumi.Input[str]]:
"""
Use an existing PSP instead of creating one.
"""
return pulumi.get(self, "existing_psp")
@existing_psp.setter
def existing_psp(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "existing_psp", value)
@property
@pulumi.getter(name="failurePolicy")
def failure_policy(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "failure_policy")
@failure_policy.setter
def failure_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "failure_policy", value)
@property
@pulumi.getter
def key(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "key")
@key.setter
def key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key", value)
@property
@pulumi.getter(name="namespaceSelector")
def namespace_selector(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "namespace_selector")
@namespace_selector.setter
def namespace_selector(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "namespace_selector", value)
@property
@pulumi.getter(name="objectSelector")
def object_selector(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "object_selector")
@object_selector.setter
def object_selector(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "object_selector", value)
@property
@pulumi.getter
def patch(self) -> Optional[pulumi.Input['ControllerAdmissionWebhooksPatchArgs']]:
return pulumi.get(self, "patch")
@patch.setter
def patch(self, value: Optional[pulumi.Input['ControllerAdmissionWebhooksPatchArgs']]):
pulumi.set(self, "patch", value)
@property
@pulumi.getter(name="patchWebhookJob")
    def patch_webhook_job(self) -> Optional[pulumi.Input['ControllerAdmissionWebhooksPatchWebhookJobArgs']]:
        return pulumi.get(self, "patch_webhook_job")
    @patch_webhook_job.setter
    def patch_webhook_job(self, value: Optional[pulumi.Input['ControllerAdmissionWebhooksPatchWebhookJobArgs']]):
pulumi.set(self, "patch_webhook_job", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter
def service(self) -> Optional[pulumi.Input['ControllerAdmissionWebhooksServiceArgs']]:
return pulumi.get(self, "service")
@service.setter
def service(self, value: Optional[pulumi.Input['ControllerAdmissionWebhooksServiceArgs']]):
pulumi.set(self, "service", value)
@property
@pulumi.getter(name="timeoutSeconds")
def timeout_seconds(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "timeout_seconds")
@timeout_seconds.setter
def timeout_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "timeout_seconds", value)
@pulumi.input_type
class ControllerAdmissionWebhooksCreateSecretJobArgs:
def __init__(__self__, *,
resources: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']] = None):
if resources is not None:
pulumi.set(__self__, "resources", resources)
@property
@pulumi.getter
def resources(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']]:
return pulumi.get(self, "resources")
@resources.setter
def resources(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']]):
pulumi.set(self, "resources", value)
@pulumi.input_type
class ControllerAdmissionWebhooksPatchWebhookJobArgs:
def __init__(__self__, *,
resources: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']] = None):
if resources is not None:
pulumi.set(__self__, "resources", resources)
@property
@pulumi.getter
def resources(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']]:
return pulumi.get(self, "resources")
@resources.setter
def resources(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']]):
pulumi.set(self, "resources", value)
@pulumi.input_type
class ControllerAdmissionWebhooksPatchArgs:
def __init__(__self__, *,
enabled: Optional[pulumi.Input[bool]] = None,
image: Optional[pulumi.Input['ControllerImageArgs']] = None,
node_selector: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
pod_annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
priority_class_name: Optional[pulumi.Input[str]] = None,
run_as_user: Optional[pulumi.Input[int]] = None,
tolerations: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']]]] = None):
"""
:param pulumi.Input[str] priority_class_name: Provide a priority class name to the webhook patching job.
"""
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if image is not None:
pulumi.set(__self__, "image", image)
if node_selector is not None:
pulumi.set(__self__, "node_selector", node_selector)
if pod_annotations is not None:
pulumi.set(__self__, "pod_annotations", pod_annotations)
if priority_class_name is not None:
pulumi.set(__self__, "priority_class_name", priority_class_name)
if run_as_user is not None:
pulumi.set(__self__, "run_as_user", run_as_user)
if tolerations is not None:
pulumi.set(__self__, "tolerations", tolerations)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def image(self) -> Optional[pulumi.Input['ControllerImageArgs']]:
return pulumi.get(self, "image")
@image.setter
def image(self, value: Optional[pulumi.Input['ControllerImageArgs']]):
pulumi.set(self, "image", value)
@property
@pulumi.getter(name="nodeSelector")
def node_selector(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "node_selector")
@node_selector.setter
def node_selector(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "node_selector", value)
@property
@pulumi.getter(name="podAnnotations")
def pod_annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "pod_annotations")
@pod_annotations.setter
def pod_annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "pod_annotations", value)
@property
@pulumi.getter(name="priorityClassName")
def priority_class_name(self) -> Optional[pulumi.Input[str]]:
"""
Provide a priority class name to the webhook patching job.
"""
return pulumi.get(self, "priority_class_name")
@priority_class_name.setter
def priority_class_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "priority_class_name", value)
@property
@pulumi.getter(name="runAsUser")
def run_as_user(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "run_as_user")
@run_as_user.setter
def run_as_user(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "run_as_user", value)
@property
@pulumi.getter
def tolerations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']]]]:
return pulumi.get(self, "tolerations")
@tolerations.setter
def tolerations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']]]]):
pulumi.set(self, "tolerations", value)
@pulumi.input_type
class ControllerAdmissionWebhooksServiceArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
cluster_ip: Optional[pulumi.Input[str]] = None,
external_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
load_balancer_ips: Optional[pulumi.Input[str]] = None,
load_balancer_source_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
service_port: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None):
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if cluster_ip is not None:
pulumi.set(__self__, "cluster_ip", cluster_ip)
if external_ips is not None:
pulumi.set(__self__, "external_ips", external_ips)
if load_balancer_ips is not None:
pulumi.set(__self__, "load_balancer_ips", load_balancer_ips)
if load_balancer_source_ranges is not None:
pulumi.set(__self__, "load_balancer_source_ranges", load_balancer_source_ranges)
if service_port is not None:
pulumi.set(__self__, "service_port", service_port)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="clusterIP")
def cluster_ip(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_ip")
@cluster_ip.setter
def cluster_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_ip", value)
@property
@pulumi.getter(name="externalIPs")
def external_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "external_ips")
@external_ips.setter
def external_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "external_ips", value)
@property
@pulumi.getter(name="loadBalancerIPs")
def load_balancer_ips(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "load_balancer_ips")
@load_balancer_ips.setter
def load_balancer_ips(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancer_ips", value)
@property
@pulumi.getter(name="loadBalancerSourceRanges")
def load_balancer_source_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "load_balancer_source_ranges")
@load_balancer_source_ranges.setter
def load_balancer_source_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "load_balancer_source_ranges", value)
@property
@pulumi.getter(name="servicePort")
def service_port(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "service_port")
@service_port.setter
def service_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "service_port", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class ControllerCustomTemplateArgs:
def __init__(__self__, *,
config_map_key: Optional[pulumi.Input[str]] = None,
config_map_name: Optional[pulumi.Input[str]] = None):
if config_map_key is not None:
pulumi.set(__self__, "config_map_key", config_map_key)
if config_map_name is not None:
pulumi.set(__self__, "config_map_name", config_map_name)
@property
@pulumi.getter(name="configMapKey")
def config_map_key(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "config_map_key")
@config_map_key.setter
def config_map_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "config_map_key", value)
@property
@pulumi.getter(name="configMapName")
def config_map_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "config_map_name")
@config_map_name.setter
def config_map_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "config_map_name", value)
@pulumi.input_type
class ControllerDefaultBackendServiceArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
cluster_ip: Optional[pulumi.Input[str]] = None,
external_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
load_balancer_ip: Optional[pulumi.Input[str]] = None,
load_balancer_source_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
service_port: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[Sequence[pulumi.Input[str]]] external_ips: List of IP addresses at which the default backend service is available. Ref: https://kubernetes.io/docs/user-guide/services/#external-ips
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if cluster_ip is not None:
pulumi.set(__self__, "cluster_ip", cluster_ip)
if external_ips is not None:
pulumi.set(__self__, "external_ips", external_ips)
if load_balancer_ip is not None:
pulumi.set(__self__, "load_balancer_ip", load_balancer_ip)
if load_balancer_source_ranges is not None:
pulumi.set(__self__, "load_balancer_source_ranges", load_balancer_source_ranges)
if service_port is not None:
pulumi.set(__self__, "service_port", service_port)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="clusterIP")
def cluster_ip(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_ip")
@cluster_ip.setter
def cluster_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_ip", value)
@property
@pulumi.getter(name="externalIPs")
def external_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of IP addresses at which the default backend service is available. Ref: https://kubernetes.io/docs/user-guide/services/#external-ips
"""
return pulumi.get(self, "external_ips")
@external_ips.setter
def external_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "external_ips", value)
@property
@pulumi.getter(name="loadBalancerIP")
def load_balancer_ip(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "load_balancer_ip")
@load_balancer_ip.setter
def load_balancer_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancer_ip", value)
@property
@pulumi.getter(name="loadBalancerSourceRanges")
def load_balancer_source_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "load_balancer_source_ranges")
@load_balancer_source_ranges.setter
def load_balancer_source_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "load_balancer_source_ranges", value)
@property
@pulumi.getter(name="servicePort")
def service_port(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "service_port")
@service_port.setter
def service_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "service_port", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class ControllerDefaultBackendArgs:
def __init__(__self__, *,
affinity: Optional[pulumi.Input['pulumi_kubernetes.core.v1.AffinityArgs']] = None,
autoscaling: Optional[pulumi.Input['AutoscalingArgs']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
existing_psp: Optional[pulumi.Input[str]] = None,
extra_args: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
extra_envs: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.EnvVarArgs']]]] = None,
extra_volume_mounts: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeMountArgs']]]] = None,
extra_volumes: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeArgs']]]] = None,
image: Optional[pulumi.Input['ControllerImageArgs']] = None,
liveness_probe: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']] = None,
min_available: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
node_selector: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
pod_annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
pod_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
pod_security_context: Optional[pulumi.Input['pulumi_kubernetes.core.v1.PodSecurityContextArgs']] = None,
port: Optional[pulumi.Input[int]] = None,
priority_class_name: Optional[pulumi.Input[str]] = None,
readiness_probe: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']] = None,
replica_count: Optional[pulumi.Input[int]] = None,
resources: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']] = None,
service: Optional[pulumi.Input['ControllerDefaultBackendServiceArgs']] = None,
service_account: Optional[pulumi.Input['ControllerServiceAccountArgs']] = None,
tolerations: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']]]] = None):
"""
:param pulumi.Input[str] existing_psp: Use an existing PSP instead of creating one.
        :param pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeMountArgs']]] extra_volume_mounts: Additional volumeMounts for the default backend container, e.g. `- name: copy-portal-skins` with `mountPath: /var/lib/lemonldap-ng/portal/skins`.
        :param pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeArgs']]] extra_volumes: Additional volumes for the default backend pod, e.g. `- name: copy-portal-skins` with `emptyDir: {}`.
:param pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs'] liveness_probe: Liveness probe values for default backend. Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] node_selector: Node labels for default backend pod assignment Ref: https://kubernetes.io/docs/user-guide/node-selection/.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] pod_annotations: Annotations to be added to default backend pods.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] pod_labels: labels to add to the pod container metadata
        :param pulumi.Input['pulumi_kubernetes.core.v1.PodSecurityContextArgs'] pod_security_context: Security context policies for default backend pods. See https://kubernetes.io/docs/tasks/administer-cluster/sysctl-cluster/ for notes on enabling and using sysctls.
:param pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs'] readiness_probe: Readiness probe values for default backend. Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
:param pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']]] tolerations: Node tolerations for server scheduling to nodes with taints. Ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
"""
if affinity is not None:
pulumi.set(__self__, "affinity", affinity)
if autoscaling is not None:
pulumi.set(__self__, "autoscaling", autoscaling)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if existing_psp is not None:
pulumi.set(__self__, "existing_psp", existing_psp)
if extra_args is not None:
pulumi.set(__self__, "extra_args", extra_args)
if extra_envs is not None:
pulumi.set(__self__, "extra_envs", extra_envs)
if extra_volume_mounts is not None:
pulumi.set(__self__, "extra_volume_mounts", extra_volume_mounts)
if extra_volumes is not None:
pulumi.set(__self__, "extra_volumes", extra_volumes)
if image is not None:
pulumi.set(__self__, "image", image)
if liveness_probe is not None:
pulumi.set(__self__, "liveness_probe", liveness_probe)
if min_available is not None:
pulumi.set(__self__, "min_available", min_available)
if name is not None:
pulumi.set(__self__, "name", name)
if node_selector is not None:
pulumi.set(__self__, "node_selector", node_selector)
if pod_annotations is not None:
pulumi.set(__self__, "pod_annotations", pod_annotations)
if pod_labels is not None:
pulumi.set(__self__, "pod_labels", pod_labels)
if pod_security_context is not None:
pulumi.set(__self__, "pod_security_context", pod_security_context)
if port is not None:
pulumi.set(__self__, "port", port)
if priority_class_name is not None:
pulumi.set(__self__, "priority_class_name", priority_class_name)
if readiness_probe is not None:
pulumi.set(__self__, "readiness_probe", readiness_probe)
if replica_count is not None:
pulumi.set(__self__, "replica_count", replica_count)
if resources is not None:
pulumi.set(__self__, "resources", resources)
if service is not None:
pulumi.set(__self__, "service", service)
if service_account is not None:
pulumi.set(__self__, "service_account", service_account)
if tolerations is not None:
pulumi.set(__self__, "tolerations", tolerations)
@property
@pulumi.getter
def affinity(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.AffinityArgs']]:
return pulumi.get(self, "affinity")
@affinity.setter
def affinity(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.AffinityArgs']]):
pulumi.set(self, "affinity", value)
@property
@pulumi.getter
def autoscaling(self) -> Optional[pulumi.Input['AutoscalingArgs']]:
return pulumi.get(self, "autoscaling")
@autoscaling.setter
def autoscaling(self, value: Optional[pulumi.Input['AutoscalingArgs']]):
pulumi.set(self, "autoscaling", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="existingPsp")
def existing_psp(self) -> Optional[pulumi.Input[str]]:
"""
Use an existing PSP instead of creating one.
"""
return pulumi.get(self, "existing_psp")
@existing_psp.setter
def existing_psp(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "existing_psp", value)
@property
@pulumi.getter(name="extraArgs")
def extra_args(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "extra_args")
@extra_args.setter
def extra_args(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "extra_args", value)
@property
@pulumi.getter(name="extraEnvs")
def extra_envs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.EnvVarArgs']]]]:
return pulumi.get(self, "extra_envs")
@extra_envs.setter
def extra_envs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.EnvVarArgs']]]]):
pulumi.set(self, "extra_envs", value)
@property
@pulumi.getter(name="extraVolumeMounts")
def extra_volume_mounts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeMountArgs']]]]:
"""
        Additional volumeMounts for the default backend container, e.g. `- name: copy-portal-skins` with `mountPath: /var/lib/lemonldap-ng/portal/skins`.
"""
return pulumi.get(self, "extra_volume_mounts")
@extra_volume_mounts.setter
def extra_volume_mounts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeMountArgs']]]]):
pulumi.set(self, "extra_volume_mounts", value)
@property
@pulumi.getter(name="extraVolumes")
def extra_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeArgs']]]]:
"""
        Additional volumes for the default backend pod, e.g. `- name: copy-portal-skins` with `emptyDir: {}`.
"""
return pulumi.get(self, "extra_volumes")
@extra_volumes.setter
def extra_volumes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeArgs']]]]):
pulumi.set(self, "extra_volumes", value)
@property
@pulumi.getter
def image(self) -> Optional[pulumi.Input['ControllerImageArgs']]:
return pulumi.get(self, "image")
@image.setter
def image(self, value: Optional[pulumi.Input['ControllerImageArgs']]):
pulumi.set(self, "image", value)
@property
@pulumi.getter(name="livenessProbe")
def liveness_probe(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]:
"""
Liveness probe values for default backend. Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
"""
return pulumi.get(self, "liveness_probe")
@liveness_probe.setter
def liveness_probe(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]):
pulumi.set(self, "liveness_probe", value)
@property
@pulumi.getter(name="minAvailable")
def min_available(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "min_available")
@min_available.setter
def min_available(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_available", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="nodeSelector")
def node_selector(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Node labels for default backend pod assignment Ref: https://kubernetes.io/docs/user-guide/node-selection/.
"""
return pulumi.get(self, "node_selector")
@node_selector.setter
def node_selector(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "node_selector", value)
@property
@pulumi.getter(name="podAnnotations")
def pod_annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Annotations to be added to default backend pods.
"""
return pulumi.get(self, "pod_annotations")
@pod_annotations.setter
def pod_annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "pod_annotations", value)
@property
@pulumi.getter(name="podLabels")
def pod_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
labels to add to the pod container metadata
"""
return pulumi.get(self, "pod_labels")
@pod_labels.setter
def pod_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "pod_labels", value)
@property
@pulumi.getter(name="podSecurityContext")
def pod_security_context(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.PodSecurityContextArgs']]:
"""
        Security context policies for default backend pods. See https://kubernetes.io/docs/tasks/administer-cluster/sysctl-cluster/ for notes on enabling and using sysctls.
"""
return pulumi.get(self, "pod_security_context")
@pod_security_context.setter
def pod_security_context(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.PodSecurityContextArgs']]):
pulumi.set(self, "pod_security_context", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="priorityClassName")
def priority_class_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "priority_class_name")
@priority_class_name.setter
def priority_class_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "priority_class_name", value)
@property
@pulumi.getter(name="readinessProbe")
def readiness_probe(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]:
"""
Readiness probe values for default backend. Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
"""
return pulumi.get(self, "readiness_probe")
@readiness_probe.setter
def readiness_probe(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]):
pulumi.set(self, "readiness_probe", value)
@property
@pulumi.getter(name="replicaCount")
def replica_count(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "replica_count")
@replica_count.setter
def replica_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "replica_count", value)
@property
@pulumi.getter
def resources(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']]:
return pulumi.get(self, "resources")
@resources.setter
def resources(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']]):
pulumi.set(self, "resources", value)
@property
@pulumi.getter
def service(self) -> Optional[pulumi.Input['ControllerDefaultBackendServiceArgs']]:
return pulumi.get(self, "service")
@service.setter
def service(self, value: Optional[pulumi.Input['ControllerDefaultBackendServiceArgs']]):
pulumi.set(self, "service", value)
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> Optional[pulumi.Input['ControllerServiceAccountArgs']]:
return pulumi.get(self, "service_account")
@service_account.setter
def service_account(self, value: Optional[pulumi.Input['ControllerServiceAccountArgs']]):
pulumi.set(self, "service_account", value)
@property
@pulumi.getter
def tolerations(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']]]]:
"""
Node tolerations for server scheduling to nodes with taints. Ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/
"""
return pulumi.get(self, "tolerations")
@tolerations.setter
def tolerations(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']]]]):
pulumi.set(self, "tolerations", value)
@pulumi.input_type
class ControllerHostPortPortsArgs:
def __init__(__self__, *,
http: Optional[pulumi.Input[int]] = None,
https: Optional[pulumi.Input[int]] = None):
if http is not None:
pulumi.set(__self__, "http", http)
if https is not None:
pulumi.set(__self__, "https", https)
@property
@pulumi.getter
def http(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "http")
@http.setter
def http(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "http", value)
@property
@pulumi.getter
def https(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "https")
@https.setter
def https(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "https", value)
@pulumi.input_type
class ControllerHostPortArgs:
def __init__(__self__, *,
enabled: Optional[pulumi.Input[bool]] = None,
ports: Optional[pulumi.Input['ControllerHostPortPortsArgs']] = None):
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if ports is not None:
pulumi.set(__self__, "ports", ports)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def ports(self) -> Optional[pulumi.Input['ControllerHostPortPortsArgs']]:
return pulumi.get(self, "ports")
@ports.setter
def ports(self, value: Optional[pulumi.Input['ControllerHostPortPortsArgs']]):
pulumi.set(self, "ports", value)
@pulumi.input_type
class ControllerImageArgs:
def __init__(__self__, *,
allow_privilege_escalation: Optional[pulumi.Input[bool]] = None,
digest: Optional[pulumi.Input[str]] = None,
image: Optional[pulumi.Input[str]] = None,
pull_policy: Optional[pulumi.Input[str]] = None,
read_only_root_filesystem: Optional[pulumi.Input[bool]] = None,
registry: Optional[pulumi.Input[str]] = None,
repository: Optional[pulumi.Input[str]] = None,
run_as_non_root: Optional[pulumi.Input[bool]] = None,
run_as_user: Optional[pulumi.Input[str]] = None,
tag: Optional[pulumi.Input[str]] = None):
"""
        :param pulumi.Input[str] repository: For backwards compatibility, consider setting the full image URL via this repository value. Use *either* the current default registry/image format *or* the repository format; specifying both will cause the install to fail.
"""
if allow_privilege_escalation is not None:
pulumi.set(__self__, "allow_privilege_escalation", allow_privilege_escalation)
if digest is not None:
pulumi.set(__self__, "digest", digest)
if image is not None:
pulumi.set(__self__, "image", image)
if pull_policy is not None:
pulumi.set(__self__, "pull_policy", pull_policy)
if read_only_root_filesystem is not None:
pulumi.set(__self__, "read_only_root_filesystem", read_only_root_filesystem)
if registry is not None:
pulumi.set(__self__, "registry", registry)
if repository is not None:
pulumi.set(__self__, "repository", repository)
if run_as_non_root is not None:
pulumi.set(__self__, "run_as_non_root", run_as_non_root)
if run_as_user is not None:
pulumi.set(__self__, "run_as_user", run_as_user)
if tag is not None:
pulumi.set(__self__, "tag", tag)
@property
@pulumi.getter(name="allowPrivilegeEscalation")
def allow_privilege_escalation(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "allow_privilege_escalation")
@allow_privilege_escalation.setter
def allow_privilege_escalation(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "allow_privilege_escalation", value)
@property
@pulumi.getter
def digest(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "digest")
@digest.setter
def digest(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "digest", value)
@property
@pulumi.getter
def image(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "image")
@image.setter
def image(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image", value)
@property
@pulumi.getter(name="pullPolicy")
def pull_policy(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "pull_policy")
@pull_policy.setter
def pull_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "pull_policy", value)
@property
@pulumi.getter(name="readOnlyRootFilesystem")
def read_only_root_filesystem(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "read_only_root_filesystem")
@read_only_root_filesystem.setter
def read_only_root_filesystem(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "read_only_root_filesystem", value)
@property
@pulumi.getter
def registry(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "registry")
@registry.setter
def registry(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "registry", value)
@property
@pulumi.getter
def repository(self) -> Optional[pulumi.Input[str]]:
"""
        For backwards compatibility, consider setting the full image URL via this repository value. Use *either* the current default registry/image format *or* the repository format; specifying both will cause the install to fail.
"""
return pulumi.get(self, "repository")
@repository.setter
def repository(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "repository", value)
@property
@pulumi.getter(name="runAsNonRoot")
def run_as_non_root(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "run_as_non_root")
@run_as_non_root.setter
def run_as_non_root(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "run_as_non_root", value)
@property
@pulumi.getter(name="runAsUser")
def run_as_user(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "run_as_user")
@run_as_user.setter
def run_as_user(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "run_as_user", value)
@property
@pulumi.getter
def tag(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "tag")
@tag.setter
def tag(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tag", value)
@pulumi.input_type
class ControllerIngressClassResourceArgs:
def __init__(__self__, *,
controller_value: Optional[pulumi.Input[str]] = None,
default: Optional[pulumi.Input[bool]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None,
parameters: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None):
"""
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] parameters: Parameters is a link to a custom resource containing additional configuration for the controller. This is optional if the controller does not require extra parameters.
"""
if controller_value is not None:
pulumi.set(__self__, "controller_value", controller_value)
if default is not None:
pulumi.set(__self__, "default", default)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if name is not None:
pulumi.set(__self__, "name", name)
if parameters is not None:
pulumi.set(__self__, "parameters", parameters)
@property
@pulumi.getter(name="controllerValue")
def controller_value(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "controller_value")
@controller_value.setter
def controller_value(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "controller_value", value)
@property
@pulumi.getter
def default(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "default")
@default.setter
def default(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "default", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def parameters(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
Parameters is a link to a custom resource containing additional configuration for the controller. This is optional if the controller does not require extra parameters.
"""
return pulumi.get(self, "parameters")
@parameters.setter
def parameters(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "parameters", value)
@pulumi.input_type
class ControllerMetricsPrometheusRulesArgs:
def __init__(__self__, *,
additional_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
namespace: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None):
if additional_labels is not None:
pulumi.set(__self__, "additional_labels", additional_labels)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if rules is not None:
pulumi.set(__self__, "rules", rules)
@property
@pulumi.getter(name="additionalLabels")
def additional_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "additional_labels")
@additional_labels.setter
def additional_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "additional_labels", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter
def rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "rules")
@rules.setter
def rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "rules", value)
@pulumi.input_type
class ControllerMetricsServiceMonitorArgs:
def __init__(__self__, *,
additional_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
honor_labels: Optional[pulumi.Input[bool]] = None,
job_label: Optional[pulumi.Input[str]] = None,
metric_relabelings: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
namespace: Optional[pulumi.Input[str]] = None,
namespace_selector: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
scrape_interval: Optional[pulumi.Input[str]] = None,
target_labels: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
        :param pulumi.Input[str] job_label: The label from which to retrieve the job name.
"""
if additional_labels is not None:
pulumi.set(__self__, "additional_labels", additional_labels)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if honor_labels is not None:
pulumi.set(__self__, "honor_labels", honor_labels)
if job_label is not None:
pulumi.set(__self__, "job_label", job_label)
if metric_relabelings is not None:
pulumi.set(__self__, "metric_relabelings", metric_relabelings)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if namespace_selector is not None:
pulumi.set(__self__, "namespace_selector", namespace_selector)
if scrape_interval is not None:
pulumi.set(__self__, "scrape_interval", scrape_interval)
if target_labels is not None:
pulumi.set(__self__, "target_labels", target_labels)
@property
@pulumi.getter(name="additionalLabels")
def additional_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "additional_labels")
@additional_labels.setter
def additional_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "additional_labels", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="honorLabels")
def honor_labels(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "honor_labels")
@honor_labels.setter
def honor_labels(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "honor_labels", value)
@property
@pulumi.getter(name="jobLabel")
def job_label(self) -> Optional[pulumi.Input[str]]:
"""
        The label from which to retrieve the job name.
"""
return pulumi.get(self, "job_label")
@job_label.setter
def job_label(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "job_label", value)
@property
@pulumi.getter(name="metricRelabelings")
def metric_relabelings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "metric_relabelings")
@metric_relabelings.setter
def metric_relabelings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "metric_relabelings", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter(name="namespaceSelector")
def namespace_selector(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "namespace_selector")
@namespace_selector.setter
def namespace_selector(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "namespace_selector", value)
@property
@pulumi.getter(name="scrapeInterval")
def scrape_interval(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "scrape_interval")
@scrape_interval.setter
def scrape_interval(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "scrape_interval", value)
@property
@pulumi.getter(name="targetLabels")
def target_labels(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "target_labels")
@target_labels.setter
def target_labels(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "target_labels", value)
@pulumi.input_type
class ControllerMetricsServiceArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
cluster_ip: Optional[pulumi.Input[str]] = None,
external_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
external_traffic_policy: Optional[pulumi.Input[str]] = None,
load_balancer_ips: Optional[pulumi.Input[str]] = None,
load_balancer_source_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
node_port: Optional[pulumi.Input[str]] = None,
service_port: Optional[pulumi.Input[int]] = None,
type: Optional[pulumi.Input[str]] = None):
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if cluster_ip is not None:
pulumi.set(__self__, "cluster_ip", cluster_ip)
if external_ips is not None:
pulumi.set(__self__, "external_ips", external_ips)
if external_traffic_policy is not None:
pulumi.set(__self__, "external_traffic_policy", external_traffic_policy)
if load_balancer_ips is not None:
pulumi.set(__self__, "load_balancer_ips", load_balancer_ips)
if load_balancer_source_ranges is not None:
pulumi.set(__self__, "load_balancer_source_ranges", load_balancer_source_ranges)
if node_port is not None:
pulumi.set(__self__, "node_port", node_port)
if service_port is not None:
pulumi.set(__self__, "service_port", service_port)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="clusterIP")
def cluster_ip(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_ip")
@cluster_ip.setter
def cluster_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_ip", value)
@property
@pulumi.getter(name="externalIPs")
def external_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "external_ips")
@external_ips.setter
def external_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "external_ips", value)
@property
@pulumi.getter(name="externalTrafficPolicy")
def external_traffic_policy(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "external_traffic_policy")
@external_traffic_policy.setter
def external_traffic_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "external_traffic_policy", value)
@property
@pulumi.getter(name="loadBalancerIPs")
def load_balancer_ips(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "load_balancer_ips")
@load_balancer_ips.setter
def load_balancer_ips(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancer_ips", value)
@property
@pulumi.getter(name="loadBalancerSourceRanges")
def load_balancer_source_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "load_balancer_source_ranges")
@load_balancer_source_ranges.setter
def load_balancer_source_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "load_balancer_source_ranges", value)
@property
@pulumi.getter(name="nodePort")
def node_port(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "node_port")
@node_port.setter
def node_port(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "node_port", value)
@property
@pulumi.getter(name="servicePort")
def service_port(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "service_port")
@service_port.setter
def service_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "service_port", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class ControllerMetricsArgs:
def __init__(__self__, *,
enabled: Optional[pulumi.Input[bool]] = None,
port: Optional[pulumi.Input[int]] = None,
prometheus_rule: Optional[pulumi.Input['ControllerMetricsPrometheusRulesArgs']] = None,
service: Optional[pulumi.Input['ControllerMetricsServiceArgs']] = None,
service_monitor: Optional[pulumi.Input['ControllerMetricsServiceMonitorArgs']] = None):
"""
        :param pulumi.Input[int] port: If this port is changed, update healthz-port in extraArgs accordingly.
"""
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if port is not None:
pulumi.set(__self__, "port", port)
if prometheus_rule is not None:
pulumi.set(__self__, "prometheus_rule", prometheus_rule)
if service is not None:
pulumi.set(__self__, "service", service)
if service_monitor is not None:
pulumi.set(__self__, "service_monitor", service_monitor)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
"""
If this port is changed, update healthz-port in extraArgs accordingly.
"""
return pulumi.get(self, "port")
@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "port", value)
@property
@pulumi.getter(name="prometheusRule")
def prometheus_rule(self) -> Optional[pulumi.Input['ControllerMetricsPrometheusRulesArgs']]:
return pulumi.get(self, "prometheus_rule")
@prometheus_rule.setter
def prometheus_rule(self, value: Optional[pulumi.Input['ControllerMetricsPrometheusRulesArgs']]):
pulumi.set(self, "prometheus_rule", value)
@property
@pulumi.getter
def service(self) -> Optional[pulumi.Input['ControllerMetricsServiceArgs']]:
return pulumi.get(self, "service")
@service.setter
def service(self, value: Optional[pulumi.Input['ControllerMetricsServiceArgs']]):
pulumi.set(self, "service", value)
@property
@pulumi.getter(name="serviceMonitor")
def service_monitor(self) -> Optional[pulumi.Input['ControllerMetricsServiceMonitorArgs']]:
return pulumi.get(self, "service_monitor")
@service_monitor.setter
def service_monitor(self, value: Optional[pulumi.Input['ControllerMetricsServiceMonitorArgs']]):
pulumi.set(self, "service_monitor", value)
@pulumi.input_type
class ControllerPodSecurityPolicyArgs:
def __init__(__self__, *,
enabled: Optional[pulumi.Input[bool]] = None):
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@pulumi.input_type
class ControllerPortArgs:
def __init__(__self__, *,
http: Optional[pulumi.Input[int]] = None,
https: Optional[pulumi.Input[int]] = None):
if http is not None:
pulumi.set(__self__, "http", http)
if https is not None:
pulumi.set(__self__, "https", https)
@property
@pulumi.getter
def http(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "http")
@http.setter
def http(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "http", value)
@property
@pulumi.getter
def https(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "https")
@https.setter
def https(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "https", value)
@pulumi.input_type
class ControllerPublishServiceArgs:
def __init__(__self__, *,
enabled: Optional[pulumi.Input[bool]] = None,
path_override: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[str] path_override: Allows overriding the publish service to bind to. Must be in the form <namespace>/<service_name>.
"""
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if path_override is not None:
pulumi.set(__self__, "path_override", path_override)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="pathOverride")
def path_override(self) -> Optional[pulumi.Input[str]]:
"""
Allows overriding the publish service to bind to. Must be in the form <namespace>/<service_name>.
"""
return pulumi.get(self, "path_override")
@path_override.setter
def path_override(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "path_override", value)
@pulumi.input_type
class ControllerRBACArgs:
def __init__(__self__, *,
create: Optional[pulumi.Input[bool]] = None,
scope: Optional[pulumi.Input[bool]] = None):
if create is not None:
pulumi.set(__self__, "create", create)
if scope is not None:
pulumi.set(__self__, "scope", scope)
@property
@pulumi.getter
def create(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "create")
@create.setter
def create(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "create", value)
@property
@pulumi.getter
def scope(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "scope")
@scope.setter
def scope(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "scope", value)
@pulumi.input_type
class ControllerRollingUpdateArgs:
def __init__(__self__, *,
max_unavailable: Optional[pulumi.Input[int]] = None):
if max_unavailable is not None:
pulumi.set(__self__, "max_unavailable", max_unavailable)
@property
@pulumi.getter(name="maxUnavailable")
def max_unavailable(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "max_unavailable")
@max_unavailable.setter
def max_unavailable(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_unavailable", value)
@pulumi.input_type
class ControllerScopeArgs:
def __init__(__self__, *,
enabled: Optional[pulumi.Input[bool]] = None,
namespace: Optional[pulumi.Input[str]] = None):
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@pulumi.input_type
class ControllerServiceAccountArgs:
def __init__(__self__, *,
automount_service_account_token: Optional[pulumi.Input[bool]] = None,
create: Optional[pulumi.Input[bool]] = None,
name: Optional[pulumi.Input[str]] = None):
if automount_service_account_token is not None:
pulumi.set(__self__, "automount_service_account_token", automount_service_account_token)
if create is not None:
pulumi.set(__self__, "create", create)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="automountServiceAccountToken")
def automount_service_account_token(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "automount_service_account_token")
@automount_service_account_token.setter
def automount_service_account_token(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "automount_service_account_token", value)
@property
@pulumi.getter
def create(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "create")
@create.setter
def create(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "create", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class ControllerServiceInternalArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
external_traffic_policy: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
load_balancer_ips: Optional[pulumi.Input[str]] = None,
load_balancer_source_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
:param pulumi.Input[str] external_traffic_policy: Set the external traffic policy to "Local" to preserve the source IP on providers that support it. Ref: https://kubernetes.io/docs/tutorials/services/source-ip/#source-ip-for-services-with-typeloadbalancer
:param pulumi.Input[Sequence[pulumi.Input[str]]] load_balancer_source_ranges: Restrict access for the LoadBalancer service. Defaults to 0.0.0.0/0.
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if external_traffic_policy is not None:
pulumi.set(__self__, "external_traffic_policy", external_traffic_policy)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if load_balancer_ips is not None:
pulumi.set(__self__, "load_balancer_ips", load_balancer_ips)
if load_balancer_source_ranges is not None:
pulumi.set(__self__, "load_balancer_source_ranges", load_balancer_source_ranges)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="externalTrafficPolicy")
def external_traffic_policy(self) -> Optional[pulumi.Input[str]]:
"""
Set the external traffic policy to "Local" to preserve the source IP on providers that support it. Ref: https://kubernetes.io/docs/tutorials/services/source-ip/#source-ip-for-services-with-typeloadbalancer
"""
return pulumi.get(self, "external_traffic_policy")
@external_traffic_policy.setter
def external_traffic_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "external_traffic_policy", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter(name="loadBalancerIPs")
def load_balancer_ips(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "load_balancer_ips")
@load_balancer_ips.setter
def load_balancer_ips(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancer_ips", value)
@property
@pulumi.getter(name="loadBalancerSourceRanges")
def load_balancer_source_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Restrict access for the LoadBalancer service. Defaults to 0.0.0.0/0.
"""
return pulumi.get(self, "load_balancer_source_ranges")
@load_balancer_source_ranges.setter
def load_balancer_source_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "load_balancer_source_ranges", value)
@pulumi.input_type
class ControllerServiceNodePortsArgs:
def __init__(__self__, *,
http: Optional[pulumi.Input[str]] = None,
https: Optional[pulumi.Input[str]] = None,
tcp: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
udp: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None):
if http is not None:
pulumi.set(__self__, "http", http)
if https is not None:
pulumi.set(__self__, "https", https)
if tcp is not None:
pulumi.set(__self__, "tcp", tcp)
if udp is not None:
pulumi.set(__self__, "udp", udp)
@property
@pulumi.getter
def http(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "http")
@http.setter
def http(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "http", value)
@property
@pulumi.getter
def https(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "https")
@https.setter
def https(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "https", value)
@property
@pulumi.getter
def tcp(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "tcp")
@tcp.setter
def tcp(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "tcp", value)
@property
@pulumi.getter
def udp(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "udp")
@udp.setter
def udp(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "udp", value)
@pulumi.input_type
class ControllerServiceArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
cluster_ip: Optional[pulumi.Input[str]] = None,
enable_http: Optional[pulumi.Input[bool]] = None,
enable_https: Optional[pulumi.Input[bool]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
external_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
external_traffic_policy: Optional[pulumi.Input[str]] = None,
health_check_node_port: Optional[pulumi.Input[int]] = None,
internal: Optional[pulumi.Input['ControllerServiceInternalArgs']] = None,
labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
load_balancer_ips: Optional[pulumi.Input[str]] = None,
load_balancer_source_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
node_ports: Optional[pulumi.Input['ControllerServiceNodePortsArgs']] = None,
ports: Optional[pulumi.Input['ControllerPortArgs']] = None,
session_affinity: Optional[pulumi.Input[str]] = None,
target_ports: Optional[pulumi.Input['ControllerPortArgs']] = None,
type: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[Sequence[pulumi.Input[str]]] external_ips: List of IP addresses at which the controller services are available. Ref: https://kubernetes.io/docs/user-guide/services/#external-ips
:param pulumi.Input[str] external_traffic_policy: Set the external traffic policy to "Local" to preserve the source IP on providers that support it. Ref: https://kubernetes.io/docs/tutorials/services/source-ip/#source-ip-for-services-with-typeloadbalancer
:param pulumi.Input[int] health_check_node_port: Specifies the health check node port (numeric port number) for the service. If healthCheckNodePort isn't specified, the service controller allocates a port from your cluster's NodePort range. Ref: https://kubernetes.io/docs/tasks/access-application-cluster/create-external-load-balancer/#preserving-the-client-source-ip
:param pulumi.Input['ControllerServiceInternalArgs'] internal: Enables an additional internal load balancer (besides the external one). Annotations are mandatory for the load balancer to come up and vary by cloud provider.
:param pulumi.Input[str] session_affinity: Must be either "None" or "ClientIP" if set. Kubernetes will default to "None". Ref: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if cluster_ip is not None:
pulumi.set(__self__, "cluster_ip", cluster_ip)
if enable_http is not None:
pulumi.set(__self__, "enable_http", enable_http)
if enable_https is not None:
pulumi.set(__self__, "enable_https", enable_https)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if external_ips is not None:
pulumi.set(__self__, "external_ips", external_ips)
if external_traffic_policy is not None:
pulumi.set(__self__, "external_traffic_policy", external_traffic_policy)
if health_check_node_port is not None:
pulumi.set(__self__, "health_check_node_port", health_check_node_port)
if internal is not None:
pulumi.set(__self__, "internal", internal)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if load_balancer_ips is not None:
pulumi.set(__self__, "load_balancer_ips", load_balancer_ips)
if load_balancer_source_ranges is not None:
pulumi.set(__self__, "load_balancer_source_ranges", load_balancer_source_ranges)
if node_ports is not None:
pulumi.set(__self__, "node_ports", node_ports)
if ports is not None:
pulumi.set(__self__, "ports", ports)
if session_affinity is not None:
pulumi.set(__self__, "session_affinity", session_affinity)
if target_ports is not None:
pulumi.set(__self__, "target_ports", target_ports)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="clusterIP")
def cluster_ip(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_ip")
@cluster_ip.setter
def cluster_ip(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_ip", value)
@property
@pulumi.getter(name="enableHttp")
def enable_http(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enable_http")
@enable_http.setter
def enable_http(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_http", value)
@property
@pulumi.getter(name="enableHttps")
def enable_https(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enable_https")
@enable_https.setter
def enable_https(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_https", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="externalIPs")
def external_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of IP addresses at which the controller services are available. Ref: https://kubernetes.io/docs/user-guide/services/#external-ips
"""
return pulumi.get(self, "external_ips")
@external_ips.setter
def external_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "external_ips", value)
@property
@pulumi.getter(name="externalTrafficPolicy")
def external_traffic_policy(self) -> Optional[pulumi.Input[str]]:
"""
Set the external traffic policy to "Local" to preserve the source IP on providers that support it. Ref: https://kubernetes.io/docs/tutorials/services/source-ip/#source-ip-for-services-with-typeloadbalancer
"""
return pulumi.get(self, "external_traffic_policy")
@external_traffic_policy.setter
def external_traffic_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "external_traffic_policy", value)
@property
@pulumi.getter(name="healthCheckNodePort")
def health_check_node_port(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the health check node port (numeric port number) for the service. If healthCheckNodePort isn't specified, the service controller allocates a port from your cluster's NodePort range. Ref: https://kubernetes.io/docs/tasks/access-application-cluster/create-external-load-balancer/#preserving-the-client-source-ip
"""
return pulumi.get(self, "health_check_node_port")
@health_check_node_port.setter
def health_check_node_port(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "health_check_node_port", value)
@property
@pulumi.getter
def internal(self) -> Optional[pulumi.Input['ControllerServiceInternalArgs']]:
"""
Enables an additional internal load balancer (besides the external one). Annotations are mandatory for the load balancer to come up and vary by cloud provider.
"""
return pulumi.get(self, "internal")
@internal.setter
def internal(self, value: Optional[pulumi.Input['ControllerServiceInternalArgs']]):
pulumi.set(self, "internal", value)
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "labels")
@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "labels", value)
@property
@pulumi.getter(name="loadBalancerIPs")
def load_balancer_ips(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "load_balancer_ips")
@load_balancer_ips.setter
def load_balancer_ips(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancer_ips", value)
@property
@pulumi.getter(name="loadBalancerSourceRanges")
def load_balancer_source_ranges(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "load_balancer_source_ranges")
@load_balancer_source_ranges.setter
def load_balancer_source_ranges(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "load_balancer_source_ranges", value)
@property
@pulumi.getter(name="nodePorts")
def node_ports(self) -> Optional[pulumi.Input['ControllerServiceNodePortsArgs']]:
return pulumi.get(self, "node_ports")
@node_ports.setter
def node_ports(self, value: Optional[pulumi.Input['ControllerServiceNodePortsArgs']]):
pulumi.set(self, "node_ports", value)
@property
@pulumi.getter
def ports(self) -> Optional[pulumi.Input['ControllerPortArgs']]:
return pulumi.get(self, "ports")
@ports.setter
def ports(self, value: Optional[pulumi.Input['ControllerPortArgs']]):
pulumi.set(self, "ports", value)
@property
@pulumi.getter(name="sessionAffinity")
def session_affinity(self) -> Optional[pulumi.Input[str]]:
"""
Must be either "None" or "ClientIP" if set. Kubernetes will default to "None". Ref: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies
"""
return pulumi.get(self, "session_affinity")
@session_affinity.setter
def session_affinity(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "session_affinity", value)
@property
@pulumi.getter(name="targetPorts")
def target_ports(self) -> Optional[pulumi.Input['ControllerPortArgs']]:
return pulumi.get(self, "target_ports")
@target_ports.setter
def target_ports(self, value: Optional[pulumi.Input['ControllerPortArgs']]):
pulumi.set(self, "target_ports", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class ControllerTcpArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
config_map_namespace: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] annotations: Annotations to be added to the TCP services ConfigMap.
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if config_map_namespace is not None:
pulumi.set(__self__, "config_map_namespace", config_map_namespace)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Annotations to be added to the TCP services ConfigMap.
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="configMapNamespace")
def config_map_namespace(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "config_map_namespace")
@config_map_namespace.setter
def config_map_namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "config_map_namespace", value)
@pulumi.input_type
class ControllerUdpArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
config_map_namespace: Optional[pulumi.Input[str]] = None):
"""
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] annotations: Annotations to be added to the UDP services ConfigMap.
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if config_map_namespace is not None:
pulumi.set(__self__, "config_map_namespace", config_map_namespace)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Annotations to be added to the UDP services ConfigMap.
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter(name="configMapNamespace")
def config_map_namespace(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "config_map_namespace")
@config_map_namespace.setter
def config_map_namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "config_map_namespace", value)
@pulumi.input_type
class ControllerUpdateStrategyArgs:
def __init__(__self__, *,
rolling_update: Optional[pulumi.Input['ControllerRollingUpdateArgs']] = None,
type: Optional[pulumi.Input[str]] = None):
if rolling_update is not None:
pulumi.set(__self__, "rolling_update", rolling_update)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="rollingUpdate")
def rolling_update(self) -> Optional[pulumi.Input['ControllerRollingUpdateArgs']]:
return pulumi.get(self, "rolling_update")
@rolling_update.setter
def rolling_update(self, value: Optional[pulumi.Input['ControllerRollingUpdateArgs']]):
pulumi.set(self, "rolling_update", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class ControllerArgs:
def __init__(__self__, *,
add_headers: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
admission_webhooks: Optional[pulumi.Input['ContollerAdmissionWebhooksArgs']] = None,
affinity: Optional[pulumi.Input['pulumi_kubernetes.core.v1.AffinityArgs']] = None,
allow_snippet_annotations: Optional[pulumi.Input[bool]] = None,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
autoscaling: Optional[pulumi.Input['AutoscalingArgs']] = None,
autoscaling_template: Optional[pulumi.Input[Sequence[pulumi.Input['AutoscalingTemplateArgs']]]] = None,
config: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
config_annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
config_map_namespace: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
container_port: Optional[pulumi.Input['ControllerPortArgs']] = None,
custom_template: Optional[pulumi.Input['ControllerCustomTemplateArgs']] = None,
dns_config: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
dns_policy: Optional[pulumi.Input[str]] = None,
election_id: Optional[pulumi.Input[str]] = None,
enable_mimalloc: Optional[pulumi.Input[bool]] = None,
existing_psp: Optional[pulumi.Input[str]] = None,
extra_args: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
extra_containers: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.ContainerArgs']]]] = None,
extra_envs: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.EnvVarArgs']]]] = None,
extra_init_containers: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.ContainerArgs']]]] = None,
extra_volume_mounts: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeMountArgs']]]] = None,
extra_volumes: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeArgs']]]] = None,
health_check_path: Optional[pulumi.Input[str]] = None,
heath_check_host: Optional[pulumi.Input[str]] = None,
host_network: Optional[pulumi.Input[bool]] = None,
host_port: Optional[pulumi.Input['ControllerHostPortArgs']] = None,
hostname: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
image: Optional[pulumi.Input['ControllerImageArgs']] = None,
ingress_class_by_name: Optional[pulumi.Input[bool]] = None,
ingress_class_resource: Optional[pulumi.Input['ControllerIngressClassResourceArgs']] = None,
keda: Optional[pulumi.Input['KedaArgs']] = None,
kind: Optional[pulumi.Input[str]] = None,
lifecycle: Optional[pulumi.Input['pulumi_kubernetes.core.v1.LifecycleArgs']] = None,
liveness_probe: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']] = None,
maxmind_license_key: Optional[pulumi.Input[str]] = None,
metrics: Optional[pulumi.Input['ControllerMetricsArgs']] = None,
min_available: Optional[pulumi.Input[int]] = None,
min_ready_seconds: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
node_selector: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
pod_annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
pod_labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
pod_security_context: Optional[pulumi.Input['pulumi_kubernetes.core.v1.PodSecurityContextArgs']] = None,
priority_class_name: Optional[pulumi.Input[str]] = None,
proxy_set_headers: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
publish_service: Optional[pulumi.Input['ControllerPublishServiceArgs']] = None,
readiness_probe: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']] = None,
replica_count: Optional[pulumi.Input[int]] = None,
report_node_internal_ip: Optional[pulumi.Input[bool]] = None,
resources: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']] = None,
scope: Optional[pulumi.Input['ControllerScopeArgs']] = None,
service: Optional[pulumi.Input['ControllerServiceArgs']] = None,
startup_probe: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']] = None,
sysctls: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
tcp: Optional[pulumi.Input['ControllerTcpArgs']] = None,
terminate_grace_period_seconds: Optional[pulumi.Input[int]] = None,
tolerations: Optional[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']] = None,
topology_spread_constraints: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TopologySpreadConstraintArgs']]]] = None,
udp: Optional[pulumi.Input['ControllerUdpArgs']] = None,
update_strategy: Optional[pulumi.Input['ControllerUpdateStrategyArgs']] = None,
watch_ingress_without_class: Optional[pulumi.Input[bool]] = None):
"""
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] add_headers: Adds custom headers before sending response traffic to the client, according to https://kubernetes.github.io/ingress-nginx/user-guide/nginx-configuration/configmap/#add-headers.
:param pulumi.Input['pulumi_kubernetes.core.v1.AffinityArgs'] affinity: Affinity and anti-affinity. Ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity.
:param pulumi.Input[bool] allow_snippet_annotations: Defines whether the Ingress Controller should allow users to set their own *-snippet annotations; otherwise such annotations are forbidden/dropped when users add them. Global snippets in ConfigMap are still respected.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] annotations: Annotations to be added to the controller Deployment or DaemonSet.
:param pulumi.Input['AutoscalingArgs'] autoscaling: Mutually exclusive with keda autoscaling.
:param pulumi.Input[Sequence[pulumi.Input['AutoscalingTemplateArgs']]] autoscaling_template: Custom or additional autoscaling metrics. Ref: https://kubernetes.io/docs/tasks/run-application/horizontal-pod-autoscale/#support-for-custom-metrics
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] config: Adds custom configuration options to NGINX. Ref: https://kubernetes.github.io/ingress-nginx/user-guide/nginx-configuration/configmap/.
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] config_annotations: Annotations to be added to the controller configuration ConfigMap.
:param pulumi.Input[str] config_map_namespace: Allows customization of the configmap / nginx-configmap namespace.
:param pulumi.Input[str] container_name: Configures the controller container name.
:param pulumi.Input['ControllerPortArgs'] container_port: Configures the ports the nginx-controller listens on.
:param pulumi.Input['ControllerCustomTemplateArgs'] custom_template: Override NGINX template.
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] dns_config: Optionally customize the pod dnsConfig.
:param pulumi.Input[str] dns_policy: Optionally change this to ClusterFirstWithHostNet in case you have 'hostNetwork: true'. By default, while using host network, name resolution uses the host's DNS. If you wish nginx-controller to keep resolving names inside the k8s network, use ClusterFirstWithHostNet.
:param pulumi.Input[str] election_id: Election ID to use for status update.
:param pulumi.Input[bool] enable_mimalloc: Enable mimalloc as a drop-in replacement for malloc. ref: https://github.com/microsoft/mimalloc.
:param pulumi.Input[str] existing_psp: Use an existing PSP instead of creating one.
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] extra_args: Additional command line arguments to pass to nginx-ingress-controller, e.g. to specify the default SSL certificate you can use `default-ssl-certificate: "<namespace>/<secret_name>"`.
:param pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.ContainerArgs']]] extra_containers: Additional containers to be added to the controller pod. See https://github.com/lemonldap-ng-controller/lemonldap-ng-controller as an example.
:param pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.EnvVarArgs']]] extra_envs: Additional environment variables to set.
:param pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.ContainerArgs']]] extra_init_containers: Containers that run before the app containers are started, e.g. `- name: init-myservice image: busybox command: ['sh', '-c', 'until nslookup myservice; do echo waiting for myservice; sleep 2; done;']`.
:param pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeMountArgs']]] extra_volume_mounts: Additional volumeMounts for the controller's main container, e.g. `- name: copy-portal-skins mountPath: /var/lib/lemonldap-ng/portal/skins`.
:param pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeArgs']]] extra_volumes: Additional volumes for the controller pod, e.g. `- name: copy-portal-skins emptyDir: {}`.
:param pulumi.Input[str] health_check_path: Path of the health check endpoint. All requests received on the port defined by the healthz-port parameter are forwarded internally to this path.
:param pulumi.Input[str] heath_check_host: Address to bind the health check endpoint. It is better to set this option to the internal node address if the ingress-nginx controller is running with hostNetwork: true.
:param pulumi.Input[bool] host_network: Required for use with CNI-based Kubernetes installations (such as ones set up by kubeadm), since CNI and hostPort don't mix yet. Can be removed once https://github.com/kubernetes/kubernetes/issues/23920 is resolved.
:param pulumi.Input['ControllerHostPortArgs'] host_port: Use host ports 80 and 443. Disabled by default.
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] hostname: Optionally customize the pod hostname.
:param pulumi.Input[bool] ingress_class_by_name: Process IngressClass per name (additionally as per spec.controller).
:param pulumi.Input['ControllerIngressClassResourceArgs'] ingress_class_resource: This section refers to the creation of the IngressClass resource. IngressClass resources are supported since k8s >= 1.18 and required since k8s >= 1.19.
:param pulumi.Input['KedaArgs'] keda: Mutually exclusive with hpa autoscaling.
:param pulumi.Input[str] kind: DaemonSet or Deployment.
:param pulumi.Input['pulumi_kubernetes.core.v1.LifecycleArgs'] lifecycle: Improves connection draining when the ingress controller pod is deleted, using a lifecycle hook. With this hook, the default terminationGracePeriodSeconds is increased from 30 seconds to 300, allowing connections to drain for up to five minutes. If the active connections end before that, the pod terminates gracefully at that time. To take full advantage of this feature, the ConfigMap option worker-shutdown-timeout is set to 240s instead of 10s.
:param pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs'] liveness_probe: Liveness probe values Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
:param pulumi.Input[str] maxmind_license_key: Maxmind license key to download GeoLite2 Databases https://blog.maxmind.com/2019/12/18/significant-changes-to-accessing-and-using-geolite2-databases.
:param pulumi.Input[int] min_ready_seconds: minReadySeconds to avoid killing pods before we are ready.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] node_selector: Node labels for controller pod assignment Ref: https://kubernetes.io/docs/user-guide/node-selection/.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] pod_annotations: Annotations to be added to controller pods.
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] pod_labels: Labels to add to the pod container metadata.
:param pulumi.Input['pulumi_kubernetes.core.v1.PodSecurityContextArgs'] pod_security_context: Security Context policies for controller pods.
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] proxy_set_headers: Will add custom headers before sending traffic to backends according to https://github.com/kubernetes/ingress-nginx/tree/main/docs/examples/customization/custom-headers.
:param pulumi.Input['ControllerPublishServiceArgs'] publish_service: Allows customization of the source of the IP address or FQDN to report in the ingress status field. By default, it reads the information provided by the service. If disabled, the status field reports the IP address of the node or nodes where an ingress controller pod is running.
:param pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs'] readiness_probe: Readiness probe values Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
:param pulumi.Input[bool] report_node_internal_ip: Bare-metal considerations via the host network (https://kubernetes.github.io/ingress-nginx/deploy/baremetal/#via-the-host-network). In a configuration using the host network there is no Service exposing the NGINX Ingress controller, so the ingress status would otherwise be blank; the default --publish-service flag used in standard cloud setups does not apply.
:param pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs'] resources: Define resource requests to avoid probe issues due to CPU utilization on busy nodes. Ref: https://github.com/kubernetes/ingress-nginx/issues/4735#issuecomment-551204903. Ideally, there should be no limits. https://engineering.indeedblog.com/blog/2019/12/cpu-throttling-regression-fix/
:param pulumi.Input['ControllerScopeArgs'] scope: Limit the scope of the controller.
:param pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs'] startup_probe: Startup probe values Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
:param pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]] sysctls: See https://kubernetes.io/docs/tasks/administer-cluster/sysctl-cluster/ for notes on enabling and using sysctls.
:param pulumi.Input['ControllerTcpArgs'] tcp: Allows customization of the tcp-services-configmap.
:param pulumi.Input[int] terminate_grace_period_seconds: How long to wait for the drain of connections.
:param pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs'] tolerations: Node tolerations for server scheduling to nodes with taints Ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/.
:param pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TopologySpreadConstraintArgs']]] topology_spread_constraints: Topology spread constraints rely on node labels to identify the topology domain(s) that each Node is in. Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-topology-spread-constraints/.
:param pulumi.Input['ControllerUpdateStrategyArgs'] update_strategy: The update strategy to apply to the Deployment or DaemonSet.
:param pulumi.Input[bool] watch_ingress_without_class: Process Ingress objects without ingressClass annotation/ingressClassName field. Overrides value for --watch-ingress-without-class flag of the controller binary. Defaults to false.
"""
if add_headers is not None:
pulumi.set(__self__, "add_headers", add_headers)
if admission_webhooks is not None:
pulumi.set(__self__, "admission_webhooks", admission_webhooks)
if affinity is not None:
pulumi.set(__self__, "affinity", affinity)
if allow_snippet_annotations is not None:
pulumi.set(__self__, "allow_snippet_annotations", allow_snippet_annotations)
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if autoscaling is not None:
pulumi.set(__self__, "autoscaling", autoscaling)
if autoscaling_template is not None:
pulumi.set(__self__, "autoscaling_template", autoscaling_template)
if config is not None:
pulumi.set(__self__, "config", config)
if config_annotations is not None:
pulumi.set(__self__, "config_annotations", config_annotations)
if config_map_namespace is not None:
pulumi.set(__self__, "config_map_namespace", config_map_namespace)
if container_name is not None:
pulumi.set(__self__, "container_name", container_name)
if container_port is not None:
pulumi.set(__self__, "container_port", container_port)
if custom_template is not None:
pulumi.set(__self__, "custom_template", custom_template)
if dns_config is not None:
pulumi.set(__self__, "dns_config", dns_config)
if dns_policy is not None:
pulumi.set(__self__, "dns_policy", dns_policy)
if election_id is not None:
pulumi.set(__self__, "election_id", election_id)
if enable_mimalloc is not None:
pulumi.set(__self__, "enable_mimalloc", enable_mimalloc)
if existing_psp is not None:
pulumi.set(__self__, "existing_psp", existing_psp)
if extra_args is not None:
pulumi.set(__self__, "extra_args", extra_args)
if extra_containers is not None:
pulumi.set(__self__, "extra_containers", extra_containers)
if extra_envs is not None:
pulumi.set(__self__, "extra_envs", extra_envs)
if extra_init_containers is not None:
pulumi.set(__self__, "extra_init_containers", extra_init_containers)
if extra_volume_mounts is not None:
pulumi.set(__self__, "extra_volume_mounts", extra_volume_mounts)
if extra_volumes is not None:
pulumi.set(__self__, "extra_volumes", extra_volumes)
if health_check_path is not None:
pulumi.set(__self__, "health_check_path", health_check_path)
if heath_check_host is not None:
pulumi.set(__self__, "heath_check_host", heath_check_host)
if host_network is not None:
pulumi.set(__self__, "host_network", host_network)
if host_port is not None:
pulumi.set(__self__, "host_port", host_port)
if hostname is not None:
pulumi.set(__self__, "hostname", hostname)
if image is not None:
pulumi.set(__self__, "image", image)
if ingress_class_by_name is not None:
pulumi.set(__self__, "ingress_class_by_name", ingress_class_by_name)
if ingress_class_resource is not None:
pulumi.set(__self__, "ingress_class_resource", ingress_class_resource)
if keda is not None:
pulumi.set(__self__, "keda", keda)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if lifecycle is not None:
pulumi.set(__self__, "lifecycle", lifecycle)
if liveness_probe is not None:
pulumi.set(__self__, "liveness_probe", liveness_probe)
if maxmind_license_key is not None:
pulumi.set(__self__, "maxmind_license_key", maxmind_license_key)
if metrics is not None:
pulumi.set(__self__, "metrics", metrics)
if min_available is not None:
pulumi.set(__self__, "min_available", min_available)
if min_ready_seconds is not None:
pulumi.set(__self__, "min_ready_seconds", min_ready_seconds)
if name is not None:
pulumi.set(__self__, "name", name)
if node_selector is not None:
pulumi.set(__self__, "node_selector", node_selector)
if pod_annotations is not None:
pulumi.set(__self__, "pod_annotations", pod_annotations)
if pod_labels is not None:
pulumi.set(__self__, "pod_labels", pod_labels)
if pod_security_context is not None:
pulumi.set(__self__, "pod_security_context", pod_security_context)
if priority_class_name is not None:
pulumi.set(__self__, "priority_class_name", priority_class_name)
if proxy_set_headers is not None:
pulumi.set(__self__, "proxy_set_headers", proxy_set_headers)
if publish_service is not None:
pulumi.set(__self__, "publish_service", publish_service)
if readiness_probe is not None:
pulumi.set(__self__, "readiness_probe", readiness_probe)
if replica_count is not None:
pulumi.set(__self__, "replica_count", replica_count)
if report_node_internal_ip is not None:
pulumi.set(__self__, "report_node_internal_ip", report_node_internal_ip)
if resources is not None:
pulumi.set(__self__, "resources", resources)
if scope is not None:
pulumi.set(__self__, "scope", scope)
if service is not None:
pulumi.set(__self__, "service", service)
if startup_probe is not None:
pulumi.set(__self__, "startup_probe", startup_probe)
if sysctls is not None:
pulumi.set(__self__, "sysctls", sysctls)
if tcp is not None:
pulumi.set(__self__, "tcp", tcp)
if terminate_grace_period_seconds is not None:
pulumi.set(__self__, "terminate_grace_period_seconds", terminate_grace_period_seconds)
if tolerations is not None:
pulumi.set(__self__, "tolerations", tolerations)
if topology_spread_constraints is not None:
pulumi.set(__self__, "topology_spread_constraints", topology_spread_constraints)
if udp is not None:
pulumi.set(__self__, "udp", udp)
if update_strategy is not None:
pulumi.set(__self__, "update_strategy", update_strategy)
if watch_ingress_without_class is not None:
pulumi.set(__self__, "watch_ingress_without_class", watch_ingress_without_class)
@property
@pulumi.getter(name="addHeaders")
def add_headers(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
Adds custom headers before sending response traffic to the client, according to https://kubernetes.github.io/ingress-nginx/user-guide/nginx-configuration/configmap/#add-headers.
"""
return pulumi.get(self, "add_headers")
@add_headers.setter
def add_headers(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "add_headers", value)
@property
@pulumi.getter(name="admissionWebhooks")
def admission_webhooks(self) -> Optional[pulumi.Input['ContollerAdmissionWebhooksArgs']]:
return pulumi.get(self, "admission_webhooks")
@admission_webhooks.setter
def admission_webhooks(self, value: Optional[pulumi.Input['ContollerAdmissionWebhooksArgs']]):
pulumi.set(self, "admission_webhooks", value)
@property
@pulumi.getter
def affinity(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.AffinityArgs']]:
"""
Affinity and anti-affinity. Ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/#affinity-and-anti-affinity.
"""
return pulumi.get(self, "affinity")
@affinity.setter
def affinity(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.AffinityArgs']]):
pulumi.set(self, "affinity", value)
@property
@pulumi.getter(name="allowSnippetAnnotations")
def allow_snippet_annotations(self) -> Optional[pulumi.Input[bool]]:
"""
Defines whether the Ingress Controller should allow users to set their own *-snippet annotations; otherwise such annotations are forbidden/dropped when users add them. Global snippets in ConfigMap are still respected.
"""
return pulumi.get(self, "allow_snippet_annotations")
@allow_snippet_annotations.setter
def allow_snippet_annotations(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "allow_snippet_annotations", value)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Annotations to be added to the controller Deployment or DaemonSet.
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@property
@pulumi.getter
def autoscaling(self) -> Optional[pulumi.Input['AutoscalingArgs']]:
"""
Mutually exclusive with keda autoscaling.
"""
return pulumi.get(self, "autoscaling")
@autoscaling.setter
def autoscaling(self, value: Optional[pulumi.Input['AutoscalingArgs']]):
pulumi.set(self, "autoscaling", value)
@property
@pulumi.getter(name="autoscalingTemplate")
def autoscaling_template(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AutoscalingTemplateArgs']]]]:
"""
Custom or additional autoscaling metrics. Ref: https://kubernetes.io/docs/tasks/run-application/horizontal-pod-autoscale/#support-for-custom-metrics
"""
return pulumi.get(self, "autoscaling_template")
@autoscaling_template.setter
def autoscaling_template(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AutoscalingTemplateArgs']]]]):
pulumi.set(self, "autoscaling_template", value)
@property
@pulumi.getter
def config(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
Adds custom configuration options to NGINX. Ref: https://kubernetes.github.io/ingress-nginx/user-guide/nginx-configuration/configmap/.
"""
return pulumi.get(self, "config")
@config.setter
def config(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "config", value)
@property
@pulumi.getter(name="configAnnotations")
def config_annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
Annotations to be added to the controller configuration ConfigMap.
"""
return pulumi.get(self, "config_annotations")
@config_annotations.setter
def config_annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "config_annotations", value)
@property
@pulumi.getter(name="configMapNamespace")
def config_map_namespace(self) -> Optional[pulumi.Input[str]]:
"""
Allows customization of the configmap / nginx-configmap namespace.
"""
return pulumi.get(self, "config_map_namespace")
@config_map_namespace.setter
def config_map_namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "config_map_namespace", value)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> Optional[pulumi.Input[str]]:
"""
Configures the controller container name.
"""
return pulumi.get(self, "container_name")
@container_name.setter
def container_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "container_name", value)
@property
@pulumi.getter(name="containerPort")
def container_port(self) -> Optional[pulumi.Input['ControllerPortArgs']]:
"""
Configures the ports the nginx-controller listens on.
"""
return pulumi.get(self, "container_port")
@container_port.setter
def container_port(self, value: Optional[pulumi.Input['ControllerPortArgs']]):
pulumi.set(self, "container_port", value)
@property
@pulumi.getter(name="customTemplate")
def custom_template(self) -> Optional[pulumi.Input['ControllerCustomTemplateArgs']]:
"""
Override NGINX template.
"""
return pulumi.get(self, "custom_template")
@custom_template.setter
def custom_template(self, value: Optional[pulumi.Input['ControllerCustomTemplateArgs']]):
pulumi.set(self, "custom_template", value)
@property
@pulumi.getter(name="dnsConfig")
def dns_config(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
Optionally customize the pod dnsConfig.
"""
return pulumi.get(self, "dns_config")
@dns_config.setter
def dns_config(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "dns_config", value)
@property
@pulumi.getter(name="dnsPolicy")
def dns_policy(self) -> Optional[pulumi.Input[str]]:
"""
Optionally change this to ClusterFirstWithHostNet in case you have 'hostNetwork: true'. By default, while using host network, name resolution uses the host's DNS. If you wish nginx-controller to keep resolving names inside the k8s network, use ClusterFirstWithHostNet.
"""
return pulumi.get(self, "dns_policy")
@dns_policy.setter
def dns_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dns_policy", value)
@property
@pulumi.getter(name="electionID")
def election_id(self) -> Optional[pulumi.Input[str]]:
"""
Election ID to use for status update.
"""
return pulumi.get(self, "election_id")
@election_id.setter
def election_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "election_id", value)
@property
@pulumi.getter(name="enableMimalloc")
def enable_mimalloc(self) -> Optional[pulumi.Input[bool]]:
"""
Enable mimalloc as a drop-in replacement for malloc. ref: https://github.com/microsoft/mimalloc.
"""
return pulumi.get(self, "enable_mimalloc")
@enable_mimalloc.setter
def enable_mimalloc(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_mimalloc", value)
@property
@pulumi.getter(name="existingPsp")
def existing_psp(self) -> Optional[pulumi.Input[str]]:
"""
Use an existing PSP instead of creating one.
"""
return pulumi.get(self, "existing_psp")
@existing_psp.setter
def existing_psp(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "existing_psp", value)
@property
@pulumi.getter(name="extraArgs")
def extra_args(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
Additional command line arguments to pass to nginx-ingress-controller, e.g. to specify the default SSL certificate you can use `default-ssl-certificate: "<namespace>/<secret_name>"`.
"""
return pulumi.get(self, "extra_args")
@extra_args.setter
def extra_args(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "extra_args", value)
@property
@pulumi.getter(name="extraContainers")
def extra_containers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.ContainerArgs']]]]:
"""
Additional containers to be added to the controller pod. See https://github.com/lemonldap-ng-controller/lemonldap-ng-controller as an example.
"""
return pulumi.get(self, "extra_containers")
@extra_containers.setter
def extra_containers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.ContainerArgs']]]]):
pulumi.set(self, "extra_containers", value)
@property
@pulumi.getter(name="extraEnvs")
def extra_envs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.EnvVarArgs']]]]:
"""
Additional environment variables to set.
"""
return pulumi.get(self, "extra_envs")
@extra_envs.setter
def extra_envs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.EnvVarArgs']]]]):
pulumi.set(self, "extra_envs", value)
@property
@pulumi.getter(name="extraInitContainers")
def extra_init_containers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.ContainerArgs']]]]:
"""
Containers that run before the app containers are started, e.g. `- name: init-myservice image: busybox command: ['sh', '-c', 'until nslookup myservice; do echo waiting for myservice; sleep 2; done;']`.
"""
return pulumi.get(self, "extra_init_containers")
@extra_init_containers.setter
def extra_init_containers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.ContainerArgs']]]]):
pulumi.set(self, "extra_init_containers", value)
@property
@pulumi.getter(name="extraVolumeMounts")
def extra_volume_mounts(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeMountArgs']]]]:
"""
Additional volumeMounts for the controller's main container, e.g. `- name: copy-portal-skins mountPath: /var/lib/lemonldap-ng/portal/skins`.
"""
return pulumi.get(self, "extra_volume_mounts")
@extra_volume_mounts.setter
def extra_volume_mounts(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeMountArgs']]]]):
pulumi.set(self, "extra_volume_mounts", value)
@property
@pulumi.getter(name="extraVolumes")
def extra_volumes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeArgs']]]]:
"""
Additional volumes for the controller pod, e.g. `- name: copy-portal-skins emptyDir: {}`.
"""
return pulumi.get(self, "extra_volumes")
@extra_volumes.setter
def extra_volumes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.VolumeArgs']]]]):
pulumi.set(self, "extra_volumes", value)
@property
@pulumi.getter(name="healthCheckPath")
def health_check_path(self) -> Optional[pulumi.Input[str]]:
"""
Path of the health check endpoint. All requests received on the port defined by the healthz-port parameter are forwarded internally to this path.
"""
return pulumi.get(self, "health_check_path")
@health_check_path.setter
def health_check_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "health_check_path", value)
@property
@pulumi.getter(name="heathCheckHost")
def heath_check_host(self) -> Optional[pulumi.Input[str]]:
"""
Address to bind the health check endpoint. It is better to set this option to the internal node address if the ingress-nginx controller is running with hostNetwork: true.
"""
return pulumi.get(self, "heath_check_host")
@heath_check_host.setter
def heath_check_host(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "heath_check_host", value)
@property
@pulumi.getter(name="hostNetwork")
def host_network(self) -> Optional[pulumi.Input[bool]]:
"""
Required for use with CNI based kubernetes installations (such as ones set up by kubeadm), since CNI and hostport don't mix yet. Can be deprecated once https://github.com/kubernetes/kubernetes/issues/23920 is merged.
"""
return pulumi.get(self, "host_network")
@host_network.setter
def host_network(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "host_network", value)
@property
@pulumi.getter(name="hostPort")
def host_port(self) -> Optional[pulumi.Input['ControllerHostPortArgs']]:
"""
Use host ports 80 and 443. Disabled by default.
"""
return pulumi.get(self, "host_port")
@host_port.setter
def host_port(self, value: Optional[pulumi.Input['ControllerHostPortArgs']]):
pulumi.set(self, "host_port", value)
@property
@pulumi.getter
def hostname(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
Optionally customize the pod hostname.
"""
return pulumi.get(self, "hostname")
@hostname.setter
def hostname(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "hostname", value)
@property
@pulumi.getter
def image(self) -> Optional[pulumi.Input['ControllerImageArgs']]:
return pulumi.get(self, "image")
@image.setter
def image(self, value: Optional[pulumi.Input['ControllerImageArgs']]):
pulumi.set(self, "image", value)
@property
@pulumi.getter(name="ingressClassByName")
def ingress_class_by_name(self) -> Optional[pulumi.Input[bool]]:
"""
Process IngressClass per name (additionally as per spec.controller).
"""
return pulumi.get(self, "ingress_class_by_name")
@ingress_class_by_name.setter
def ingress_class_by_name(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "ingress_class_by_name", value)
@property
@pulumi.getter(name="ingressClassResource")
def ingress_class_resource(self) -> Optional[pulumi.Input['ControllerIngressClassResourceArgs']]:
"""
This section refers to the creation of the IngressClass resource. IngressClass resources are supported since k8s >= 1.18 and required since k8s >= 1.19
"""
return pulumi.get(self, "ingress_class_resource")
@ingress_class_resource.setter
def ingress_class_resource(self, value: Optional[pulumi.Input['ControllerIngressClassResourceArgs']]):
pulumi.set(self, "ingress_class_resource", value)
@property
@pulumi.getter
def keda(self) -> Optional[pulumi.Input['KedaArgs']]:
"""
Mutually exclusive with hpa autoscaling.
"""
return pulumi.get(self, "keda")
@keda.setter
def keda(self, value: Optional[pulumi.Input['KedaArgs']]):
pulumi.set(self, "keda", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
DaemonSet or Deployment.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def lifecycle(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.LifecycleArgs']]:
"""
        Improve connection draining when the ingress controller pod is deleted, using a lifecycle hook. With this hook, the default terminationGracePeriodSeconds is increased from 30 seconds to 300, allowing connections to drain for up to five minutes. If the active connections end before that, the pod terminates gracefully at that time. To take full advantage of this feature, the ConfigMap value worker-shutdown-timeout is set to 240s instead of 10s.
"""
return pulumi.get(self, "lifecycle")
@lifecycle.setter
def lifecycle(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.LifecycleArgs']]):
pulumi.set(self, "lifecycle", value)
@property
@pulumi.getter(name="livenessProbe")
def liveness_probe(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]:
"""
Liveness probe values Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
"""
return pulumi.get(self, "liveness_probe")
@liveness_probe.setter
def liveness_probe(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]):
pulumi.set(self, "liveness_probe", value)
@property
@pulumi.getter(name="maxmindLicenseKey")
def maxmind_license_key(self) -> Optional[pulumi.Input[str]]:
"""
Maxmind license key to download GeoLite2 Databases https://blog.maxmind.com/2019/12/18/significant-changes-to-accessing-and-using-geolite2-databases.
"""
return pulumi.get(self, "maxmind_license_key")
@maxmind_license_key.setter
def maxmind_license_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "maxmind_license_key", value)
@property
@pulumi.getter
def metrics(self) -> Optional[pulumi.Input['ControllerMetricsArgs']]:
return pulumi.get(self, "metrics")
@metrics.setter
def metrics(self, value: Optional[pulumi.Input['ControllerMetricsArgs']]):
pulumi.set(self, "metrics", value)
@property
@pulumi.getter(name="minAvailable")
def min_available(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "min_available")
@min_available.setter
def min_available(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_available", value)
@property
@pulumi.getter(name="minReadySeconds")
def min_ready_seconds(self) -> Optional[pulumi.Input[int]]:
"""
minReadySeconds to avoid killing pods before we are ready.
"""
return pulumi.get(self, "min_ready_seconds")
@min_ready_seconds.setter
def min_ready_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_ready_seconds", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="nodeSelector")
def node_selector(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Node labels for controller pod assignment Ref: https://kubernetes.io/docs/user-guide/node-selection/.
"""
return pulumi.get(self, "node_selector")
@node_selector.setter
def node_selector(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "node_selector", value)
@property
@pulumi.getter(name="podAnnotations")
def pod_annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Annotations to be added to controller pods.
"""
return pulumi.get(self, "pod_annotations")
@pod_annotations.setter
def pod_annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "pod_annotations", value)
@property
@pulumi.getter(name="podLabels")
def pod_labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
labels to add to the pod container metadata.
"""
return pulumi.get(self, "pod_labels")
@pod_labels.setter
def pod_labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "pod_labels", value)
@property
@pulumi.getter(name="podSecurityContext")
def pod_security_context(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.PodSecurityContextArgs']]:
"""
Security Context policies for controller pods.
"""
return pulumi.get(self, "pod_security_context")
@pod_security_context.setter
def pod_security_context(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.PodSecurityContextArgs']]):
pulumi.set(self, "pod_security_context", value)
@property
@pulumi.getter(name="priorityClassName")
def priority_class_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "priority_class_name")
@priority_class_name.setter
def priority_class_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "priority_class_name", value)
@property
@pulumi.getter(name="proxySetHeaders")
def proxy_set_headers(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
Will add custom headers before sending traffic to backends according to https://github.com/kubernetes/ingress-nginx/tree/main/docs/examples/customization/custom-headers.
"""
return pulumi.get(self, "proxy_set_headers")
@proxy_set_headers.setter
def proxy_set_headers(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "proxy_set_headers", value)
@property
@pulumi.getter(name="publishService")
def publish_service(self) -> Optional[pulumi.Input['ControllerPublishServiceArgs']]:
"""
        Allows customization of the source of the IP address or FQDN to report in the ingress status field. By default, it reads the information provided by the service. If disabled, the status field reports the IP address of the node or nodes where an ingress controller pod is running.
"""
return pulumi.get(self, "publish_service")
@publish_service.setter
def publish_service(self, value: Optional[pulumi.Input['ControllerPublishServiceArgs']]):
pulumi.set(self, "publish_service", value)
@property
@pulumi.getter(name="readinessProbe")
def readiness_probe(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]:
"""
Readiness probe values Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
"""
return pulumi.get(self, "readiness_probe")
@readiness_probe.setter
def readiness_probe(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]):
pulumi.set(self, "readiness_probe", value)
@property
@pulumi.getter(name="replicaCount")
def replica_count(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "replica_count")
@replica_count.setter
def replica_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "replica_count", value)
@property
@pulumi.getter(name="reportNodeInternalIp")
def report_node_internal_ip(self) -> Optional[pulumi.Input[bool]]:
"""
        Bare-metal considerations via the host network (https://kubernetes.github.io/ingress-nginx/deploy/baremetal/#via-the-host-network). In a host-network configuration there is no Service exposing the NGINX Ingress controller, so the ingress status would otherwise be blank; the default --publish-service flag used in standard cloud setups does not apply.
"""
return pulumi.get(self, "report_node_internal_ip")
@report_node_internal_ip.setter
def report_node_internal_ip(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "report_node_internal_ip", value)
@property
@pulumi.getter
def resources(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']]:
"""
Define requests resources to avoid probe issues due to CPU utilization in busy nodes ref: https://github.com/kubernetes/ingress-nginx/issues/4735#issuecomment-551204903 Ideally, there should be no limits. https://engineering.indeedblog.com/blog/2019/12/cpu-throttling-regression-fix/
"""
return pulumi.get(self, "resources")
@resources.setter
def resources(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ResourceRequirementsArgs']]):
pulumi.set(self, "resources", value)
@property
@pulumi.getter
def scope(self) -> Optional[pulumi.Input['ControllerScopeArgs']]:
"""
Limit the scope of the controller.
"""
return pulumi.get(self, "scope")
@scope.setter
def scope(self, value: Optional[pulumi.Input['ControllerScopeArgs']]):
pulumi.set(self, "scope", value)
@property
@pulumi.getter
def service(self) -> Optional[pulumi.Input['ControllerServiceArgs']]:
return pulumi.get(self, "service")
@service.setter
def service(self, value: Optional[pulumi.Input['ControllerServiceArgs']]):
pulumi.set(self, "service", value)
@property
@pulumi.getter(name="startupProbe")
def startup_probe(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]:
"""
Startup probe values Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#container-probes.
"""
return pulumi.get(self, "startup_probe")
@startup_probe.setter
def startup_probe(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.ProbeArgs']]):
pulumi.set(self, "startup_probe", value)
@property
@pulumi.getter
def sysctls(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
"""
See https://kubernetes.io/docs/tasks/administer-cluster/sysctl-cluster/ for notes on enabling and using sysctls.
"""
return pulumi.get(self, "sysctls")
@sysctls.setter
def sysctls(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "sysctls", value)
@property
@pulumi.getter
def tcp(self) -> Optional[pulumi.Input['ControllerTcpArgs']]:
"""
Allows customization of the tcp-services-configmap.
"""
return pulumi.get(self, "tcp")
@tcp.setter
def tcp(self, value: Optional[pulumi.Input['ControllerTcpArgs']]):
pulumi.set(self, "tcp", value)
@property
@pulumi.getter(name="terminateGracePeriodSeconds")
def terminate_grace_period_seconds(self) -> Optional[pulumi.Input[int]]:
"""
How long to wait for the drain of connections.
"""
return pulumi.get(self, "terminate_grace_period_seconds")
@terminate_grace_period_seconds.setter
def terminate_grace_period_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "terminate_grace_period_seconds", value)
@property
@pulumi.getter
def tolerations(self) -> Optional[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']]:
"""
Node tolerations for server scheduling to nodes with taints Ref: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/.
"""
return pulumi.get(self, "tolerations")
@tolerations.setter
def tolerations(self, value: Optional[pulumi.Input['pulumi_kubernetes.core.v1.TolerationArgs']]):
pulumi.set(self, "tolerations", value)
@property
@pulumi.getter(name="topologySpreadConstraints")
def topology_spread_constraints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TopologySpreadConstraintArgs']]]]:
"""
Topology spread constraints rely on node labels to identify the topology domain(s) that each Node is in. Ref: https://kubernetes.io/docs/concepts/workloads/pods/pod-topology-spread-constraints/.
"""
return pulumi.get(self, "topology_spread_constraints")
@topology_spread_constraints.setter
def topology_spread_constraints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['pulumi_kubernetes.core.v1.TopologySpreadConstraintArgs']]]]):
pulumi.set(self, "topology_spread_constraints", value)
@property
@pulumi.getter
def udp(self) -> Optional[pulumi.Input['ControllerUdpArgs']]:
return pulumi.get(self, "udp")
@udp.setter
def udp(self, value: Optional[pulumi.Input['ControllerUdpArgs']]):
pulumi.set(self, "udp", value)
@property
@pulumi.getter(name="updateStrategy")
def update_strategy(self) -> Optional[pulumi.Input['ControllerUpdateStrategyArgs']]:
"""
The update strategy to apply to the Deployment or DaemonSet.
"""
return pulumi.get(self, "update_strategy")
@update_strategy.setter
def update_strategy(self, value: Optional[pulumi.Input['ControllerUpdateStrategyArgs']]):
pulumi.set(self, "update_strategy", value)
@property
@pulumi.getter(name="watchIngressWithoutClass")
def watch_ingress_without_class(self) -> Optional[pulumi.Input[bool]]:
"""
Process Ingress objects without ingressClass annotation/ingressClassName field. Overrides value for --watch-ingress-without-class flag of the controller binary. Defaults to false.
"""
return pulumi.get(self, "watch_ingress_without_class")
@watch_ingress_without_class.setter
def watch_ingress_without_class(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "watch_ingress_without_class", value)
@pulumi.input_type
class KedaScaledObjectArgs:
def __init__(__self__, *,
annotations: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] annotations: Custom annotations for ScaledObject resource.
"""
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Custom annotations for ScaledObject resource.
"""
return pulumi.get(self, "annotations")
@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "annotations", value)
@pulumi.input_type
class KedaTriggerArgs:
def __init__(__self__, *,
metadata: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]] = None,
type: Optional[pulumi.Input[str]] = None):
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if type is not None:
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def metadata(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]:
return pulumi.get(self, "metadata")
@metadata.setter
def metadata(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Mapping[str, pulumi.Input[str]]]]]]):
pulumi.set(self, "metadata", value)
@property
@pulumi.getter
def type(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "type")
@type.setter
def type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "type", value)
@pulumi.input_type
class KedaArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
behavior: Optional[pulumi.Input['AutoscalingBehaviorArgs']] = None,
cooldown_period: Optional[pulumi.Input[int]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
max_replicas: Optional[pulumi.Input[int]] = None,
min_replicas: Optional[pulumi.Input[int]] = None,
polling_interval: Optional[pulumi.Input[int]] = None,
restore_to_original_replica_count: Optional[pulumi.Input[bool]] = None,
scaled_object: Optional[pulumi.Input['KedaScaledObjectArgs']] = None,
triggers: Optional[pulumi.Input[Sequence[pulumi.Input['KedaTriggerArgs']]]] = None):
"""
:param pulumi.Input[str] api_version: apiVersion changes with keda 1.x vs 2.x: 2.x = keda.sh/v1alpha1, 1.x = keda.k8s.io/v1alpha1.
"""
if api_version is not None:
pulumi.set(__self__, "api_version", api_version)
if behavior is not None:
pulumi.set(__self__, "behavior", behavior)
if cooldown_period is not None:
pulumi.set(__self__, "cooldown_period", cooldown_period)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if max_replicas is not None:
pulumi.set(__self__, "max_replicas", max_replicas)
if min_replicas is not None:
pulumi.set(__self__, "min_replicas", min_replicas)
if polling_interval is not None:
pulumi.set(__self__, "polling_interval", polling_interval)
if restore_to_original_replica_count is not None:
pulumi.set(__self__, "restore_to_original_replica_count", restore_to_original_replica_count)
if scaled_object is not None:
pulumi.set(__self__, "scaled_object", scaled_object)
if triggers is not None:
pulumi.set(__self__, "triggers", triggers)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
apiVersion changes with keda 1.x vs 2.x: 2.x = keda.sh/v1alpha1, 1.x = keda.k8s.io/v1alpha1.
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter
def behavior(self) -> Optional[pulumi.Input['AutoscalingBehaviorArgs']]:
return pulumi.get(self, "behavior")
@behavior.setter
def behavior(self, value: Optional[pulumi.Input['AutoscalingBehaviorArgs']]):
pulumi.set(self, "behavior", value)
@property
@pulumi.getter(name="cooldownPeriod")
def cooldown_period(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "cooldown_period")
@cooldown_period.setter
def cooldown_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "cooldown_period", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="maxReplicas")
def max_replicas(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "max_replicas")
@max_replicas.setter
def max_replicas(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_replicas", value)
@property
@pulumi.getter(name="minReplicas")
def min_replicas(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "min_replicas")
@min_replicas.setter
def min_replicas(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "min_replicas", value)
@property
@pulumi.getter(name="pollingInterval")
def polling_interval(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "polling_interval")
@polling_interval.setter
def polling_interval(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "polling_interval", value)
@property
@pulumi.getter(name="restoreToOriginalReplicaCount")
def restore_to_original_replica_count(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "restore_to_original_replica_count")
@restore_to_original_replica_count.setter
def restore_to_original_replica_count(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "restore_to_original_replica_count", value)
@property
@pulumi.getter(name="scaledObject")
def scaled_object(self) -> Optional[pulumi.Input['KedaScaledObjectArgs']]:
return pulumi.get(self, "scaled_object")
@scaled_object.setter
def scaled_object(self, value: Optional[pulumi.Input['KedaScaledObjectArgs']]):
pulumi.set(self, "scaled_object", value)
@property
@pulumi.getter
def triggers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KedaTriggerArgs']]]]:
return pulumi.get(self, "triggers")
@triggers.setter
def triggers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KedaTriggerArgs']]]]):
pulumi.set(self, "triggers", value)
@pulumi.input_type
class ReleaseArgs:
def __init__(__self__, *,
atomic: Optional[pulumi.Input[bool]] = None,
chart: Optional[pulumi.Input[str]] = None,
cleanup_on_fail: Optional[pulumi.Input[bool]] = None,
create_namespace: Optional[pulumi.Input[bool]] = None,
dependency_update: Optional[pulumi.Input[bool]] = None,
description: Optional[pulumi.Input[str]] = None,
devel: Optional[pulumi.Input[bool]] = None,
disable_crd_hooks: Optional[pulumi.Input[bool]] = None,
disable_openapi_validation: Optional[pulumi.Input[bool]] = None,
disable_webhooks: Optional[pulumi.Input[bool]] = None,
force_update: Optional[pulumi.Input[bool]] = None,
keyring: Optional[pulumi.Input[str]] = None,
lint: Optional[pulumi.Input[bool]] = None,
manifest: Optional[pulumi.Input[Mapping[str, Any]]] = None,
max_history: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
namespace: Optional[pulumi.Input[str]] = None,
postrender: Optional[pulumi.Input[str]] = None,
recreate_pods: Optional[pulumi.Input[bool]] = None,
render_subchart_notes: Optional[pulumi.Input[bool]] = None,
replace: Optional[pulumi.Input[bool]] = None,
repository_opts: Optional[pulumi.Input['RepositoryOptsArgs']] = None,
reset_values: Optional[pulumi.Input[bool]] = None,
resource_names: Optional[pulumi.Input[Mapping[str, pulumi.Input[Sequence[pulumi.Input[str]]]]]] = None,
reuse_values: Optional[pulumi.Input[bool]] = None,
skip_await: Optional[pulumi.Input[bool]] = None,
skip_crds: Optional[pulumi.Input[bool]] = None,
timeout: Optional[pulumi.Input[int]] = None,
value_yaml_files: Optional[pulumi.Input[Sequence[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]]]] = None,
values: Optional[pulumi.Input[Mapping[str, Any]]] = None,
verify: Optional[pulumi.Input[bool]] = None,
version: Optional[pulumi.Input[str]] = None,
wait_for_jobs: Optional[pulumi.Input[bool]] = None):
"""
A Release is an instance of a chart running in a Kubernetes cluster.
A Chart is a Helm package. It contains all of the resource definitions necessary to run an application, tool, or service inside of a Kubernetes cluster.
Note - Helm Release is currently in BETA and may change. Use in production environment is discouraged.
:param pulumi.Input[bool] atomic: If set, installation process purges chart on fail. `skipAwait` will be disabled automatically if atomic is used.
:param pulumi.Input[str] chart: Chart name to be installed. A path may be used.
:param pulumi.Input[bool] cleanup_on_fail: Allow deletion of new resources created in this upgrade when upgrade fails.
:param pulumi.Input[bool] create_namespace: Create the namespace if it does not exist.
:param pulumi.Input[bool] dependency_update: Run helm dependency update before installing the chart.
:param pulumi.Input[str] description: Add a custom description
:param pulumi.Input[bool] devel: Use chart development versions, too. Equivalent to version '>0.0.0-0'. If `version` is set, this is ignored.
        :param pulumi.Input[bool] disable_crd_hooks: Prevent CRD hooks from running, but run other hooks. See helm install --no-crd-hook.
:param pulumi.Input[bool] disable_openapi_validation: If set, the installation process will not validate rendered templates against the Kubernetes OpenAPI Schema
:param pulumi.Input[bool] disable_webhooks: Prevent hooks from running.
:param pulumi.Input[bool] force_update: Force resource update through delete/recreate if needed.
:param pulumi.Input[str] keyring: Location of public keys used for verification. Used only if `verify` is true
:param pulumi.Input[bool] lint: Run helm lint when planning.
:param pulumi.Input[Mapping[str, Any]] manifest: The rendered manifests as JSON. Not yet supported.
:param pulumi.Input[int] max_history: Limit the maximum number of revisions saved per release. Use 0 for no limit.
:param pulumi.Input[str] name: Release name.
:param pulumi.Input[str] namespace: Namespace to install the release into.
:param pulumi.Input[str] postrender: Postrender command to run.
:param pulumi.Input[bool] recreate_pods: Perform pods restart during upgrade/rollback.
:param pulumi.Input[bool] render_subchart_notes: If set, render subchart notes along with the parent.
:param pulumi.Input[bool] replace: Re-use the given name, even if that name is already used. This is unsafe in production
:param pulumi.Input['RepositoryOptsArgs'] repository_opts: Specification defining the Helm chart repository to use.
:param pulumi.Input[bool] reset_values: When upgrading, reset the values to the ones built into the chart.
:param pulumi.Input[Mapping[str, pulumi.Input[Sequence[pulumi.Input[str]]]]] resource_names: Names of resources created by the release grouped by "kind/version".
:param pulumi.Input[bool] reuse_values: When upgrading, reuse the last release's values and merge in any overrides. If 'resetValues' is specified, this is ignored
:param pulumi.Input[bool] skip_await: By default, the provider waits until all resources are in a ready state before marking the release as successful. Setting this to true will skip such await logic.
:param pulumi.Input[bool] skip_crds: If set, no CRDs will be installed. By default, CRDs are installed if not already present.
:param pulumi.Input[int] timeout: Time in seconds to wait for any individual kubernetes operation.
:param pulumi.Input[Sequence[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]]] value_yaml_files: List of assets (raw yaml files). Content is read and merged with values. Not yet supported.
:param pulumi.Input[Mapping[str, Any]] values: Custom values set for the release.
:param pulumi.Input[bool] verify: Verify the package before installing it.
:param pulumi.Input[str] version: Specify the exact chart version to install. If this is not specified, the latest version is installed.
:param pulumi.Input[bool] wait_for_jobs: Will wait until all Jobs have been completed before marking the release as successful. This is ignored if `skipAwait` is enabled.
"""
if atomic is not None:
pulumi.set(__self__, "atomic", atomic)
if chart is not None:
pulumi.set(__self__, "chart", chart)
if cleanup_on_fail is not None:
pulumi.set(__self__, "cleanup_on_fail", cleanup_on_fail)
if create_namespace is not None:
pulumi.set(__self__, "create_namespace", create_namespace)
if dependency_update is not None:
pulumi.set(__self__, "dependency_update", dependency_update)
if description is not None:
pulumi.set(__self__, "description", description)
if devel is not None:
pulumi.set(__self__, "devel", devel)
if disable_crd_hooks is not None:
pulumi.set(__self__, "disable_crd_hooks", disable_crd_hooks)
if disable_openapi_validation is not None:
pulumi.set(__self__, "disable_openapi_validation", disable_openapi_validation)
if disable_webhooks is not None:
pulumi.set(__self__, "disable_webhooks", disable_webhooks)
if force_update is not None:
pulumi.set(__self__, "force_update", force_update)
if keyring is not None:
pulumi.set(__self__, "keyring", keyring)
if lint is not None:
pulumi.set(__self__, "lint", lint)
if manifest is not None:
pulumi.set(__self__, "manifest", manifest)
if max_history is not None:
pulumi.set(__self__, "max_history", max_history)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if postrender is not None:
pulumi.set(__self__, "postrender", postrender)
if recreate_pods is not None:
pulumi.set(__self__, "recreate_pods", recreate_pods)
if render_subchart_notes is not None:
pulumi.set(__self__, "render_subchart_notes", render_subchart_notes)
if replace is not None:
pulumi.set(__self__, "replace", replace)
if repository_opts is not None:
pulumi.set(__self__, "repository_opts", repository_opts)
if reset_values is not None:
pulumi.set(__self__, "reset_values", reset_values)
if resource_names is not None:
pulumi.set(__self__, "resource_names", resource_names)
if reuse_values is not None:
pulumi.set(__self__, "reuse_values", reuse_values)
if skip_await is not None:
pulumi.set(__self__, "skip_await", skip_await)
if skip_crds is not None:
pulumi.set(__self__, "skip_crds", skip_crds)
if timeout is not None:
pulumi.set(__self__, "timeout", timeout)
if value_yaml_files is not None:
pulumi.set(__self__, "value_yaml_files", value_yaml_files)
if values is not None:
pulumi.set(__self__, "values", values)
if verify is not None:
pulumi.set(__self__, "verify", verify)
if version is not None:
pulumi.set(__self__, "version", version)
if wait_for_jobs is not None:
pulumi.set(__self__, "wait_for_jobs", wait_for_jobs)
@property
@pulumi.getter
def atomic(self) -> Optional[pulumi.Input[bool]]:
"""
If set, installation process purges chart on fail. `skipAwait` will be disabled automatically if atomic is used.
"""
return pulumi.get(self, "atomic")
@atomic.setter
def atomic(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "atomic", value)
@property
@pulumi.getter
def chart(self) -> Optional[pulumi.Input[str]]:
"""
Chart name to be installed. A path may be used.
"""
return pulumi.get(self, "chart")
@chart.setter
def chart(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "chart", value)
@property
@pulumi.getter(name="cleanupOnFail")
def cleanup_on_fail(self) -> Optional[pulumi.Input[bool]]:
"""
Allow deletion of new resources created in this upgrade when upgrade fails.
"""
return pulumi.get(self, "cleanup_on_fail")
@cleanup_on_fail.setter
def cleanup_on_fail(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "cleanup_on_fail", value)
@property
@pulumi.getter(name="createNamespace")
def create_namespace(self) -> Optional[pulumi.Input[bool]]:
"""
Create the namespace if it does not exist.
"""
return pulumi.get(self, "create_namespace")
@create_namespace.setter
def create_namespace(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "create_namespace", value)
@property
@pulumi.getter(name="dependencyUpdate")
def dependency_update(self) -> Optional[pulumi.Input[bool]]:
"""
Run helm dependency update before installing the chart.
"""
return pulumi.get(self, "dependency_update")
@dependency_update.setter
def dependency_update(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "dependency_update", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Add a custom description
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def devel(self) -> Optional[pulumi.Input[bool]]:
"""
Use chart development versions, too. Equivalent to version '>0.0.0-0'. If `version` is set, this is ignored.
"""
return pulumi.get(self, "devel")
@devel.setter
def devel(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "devel", value)
@property
@pulumi.getter(name="disableCRDHooks")
def disable_crd_hooks(self) -> Optional[pulumi.Input[bool]]:
"""
        Prevent CRD hooks from running, but run other hooks. See helm install --no-crd-hook.
"""
return pulumi.get(self, "disable_crd_hooks")
@disable_crd_hooks.setter
def disable_crd_hooks(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disable_crd_hooks", value)
@property
@pulumi.getter(name="disableOpenapiValidation")
def disable_openapi_validation(self) -> Optional[pulumi.Input[bool]]:
"""
If set, the installation process will not validate rendered templates against the Kubernetes OpenAPI Schema
"""
return pulumi.get(self, "disable_openapi_validation")
@disable_openapi_validation.setter
def disable_openapi_validation(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disable_openapi_validation", value)
@property
@pulumi.getter(name="disableWebhooks")
def disable_webhooks(self) -> Optional[pulumi.Input[bool]]:
"""
Prevent hooks from running.
"""
return pulumi.get(self, "disable_webhooks")
@disable_webhooks.setter
def disable_webhooks(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disable_webhooks", value)
@property
@pulumi.getter(name="forceUpdate")
def force_update(self) -> Optional[pulumi.Input[bool]]:
"""
Force resource update through delete/recreate if needed.
"""
return pulumi.get(self, "force_update")
@force_update.setter
def force_update(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "force_update", value)
@property
@pulumi.getter
def keyring(self) -> Optional[pulumi.Input[str]]:
"""
Location of public keys used for verification. Used only if `verify` is true
"""
return pulumi.get(self, "keyring")
@keyring.setter
def keyring(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "keyring", value)
@property
@pulumi.getter
def lint(self) -> Optional[pulumi.Input[bool]]:
"""
Run helm lint when planning.
"""
return pulumi.get(self, "lint")
@lint.setter
def lint(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "lint", value)
@property
@pulumi.getter
def manifest(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
The rendered manifests as JSON. Not yet supported.
"""
return pulumi.get(self, "manifest")
@manifest.setter
def manifest(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "manifest", value)
@property
@pulumi.getter(name="maxHistory")
def max_history(self) -> Optional[pulumi.Input[int]]:
"""
Limit the maximum number of revisions saved per release. Use 0 for no limit.
"""
return pulumi.get(self, "max_history")
@max_history.setter
def max_history(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_history", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Release name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def namespace(self) -> Optional[pulumi.Input[str]]:
"""
Namespace to install the release into.
"""
return pulumi.get(self, "namespace")
@namespace.setter
def namespace(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "namespace", value)
@property
@pulumi.getter
def postrender(self) -> Optional[pulumi.Input[str]]:
"""
Postrender command to run.
"""
return pulumi.get(self, "postrender")
@postrender.setter
def postrender(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "postrender", value)
@property
@pulumi.getter(name="recreatePods")
def recreate_pods(self) -> Optional[pulumi.Input[bool]]:
"""
Perform pods restart during upgrade/rollback.
"""
return pulumi.get(self, "recreate_pods")
@recreate_pods.setter
def recreate_pods(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "recreate_pods", value)
@property
@pulumi.getter(name="renderSubchartNotes")
def render_subchart_notes(self) -> Optional[pulumi.Input[bool]]:
"""
If set, render subchart notes along with the parent.
"""
return pulumi.get(self, "render_subchart_notes")
@render_subchart_notes.setter
def render_subchart_notes(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "render_subchart_notes", value)
@property
@pulumi.getter
def replace(self) -> Optional[pulumi.Input[bool]]:
"""
Re-use the given name, even if that name is already used. This is unsafe in production
"""
return pulumi.get(self, "replace")
@replace.setter
def replace(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "replace", value)
@property
@pulumi.getter(name="repositoryOpts")
def repository_opts(self) -> Optional[pulumi.Input['RepositoryOptsArgs']]:
"""
Specification defining the Helm chart repository to use.
"""
return pulumi.get(self, "repository_opts")
@repository_opts.setter
def repository_opts(self, value: Optional[pulumi.Input['RepositoryOptsArgs']]):
pulumi.set(self, "repository_opts", value)
@property
@pulumi.getter(name="resetValues")
def reset_values(self) -> Optional[pulumi.Input[bool]]:
"""
When upgrading, reset the values to the ones built into the chart.
"""
return pulumi.get(self, "reset_values")
@reset_values.setter
def reset_values(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "reset_values", value)
@property
@pulumi.getter(name="resourceNames")
def resource_names(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[Sequence[pulumi.Input[str]]]]]]:
"""
Names of resources created by the release grouped by "kind/version".
"""
return pulumi.get(self, "resource_names")
@resource_names.setter
def resource_names(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[Sequence[pulumi.Input[str]]]]]]):
pulumi.set(self, "resource_names", value)
@property
@pulumi.getter(name="reuseValues")
def reuse_values(self) -> Optional[pulumi.Input[bool]]:
"""
When upgrading, reuse the last release's values and merge in any overrides. If 'resetValues' is specified, this is ignored
"""
return pulumi.get(self, "reuse_values")
@reuse_values.setter
def reuse_values(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "reuse_values", value)
@property
@pulumi.getter(name="skipAwait")
def skip_await(self) -> Optional[pulumi.Input[bool]]:
"""
By default, the provider waits until all resources are in a ready state before marking the release as successful. Setting this to true will skip such await logic.
"""
return pulumi.get(self, "skip_await")
@skip_await.setter
def skip_await(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "skip_await", value)
@property
@pulumi.getter(name="skipCrds")
def skip_crds(self) -> Optional[pulumi.Input[bool]]:
"""
If set, no CRDs will be installed. By default, CRDs are installed if not already present.
"""
return pulumi.get(self, "skip_crds")
@skip_crds.setter
def skip_crds(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "skip_crds", value)
@property
@pulumi.getter
def timeout(self) -> Optional[pulumi.Input[int]]:
"""
Time in seconds to wait for any individual kubernetes operation.
"""
return pulumi.get(self, "timeout")
@timeout.setter
def timeout(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "timeout", value)
@property
@pulumi.getter(name="valueYamlFiles")
def value_yaml_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]]]]:
"""
List of assets (raw yaml files). Content is read and merged with values. Not yet supported.
"""
return pulumi.get(self, "value_yaml_files")
@value_yaml_files.setter
def value_yaml_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union[pulumi.Asset, pulumi.Archive]]]]]):
pulumi.set(self, "value_yaml_files", value)
@property
@pulumi.getter
def values(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Custom values set for the release.
"""
return pulumi.get(self, "values")
@values.setter
def values(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "values", value)
@property
@pulumi.getter
def verify(self) -> Optional[pulumi.Input[bool]]:
"""
Verify the package before installing it.
"""
return pulumi.get(self, "verify")
@verify.setter
def verify(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "verify", value)
@property
@pulumi.getter
def version(self) -> Optional[pulumi.Input[str]]:
"""
Specify the exact chart version to install. If this is not specified, the latest version is installed.
"""
return pulumi.get(self, "version")
@version.setter
def version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "version", value)
@property
@pulumi.getter(name="waitForJobs")
def wait_for_jobs(self) -> Optional[pulumi.Input[bool]]:
"""
Will wait until all Jobs have been completed before marking the release as successful. This is ignored if `skipAwait` is enabled.
"""
return pulumi.get(self, "wait_for_jobs")
@wait_for_jobs.setter
def wait_for_jobs(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "wait_for_jobs", value)
@pulumi.input_type
class RepositoryOptsArgs:
def __init__(__self__, *,
ca_file: Optional[pulumi.Input[str]] = None,
cert_file: Optional[pulumi.Input[str]] = None,
key_file: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
repo: Optional[pulumi.Input[str]] = None,
username: Optional[pulumi.Input[str]] = None):
"""
Specification defining the Helm chart repository to use.
:param pulumi.Input[str] ca_file: The Repository's CA File
:param pulumi.Input[str] cert_file: The repository's cert file
:param pulumi.Input[str] key_file: The repository's cert key file
:param pulumi.Input[str] password: Password for HTTP basic authentication
        :param pulumi.Input[str] repo: Repository where to locate the requested chart. If it is a URL, the chart is installed without adding the repository.
:param pulumi.Input[str] username: Username for HTTP basic authentication
"""
if ca_file is not None:
pulumi.set(__self__, "ca_file", ca_file)
if cert_file is not None:
pulumi.set(__self__, "cert_file", cert_file)
if key_file is not None:
pulumi.set(__self__, "key_file", key_file)
if password is not None:
pulumi.set(__self__, "password", password)
if repo is not None:
pulumi.set(__self__, "repo", repo)
if username is not None:
pulumi.set(__self__, "username", username)
@property
@pulumi.getter(name="caFile")
def ca_file(self) -> Optional[pulumi.Input[str]]:
"""
The Repository's CA File
"""
return pulumi.get(self, "ca_file")
@ca_file.setter
def ca_file(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ca_file", value)
@property
@pulumi.getter(name="certFile")
def cert_file(self) -> Optional[pulumi.Input[str]]:
"""
The repository's cert file
"""
return pulumi.get(self, "cert_file")
@cert_file.setter
def cert_file(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cert_file", value)
@property
@pulumi.getter(name="keyFile")
def key_file(self) -> Optional[pulumi.Input[str]]:
"""
The repository's cert key file
"""
return pulumi.get(self, "key_file")
@key_file.setter
def key_file(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_file", value)
@property
@pulumi.getter
def password(self) -> Optional[pulumi.Input[str]]:
"""
Password for HTTP basic authentication
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "password", value)
@property
@pulumi.getter
def repo(self) -> Optional[pulumi.Input[str]]:
"""
        Repository where to locate the requested chart. If it is a URL, the chart is installed without adding the repository.
"""
return pulumi.get(self, "repo")
@repo.setter
def repo(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "repo", value)
@property
@pulumi.getter
def username(self) -> Optional[pulumi.Input[str]]:
"""
Username for HTTP basic authentication
"""
return pulumi.get(self, "username")
@username.setter
def username(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "username", value)
def search(items, key):
    # Linear search: report the 1-based position of key in items.
    # (Parameter renamed from `list` to avoid shadowing the built-in.)
    pos = -1
    for i in range(len(items)):
        if items[i] == key:
            pos = i + 1
            break
    if pos != -1:
        print("\n\nThe value {} is found to be at {} position!".format(key, pos))
    else:
        print("\n\nThe value {} cannot be found!".format(key))

print("\nEnter the elements of the array in one string with spaces: \n")
array = list(map(int, input().split()))
key = int(input("\nEnter the number you want to find: \n"))
search(array, key)
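
# For comparison, the same 1-based lookup via the built-in list.index
# (a minimal sketch, not part of the original snippet):
def search_builtin(items, key):
    try:
        return items.index(key) + 1  # list.index raises ValueError when absent
    except ValueError:
        return -1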
# STL imports
import random
import logging
import string
import time
import datetime
import random
import struct
import sys
from functools import wraps
# Third party imports
import numpy as np
import faker
from faker.providers import BaseProvider
logging.getLogger('faker').setLevel(logging.ERROR)
sys.path.append('.')
# grpc
from milvus.grpc_gen import milvus_pb2
def gen_vectors(num, dim):
return [[random.random() for _ in range(dim)] for _ in range(num)]
def gen_single_vector(dim):
return [[random.random() for _ in range(dim)]]
def gen_vector(nb, d, seed=np.random.RandomState(1234)):
xb = seed.rand(nb, d).astype("float32")
return xb.tolist()
def gen_unique_str(str=None):
prefix = "".join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
return prefix if str is None else str + "_" + prefix
def get_current_day():
return time.strftime('%Y-%m-%d', time.localtime())
def get_last_day(day):
tmp = datetime.datetime.now() - datetime.timedelta(days=day)
return tmp.strftime('%Y-%m-%d')
def get_next_day(day):
tmp = datetime.datetime.now() + datetime.timedelta(days=day)
return tmp.strftime('%Y-%m-%d')
def gen_long_str(num):
    # Use a local name other than `string` so the stdlib module imported
    # above is not shadowed; the original also never returned the result.
    result = ''
    for _ in range(num):
        result += random.choice('tomorrow')
    return result
def gen_one_binary(topk):
ids = [random.randrange(10000000, 99999999) for _ in range(topk)]
distances = [random.random() for _ in range(topk)]
return milvus_pb2.TopKQueryResult(struct.pack(str(topk) + 'l', *ids), struct.pack(str(topk) + 'd', *distances))
def gen_nq_binaries(nq, topk):
return [gen_one_binary(topk) for _ in range(nq)]
def fake_query_bin_result(nq, topk):
return gen_nq_binaries(nq, topk)
class FakerProvider(BaseProvider):
def collection_name(self):
return 'collection_names' + str(random.randint(1000, 9999))
def name(self):
return 'name' + str(random.randint(1000, 9999))
def dim(self):
return random.randint(0, 999)
fake = faker.Faker()
fake.add_provider(FakerProvider)
def collection_name_factory():
return fake.collection_name()
def records_factory(dimension, nq):
return [[random.random() for _ in range(dimension)] for _ in range(nq)]
def binary_records_factory(dimension, nq):
def binary_record(bsize):
s_m = "abcdefghijklmnopqrstuvwxyz"
s_list = [s_m[random.randint(0, 25)] for _ in range(bsize)]
s = "".join(s_list)
return bytes(s, encoding="ASCII")
bs = dimension // 8
return [binary_record(bs) for _ in range(nq)]
def integer_factory(nq):
return [random.randint(0, 128) for _ in range(nq)]
def time_it(func):
@wraps(func)
def inner(*args, **kwrgs):
pref = time.perf_counter()
result = func(*args, **kwrgs)
delt = time.perf_counter() - pref
print(f"[{func.__name__}][{delt:.4}s]")
return result
return inner
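
# Illustrative self-test of the helpers above (a sketch; the dimension and
# row count are arbitrary demo values, not requirements of any test suite):
if __name__ == "__main__":
    @time_it
    def _demo():
        return records_factory(dimension=128, nq=10)

    vectors = _demo()  # the decorator prints "[_demo][...s]"
    print(len(vectors), len(vectors[0]))  # -> 10 128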
"""
This file is an example third-party plugin. See `the plugin docs`_ for more
information.
.. _the plugin docs: https://github.com/linode/linode-cli/blob/master/linodecli/plugins/README.md
"""
#: This is the name the plugin will be invoked with once it's registered. Note
#: that this name is different than the module name, which is what's used to
#: register it. This is required for all third party plugins.
PLUGIN_NAME = "example-plugin"
def call(args, context):
"""
This is the entrypoint for the plugin when invoked through the CLI. See the
docs linked above for more information.
"""
print("Hello world!")
# -*- coding: utf-8 -*-
# Libraries
import numpy as np
import pandas as pd
from pandas.tseries.holiday import USFederalHolidayCalendar as calendar
import tensorflow as tf
from sklearn.model_selection import train_test_split
import seaborn as sns
import os
from sklearn.metrics import r2_score, mean_absolute_error, mean_squared_error
import matplotlib as mpl
import matplotlib.pyplot as plt
# %matplotlib inline
# Plot parameters
plt.style.use('seaborn')
mpl.rcParams.update({'axes.titlesize': 24,
'axes.labelsize': 20,
'lines.linewidth': 2,
'lines.markersize': 10,
'xtick.labelsize': 16,
'ytick.labelsize': 16,
'figure.figsize': (12, 8),
'legend.fontsize': 13,
'legend.handlelength': 2})
# Set the GPU as the compute device (logs device placement for verification)
tf.debugging.set_log_device_placement(True)
a = tf.constant([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
b = tf.constant([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
c = tf.matmul(a, b)
print(c)
#Data set https://www.kaggle.com/coldperformer/online-retail-data-v3?select=RetailDataIII.csv
data = pd.read_csv("RetailDataIII.csv")
data.head()
data.Product.value_counts().head(10)
productos = ["White hanging heart t-light holder", 'Regency cakestand 3 tier', 'Jumbo bag red retrospot']
dataset = data[data.Product.isin(productos)].copy()  # .copy() avoids SettingWithCopyWarning on later column writes
# del data
#Extracion de datos
dataset.to_csv("final_data.csv", index=False)
dataset.head()
dataset.BillDate = pd.to_datetime(dataset.BillDate)
cal = calendar()
holidays = cal.holidays(start=dataset.BillDate.min(), end=dataset.BillDate.max())
# Create additional calendar features
dataset["Month"] = dataset.BillDate.dt.month
dataset["Day"] = dataset.BillDate.dt.day
dataset["nameofday"] = dataset.BillDate.dt.day_name()
dataset["dayofweek"] = dataset.BillDate.dt.dayofweek
dataset.loc[dataset.dayofweek >= 5, "isWeekend"] = 1
dataset.loc[dataset.dayofweek < 5, "isWeekend"] = 0
dataset["isHoliday"] = dataset.BillDate.isin(holidays)
dataset.isHoliday = dataset.isHoliday.map({True: 1, False: 0})
dataset["Year"] = dataset.BillDate.dt.year
dataset["WeekOfYear"] = dataset.BillDate.dt.weekofyear
dataset.head()
"""## Con los 13K datos"""
# Label product names
dataset.Product, values = pd.factorize(dataset.Product)
dataset.head()
# Remove negative Quota values and outliers
LOWER = dataset.Quota.quantile(q=0.05)
UPPER = dataset.Quota.quantile(q=0.99)
dataset = dataset[(dataset.Quota >= 0) & (dataset.Quota <= UPPER)]
# product_cols = pd.get_dummies(dataset.Product, prefix="Product")
# dataset = pd.concat([dataset, product_cols], axis=1)
# dataset.drop(columns="Product", inplace=True)
dataset.reset_index(drop=True ,inplace=True)
data = pd.DataFrame()
print(dataset.count())
# Truncate so the row count is a multiple of 5; the stride-5 slicing below
# needs D1..D5 to have equal lengths (the original `loc[:len(dataset)-5]`
# only aligned for certain row counts)
dataset = dataset.iloc[:len(dataset) - (len(dataset) % 5)]
# Collect the sales values of consecutive days into one row per window
data['D1'] = dataset.loc[::5, "Quota"]
data['D2'] = dataset.loc[1::5, "Quota"].values
data['D3'] = dataset.loc[2::5, "Quota"].values
data['D4'] = dataset.loc[3::5, "Quota"].values
data['D5'] = dataset.loc[4::5, "Quota"].values
X_full = data.drop(columns="D5")
y_full = data["D5"]
#X_full = dataset.drop(labels=['Bill', 'MerchandiseID', 'BillDate', 'CustomerID', 'Country', 'Product','Quota','nameofday'], axis=1)
#y_full = dataset['Quota']
dataset.columns
X_full.columns
# Train/validation/test split
X_train, X_test, y_train, y_test = train_test_split(X_full, y_full, test_size=0.2, random_state=42)
X_train, X_valid, y_train, y_valid = train_test_split(X_train, y_train, test_size=0.25, random_state=42)
X_train.shape
X_train.head()
# Neural network architectures; note that the four Sequential definitions
# below overwrite one another, so only the last one (RN 4) is trained.
# glorot_uniform = Xavier initialization
model = tf.keras.models.Sequential(
[
tf.keras.layers.InputLayer(input_shape=X_train.iloc[0, :].shape),
tf.keras.layers.Dense(2, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(1)
]
)
# -------- RN 2 -------- #
model = tf.keras.models.Sequential(
[
tf.keras.layers.InputLayer(input_shape=X_train.iloc[0, :].shape),
tf.keras.layers.Dense(3, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(2, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(1)
]
)
# -------- RN 3 -------- #
model = tf.keras.models.Sequential(
[
tf.keras.layers.InputLayer(input_shape=X_train.iloc[0, :].shape),
tf.keras.layers.Dense(4, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(5, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(5, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(1)
]
)
# -------- RN 4 -------- #
model = tf.keras.models.Sequential(
[
tf.keras.layers.InputLayer(input_shape=X_train.iloc[0, :].shape),
tf.keras.layers.Dense(6, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(5, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(6, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(5, kernel_initializer="glorot_uniform", use_bias=False),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation("tanh"),
tf.keras.layers.Dense(1)
]
)
# Metrics
metrics = [
tf.keras.metrics.MAE,
tf.keras.metrics.MAPE,
tf.keras.metrics.MSE
]
# Optimizer
optimizer = tf.keras.optimizers.SGD(learning_rate=0.0005)
# Model compile (Keras expects one loss per model output, so a single loss
# is used here; MAPE and MSE are still tracked through `metrics`)
model.compile(
    optimizer=optimizer,
    loss='mape',
    metrics=metrics,
)
"""# RNN"""
from sklearn.metrics import mean_squared_error, mean_absolute_error
import time
# Measure training wall-clock time
start_time = time.time()
# Start training
history = model.fit(
x=X_train.values,
y=y_train,
epochs=200,
validation_data=(X_valid.values, y_valid),
#callbacks=[tf.keras.callbacks.EarlyStopping(patience=50)],
batch_size=500
)
print("Time: ", time.time() - start_time)
# %load_ext tensorboard
# %tensorboard --logdir=./my_logs --port=6006
y_pred = model.predict(X_test.values)
model.evaluate(X_test.values, y_test)
# MAPE computation
def mean_absolute_percentage_error(y_true, y_pred):
y_true, y_pred = np.array(y_true), np.array(y_pred)
return np.mean(np.abs((y_true - y_pred) / y_true)) * 100
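
# Quick sanity check of the metric (10% error on both points -> ~10.0):
print("MAPE demo:", mean_absolute_percentage_error([100, 200], [110, 180]))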
print("RMSE: ", np.sqrt(mean_squared_error(y_test, y_pred)))
print("MAE: ", mean_absolute_error(y_test, y_pred))
print("MAPE: ", mean_absolute_percentage_error(y_test, y_pred))
plt.figure(figsize=(32, 10))
sns.lineplot(y=y_pred.flatten(), x=range(len(y_pred.flatten())), alpha=0.5, label='Prediction')
sns.lineplot(y=y_test, x=range(len(y_test)), alpha=0.5, label='Test Values')
plt.plot(history.history['mean_squared_error'])
plt.title('Network learning curve')
plt.ylabel('MSE')
plt.xlabel('Epoch')
"""## SVM"""
from sklearn.svm import SVR
svm_reg = SVR()
start_time = time.time()
svm_reg.fit(X_train, y_train)
print("Time:", time.time() - start_time)
y_pred = svm_reg.predict(X_test)
print("RMSE: ", np.sqrt(mean_squared_error(y_test, y_pred)))
print("MAE: ", mean_absolute_error(y_test, y_pred))
print("MAPE: ", mean_absolute_percentage_error(y_test, y_pred))
"""# Gradient Boosted Trees"""
from lightgbm import LGBMRegressor
from xgboost import XGBRegressor
lgbm_reg = LGBMRegressor(
n_estimators=1000,
random_state=42,
objective='mape',
num_iterations=5000,
)
start_time = time.time()
# Train the GBT model
lgbm_reg.fit(
X_train,
y_train,
eval_set=(X_valid, y_valid),
eval_metric='mape',
early_stopping_rounds=200
)
print("Time: ", time.time() - start_time)
y_pred = lgbm_reg.predict(X_test)
print("RMSE: ", np.sqrt(mean_squared_error(y_test, y_pred)))
print("MAE: ", mean_absolute_error(y_test, y_pred))
print("MAPE: ", mean_absolute_percentage_error(y_test, y_pred)) | StarcoderdataPython |
1693904 | <filename>code/babymapping_1219/Data_zoo/vae_parents.py<gh_stars>1-10
import os
import glob
import imageio
import cv2
import torch
import numpy as np
#from base import BaseData #1
from Data_zoo.base import BaseData #2
import yaml
from easydict import EasyDict as edict
class Vae_parents(BaseData):
def __init__(self, args):
super(Vae_parents, self).__init__(args)
self._op_init_param(args.dataset) #After this, all parameters defined in yaml can be used.
self.file_list_father, self.file_list_mother = self._scan_files(args.root_dir, args=args.dataset)
def _op_init_param(self, args_d):
        ''' Rewrite or use default. This func is to get more parameters
            that belong to args.DATASET_CONFIG. Uncomment one style to
            select another one.
        '''
# Style 1: Use default method defined by father class.
super()._op_init_param(args_d)
# Style 2: Rewrite by yourself
#self.xxx = 'xxx'
''' Image normalize'''
def _op_image_normalize(self, img, max_value=255.):
        '''
        input: tensor, value: 0.0-255.0
        output: tensor, value: -1.0-1.0
        This function can also be rewritten by the user to produce a customized value range.
        '''
        img = img.div(max_value)      # scale to 0-1
        img = img.mul(2.0).add(-1.0)  # 0-1 -> 0-2 -> -1-1
return img
def _op_readasTensor(self, path):
        ''' Rewrite or use default. Reads the image at `path` into a CxHxW
            tensor. Uncomment one style to select another one.
        '''
# Style 1: Use default method defined by father class.
img = super()._op_readasTensor(path)
assert img.shape[0]==3, '{}'.format(path)
return img
# Style 2: Rewrite by yourself
def _scan_files(self, scan_dir, args=None)->list:
ext = args.ext
phase = args.phase
scan_dir = os.path.join(scan_dir, phase)
assert os.path.isdir(scan_dir)
filepath_list_father = []
filepath_list_mother = []
filepath_list_child = []
for root, dirs, files in os.walk(scan_dir):
for filepath in files:
if ext in filepath and 'ori' not in filepath:
if int(filepath[:2])==1: #father
filepath_list_father.append(os.path.join(root, filepath))
elif int(filepath[:2])==2: #mother
filepath_list_mother.append(os.path.join(root, filepath))
else: #child
filepath_list_child.append(os.path.join(root, filepath))
        return filepath_list_father, filepath_list_mother  # all father/mother files in scan_dir, excluding 'ori' images
''' Customized functions
'''
def disentangle_label(self, label):
# input label should be a string
# for father and mother, the family status is 1, while children is larger than 2
# gender: 3rd; skin color: 4th; age: 5th; emotion: 6th; glass: 7th; moustache: 8th
# these are binary labels, and only gender, age, emotion, glass, and moustache are taken into account
if label[:2] == '01' or label[:2] == '02':
new_label = str(int(label[:2])) + ',' + label[2] + ',' + label[4] + ',' + label[5] + ',' + label[6] + ',' + label[7]
else:
new_label = '0' + ',' + label[2] + ',' + label[4] + ',' + label[5] + ',' + label[6] + ',' + label[7]
        # np.fromstring(..., sep=',') is deprecated; build the array explicitly
        new_label = np.array([int(v) for v in new_label.split(',')])
        new_label = torch.from_numpy(new_label)
return new_label
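        # Worked example (hypothetical filename label):
        #   disentangle_label('01122222') -> tensor([1, 1, 2, 2, 2, 2])
        # family status '01' collapses to 1 (parent); the gender, age, emotion,
        # glass and moustache digits are carried over unchanged.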
def binarize_label(self, label):
# index value meaning value meaning
# 0 0 child 1 parent
# 1 0 woman 1 man
# 2 0 older 1 younger
# 3 0 smile 1 not smile
# 4 0 glass 1 no glass
# 5 0 moustache 1 no moustache
# Male | Eyeglasses | Mustache | Smiling: 1 = True, 0 = False
attr = []
if label[1] == 1:
attr.append(1)
if label[1] == 2:
attr.append(0)
if label[4] == 1:
attr.append(1)
if label[4] == 2:
attr.append(0)
if label[5] == 1:
attr.append(1)
if label[5] == 2:
attr.append(0)
if label[3] == 2:
attr.append(1)
if label[3] == 1 or label[3] == 3:
attr.append(0)
attr = np.array(attr)
attr = torch.from_numpy(attr).float()
assert attr.shape[0]==4, '{}'.format(label)
return attr
def __len__(self):
return min(len(self.file_list_father), len(self.file_list_mother))
def __getitem__(self, idx):
img_father = self._op_readasTensor(self.file_list_father[idx]) #Read as a tensor, CxHxW, value range:0.0-255.0
img_mother = self._op_readasTensor(self.file_list_mother[idx]) #Read as a tensor, CxHxW, value range:0.0-255.0
img_father = self._op_image_normalize(img_father) #normalize to -1,1
img_mother = self._op_image_normalize(img_mother) #normalize to -1,1
imglabel_father = self.file_list_father[idx].split('/')[-1][:-4] #'01122222'
imglabel_mother = self.file_list_mother[idx].split('/')[-1][:-4] #'02122222'
imglabel_father = self.disentangle_label(imglabel_father)
imglabel_mother = self.disentangle_label(imglabel_mother)
imglabel_father = self.binarize_label(imglabel_father)
imglabel_mother = self.binarize_label(imglabel_mother)
return img_father, img_mother, imglabel_father, imglabel_mother #img shape:[C,H,W], value:-1~1, imglabel shape:[4]
if __name__=='__main__':
    # yaml.load() requires an explicit Loader in recent PyYAML releases
    args = edict(yaml.load(open('../yaml/base.yaml', 'r'), Loader=yaml.SafeLoader))
    pdata = Vae_parents(args.DATASET_CONFIG)  # the original `Example` class is not defined in this module
# ===== File boundary =====
# -*- coding: utf-8 -*-
"""
@brief test log(time=2s)
"""
import unittest
from sklearn.ensemble import RandomForestClassifier
from pyquickhelper.pycode import ExtTestCase
from pymlbenchmark.benchmark.sklearn_helper import get_nb_skl_base_estimators
from pymlbenchmark.datasets import random_binary_classification
class TestSklearnHelper(ExtTestCase):
def test_get_nb_skl_base_estimators(self):
X, y = random_binary_classification(40, 4)
rf = RandomForestClassifier(max_depth=2, n_estimators=4)
rf.fit(X, y)
n1 = get_nb_skl_base_estimators(rf, fitted=False)
n2 = get_nb_skl_base_estimators(rf, fitted=True)
self.assertEqual(n1, 2)
self.assertEqual(n2, 5)
if __name__ == "__main__":
unittest.main()
# ===== File: setup.py (repo: dn757657/please-delete-me) =====
from distutils.core import setup
setup(name='ct_data',
version='0.1.1',
packages=['ct_data'],
license='MIT',
description = 'finance management tool',
author = 'Dan',
author_email = '<EMAIL>',
url = 'https://github.com/dn757657/ct_data2.git',
download_url = 'https://github.com/dn757657/ct_data/archive/refs/tags/0.1.1.tar.gz',
keywords = ['Management', 'finance', 'automation'],
    install_requires=[
        'pandas',
        'tabulate',
        'web3',
        'python-dateutil',
        'textblob',
        'colorama',
        'docopt',
        'qtrade',
        'pandas_datareader',
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',  # Choose "3 - Alpha", "4 - Beta" or "5 - Production/Stable" as the current state of your package
        'Intended Audience :: Developers',  # Define that your audience are developers
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',  # Again, pick a license
        'Programming Language :: Python :: 3',  # Specify which Python versions you want to support
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
)
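# Typical distutils usage (run from the repo root) to build a source distribution:
#   python setup.py sdist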
# ===== File boundary =====
# -*- coding: utf-8 -*-
from mrjob.job import MRJob
class SalesRanker(MRJob):
def within_past_week(self, timestamp):
"""Return True if timestamp is within past week, False otherwise."""
...
def mapper(self, _, line):
"""Parse each log line, extract and transform relevant lines.
Emit key value pairs of the form:
(foo, p1), 2
(bar, p1), 2
(bar, p1), 1
(foo, p2), 3
(bar, p3), 10
(foo, p4), 1
"""
        timestamp, product_id, category, quantity = line.split('\t')
        if self.within_past_week(timestamp):
            yield (category, product_id), int(quantity)  # cast: split() yields strings
    def reducer(self, key, values):
"""Sum values for each key.
(foo, p1), 2
(bar, p1), 3
(foo, p2), 3
(bar, p3), 10
(foo, p4), 1
"""
yield key, sum(values)
def mapper_sort(self, key, value):
"""Construct key to ensure proper sorting.
Transform key and value to the form:
(foo, 2), p1
(bar, 3), p1
(foo, 3), p2
(bar, 10), p3
(foo, 1), p4
The shuffle/sort step of MapReduce will then do a
distributed sort on the keys, resulting in:
(category1, 1), product4
(category1, 2), product1
(category1, 3), product2
(category2, 3), product1
(category2, 7), product3
"""
category, product_id = key
quantity = value
yield (category, quantity), product_id
def reducer_identity(self, key, value):
yield key, value
def steps(self):
"""Run the map and reduce steps."""
return [
self.mr(mapper=self.mapper,
reducer=self.reducer),
self.mr(mapper=self.mapper_sort,
reducer=self.reducer_identity),
]
if __name__ == '__main__':
    SalesRanker.run()  # the original called the undefined HitCounts class
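# Typical MRJob invocation (assuming this file is saved as sales_ranker.py and
# the input is tab-separated log lines):
#   python sales_ranker.py input.tsv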
# ===== File: src/news.py =====
try:
    from webdriver_manager.chrome import ChromeDriverManager
except ImportError:
    raise ImportError("'webdriver-manager' package not installed")
try:
    from selenium.webdriver.common.keys import Keys
    from selenium import webdriver
except ImportError:
    raise ImportError("'selenium' package not installed")
from bs4 import BeautifulSoup
import pandas as pd
import time
usr_agent = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive',
}
def scrappi(n_pages, genre):
if genre not in ['national', 'business', 'sports', 'world', 'politics', 'technology', 'startup', 'entertainment',
'miscellaneous', 'hatke', 'science', 'automobile']:
raise ValueError("'genre' value not exists")
chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument('--headless')
chrome_options.headless = True
driver = webdriver.Chrome(ChromeDriverManager(print_first_line=False).install(), options = chrome_options)
driver.create_options()
driver.get('https://inshorts.com/en/read/'+genre)
for _ in range(n_pages):
driver.find_element_by_tag_name('body').send_keys(Keys.END)
time.sleep(3)
driver.find_element_by_id('load-more-btn').click()
text_field = driver.find_element_by_id('load-more-btn')
html = driver.page_source
soup = BeautifulSoup(html, 'html.parser')
main = soup.find_all('div', {"class": "news-card z-depth-1"})
lst = []
for details in main:
dictionary={}
dictionary['Headlines'] = (details.find('a', {"class": "clickable"}).text).replace('\n', '')
dictionary['Time'] = details.find('span', {"class": "time"}).text
date = details.find('div', {"class": "news-card-author-time news-card-author-time-in-title"}).find_all('span')
dictionary['Date'] = date[3].text
dictionary['News'] = details.find('div', {"itemprop": "articleBody"}).text
lst.append(dictionary)
    driver.quit()  # release the browser session before returning
    return pd.DataFrame(lst)

# ===== File: MITx/6.00.1x/Week 2/Lecture_3/strings.py (0 GitHub stars) =====
# String and Loops
s1 = "abcdefgh"
print('String is: ', s1)
print('String reversed is: ', s1[::-1])
# Code Sample
iteration = 0
count = 0
while iteration < 5:
# the variable 'letter' in the loop stands for every
# character, including spaces and commas!
for letter in "hello, world":
count += 1
print("Iteration " + str(iteration) + "; count is: " + str(count))
iteration += 1
# ===== File (repo: rbrady/os-migrate) =====
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from pprint import pformat
import re
from ansible import errors
def stringfilter(items, queries, attribute=None):
"""Filter a `items` list according to a list of `queries`. Values from
`items` are kept if they match at least one query. The original
`items` list is untouched but the result list uses the same data
(not a deep copy).
If `attribute` is None, it is assumed that `items` is a list of
strings to be filtered directly. If `attribute` is provided, it is
assumed that `items` is a list of dicts, and `queries` will tested
against value under `attribute` key in each dict.
`queries` is a list where each item can be:
- string: String equality match is performed.
- dict with single key `regex`: The value of `regex` is a Python
regular expression, and a regex match is performed.
Returns: a list - subset of `strings` where each item matched one
or more `queries`
"""
result = []
for item in items:
if attribute is not None:
if not isinstance(item, dict):
raise errors.AnsibleFilterError(
"stringfilter: 'attribute' parameter provided "
"but list item is not dict: {0}".format(pformat(item))
)
if attribute not in item:
raise errors.AnsibleFilterError(
"stringfilter: 'attribute' is {0} "
"but it was not found in list item: {1}"
.format(pformat(attribute), pformat(item))
)
string = item[attribute]
else:
if not isinstance(item, str):
raise errors.AnsibleFilterError(
"stringfilter: list item is not string: {0}"
.format(pformat(item))
)
string = item
for query in queries:
if isinstance(query, str):
if query == string:
result.append(item)
break
elif isinstance(query, dict) and query.get('regex'):
if re.search(query['regex'], string):
result.append(item)
break
else:
raise errors.AnsibleFilterError(
"stringfilter: unrecognized query: {0}"
.format(pformat(query))
)
return result
class FilterModule(object):
def filters(self):
return {
'stringfilter': stringfilter,
}
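# Example usage (a minimal sketch; values are illustrative):
#   stringfilter(['eth0', 'eth1', 'lo'], ['lo', {'regex': r'^eth'}])
#     -> ['eth0', 'eth1', 'lo']
#   stringfilter([{'name': 'net1'}, {'name': 'db1'}], [{'regex': 'net'}], attribute='name')
#     -> [{'name': 'net1'}]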
# ===== File boundary =====
from flask_pymongo import PyMongo
from flask import Flask, render_template, redirect
import scrape_mars
app = Flask(__name__)
# Use flask_pymongo to set up mongo connection
app.config["MONGO_URI"] = "mongodb://localhost:27017/mission_to_mars_app"
mongo = PyMongo(app)  # must be named `mongo` to match the `mongo.db` references below
# Create main page
@app.route("/")
def index():
mars_data = mongo.db.mars_data.find_one()
return render_template("index.html", data = mars_data)
# Create scrape page
@app.route("/scrape")
def scraper():
mars_data = mongo.db.mars_data
mars_item_data = scrape_mars.scrape()
mars_data.update({}, mars_item_data, upsert=True)
return redirect("/", code=302)
if __name__ == "__main__":
app.run(debug=True)
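# Run locally with `python app.py` (assumed filename); the app expects a local
# MongoDB instance on the default port serving the `mission_to_mars_app` database.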
# ===== File boundary =====
import math
import random
import time
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.db.models import Q
from movies.models import Movie, Score
from argparse import ArgumentParser
class Command(BaseCommand):
help = 'Create test scores'
def add_arguments(self, parser: ArgumentParser):
parser.add_argument('--percentage', type=int, default=100, choices=range(1, 101))
parser.add_argument('--median', type=float, default=7)
parser.add_argument('--max_commit_count', type=float, default=float('inf'))
def handle(self, *args, **options):
percentage = options['percentage']
users = User.objects.filter(Q(username__startswith='TestUser')).all()
users_to_score = int(len(users) / 100 * percentage)
median: float = options['median'] - 1
assert 0 <= median <= 9
max_commit_count = options['max_commit_count']
Score.objects.filter(user__username__startswith='TestUser').delete()
program_start = time.time()
scores = []
for movie in Movie.objects.all():
start = time.time()
print(movie)
# weights = random.choices(range(1, 11), k=10)
weights: list = [1, 2] + [3] * 5 + [10] * 3
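            # Base weights: scores 1-2 are rare (weights 1 and 2), 3-7 moderate
            # (weight 3), 8-10 common (weight 10); the branch below then boosts
            # the weight(s) around the requested median so samples cluster there.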
if not median.is_integer():
remains = int(median * 10) % 10
weights[math.floor(median)] = 60 + (15 - remains * 1.5)
weights[math.ceil(median)] = 60 + remains * 1.5
else:
weights[int(median)] = 135
            if movie.title == 'Комната':
                weights[9] = math.inf  # force (nearly) every sampled score for this movie to 10
scores.extend([Score(movie=movie,
user=user,
value=random.choices(range(1, 11), weights, k=1)[0])
for user in random.choices(users, k=users_to_score)])
if len(scores) >= max_commit_count:
print(f'Bulk dump for {len(scores)} entries')
Score.objects.bulk_create(scores)
scores.clear()
print(f'Time: {time.time() - start}')
print(f'Score collected! Time: {time.time() - program_start}')
Score.objects.bulk_create(scores)
print(f'Final Time: {time.time() - program_start}')
# ===== File boundary =====
#!/usr/local/bin/python3.4
import os
import sys
import re
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "papersoccer.settings")
import django
django.setup()
from schemes.models import KurnikReplay
game_date = sys.argv[1]
start_id = int(sys.argv[2])
end_id = int(sys.argv[3])+1
for id in range(start_id,end_id):
replay = open('labs/' + game_date + '/' + str(id) + '.txt', 'r')
replay_lines = replay.readlines()
replay.close()
is_second_player = replay_lines[10].strip()
if (is_second_player == '' or is_second_player == '1-0' or is_second_player == '0-1'):
print("Ta partia nie zostanie uwzględniona: " + str(id))
else:
# 1. Moves
moves = ''
for i in range(12,len(replay_lines)-1):
moves += replay_lines[i].strip() + ' '
clean_moves = re.sub(r'\d{1,2}\.', r'', moves)
all_moves = clean_moves.split()
all_moves_string = ''
i = 0
for move in all_moves:
if move == '1-0' or move == '0-1':
result = move
else:
if i == 0:
all_moves_string = all_moves_string + move
else:
all_moves_string = all_moves_string + ' ' + move
i += 1
        # 2. Other information
tmp_date = replay_lines[2].strip()
date = re.sub(r'\[Date \"(\d{4}).(\d{2}).(\d{2})\"\]', r'\3.\2.\1', tmp_date)
tmp_time = replay_lines[7].strip()
time = re.sub(r'\[Time \"(\d{2}):(\d{2}):(\d{2})\"\]', r'\1:\2:\3', tmp_time)
tmp_round_time = replay_lines[8].strip()
round_time = re.sub(r'\[TimeControl \"(\d{1,3})\"\]', r'\1', tmp_round_time)
tmp_player1 = replay_lines[4].strip()
player1 = re.sub(r'\[Black \"(.+?)\"\]', r'\1', tmp_player1)
tmp_player2 = replay_lines[5].strip()
player2 = re.sub(r'\[White \"(.+?)\"\]', r'\1', tmp_player2)
tmp_elo1 = replay_lines[9].strip()
elo1 = re.sub(r'\[BlackElo \"(\d{1,4})\"\]', r'\1', tmp_elo1)
tmp_elo2 = replay_lines[10].strip()
elo2 = re.sub(r'\[WhiteElo \"(\d{1,4})\"\]', r'\1', tmp_elo2)
"""
print("Date: " + date + " - " + time + " - " + round_time + " sekund")
print("Player 1: " + player1)
print("Player 2: " + player2)
print("ELO 1: " + elo1)
print("ELO 2: " + elo2)
print("Moves: " + all_moves_string)
print("Result: " + result)
"""
# 3. Add to database
add_replay = KurnikReplay(name=str(id), player1=player1, player2=player2, replay_date=date, replay_time=time, replay_round=round_time, player1_elo=elo1, player2_elo=elo2, moves=all_moves_string, result=result)
add_replay.save()
# ===== File (repo: UpperLEFTY/worldpay-within-sdk) =====
import InterruptedException
import WPWithinWrapperImpl
import WWTypes
import time
def discoverDevices(): # throws WPWithinGeneralException {
devices = wpw.deviceDiscovery(8000)
if devices != None and len(devices) > 0:
print "{0} services found:\n".format(len(devices))
for svcMsg in devices:
print "Device Description: {0}\n".format(svcMsg.getDeviceDescription())
print "Hostname: {0}\n".format(svcMsg.getHostname())
print "Port: {0}\n".format(svcMsg.getPortNumber())
print "URL Prefix: {0}\n".format(svcMsg.getUrlPrefix())
print "ServerId: {0}\n".format(svcMsg.getServerId())
print "Scheme: {0}\n".format(svcMsg.getScheme()) # debb kev this has gone missing...?
print "--------"
    else:
        if devices == None:
            print "No services found... devices was None"
        else:
            print "No services found... devices length: " + str(len(devices))
return devices
def connectToDevice(svcMsg): # throws WPWithinGeneralException {
card = WWTypes.WWHCECard()
card.setFirstName("Bilbo")
card.setLastName("Baggins")
card.setCardNumber("5555555555554444")
card.setExpMonth(11)
card.setExpYear(2018)
card.setType("Card")
card.setCvc("113")
wpw.initConsumer("http://", svcMsg.getHostname(), svcMsg.getPortNumber(), svcMsg.getUrlPrefix(), svcMsg.getServerId(), card, {"psp_name":"worldpayonlinepayments","api_endpoint":"https://api.worldpay.com/v1"})
def getAvailableServices(): #throws WPWithinGeneralException {
services = wpw.requestServices()
print "{0} services found\n".format(len(services))
if services != None and len(services) > 0:
for svc in services:
print "Service:"
print "Id: {0}\n".format(svc.getServiceId())
print "Description: {0}\n".format(svc.getServiceDescription())
print "------"
return services
def getServicePrices(serviceId): # throws WPWithinGeneralException {
prices = wpw.getServicePrices(serviceId)
print "{0:10.2f} prices found for service id {1}\n".format(len(prices), serviceId)
if prices != None and len(prices) > 0:
for price in prices:
print "Price:"
print "Id: {0}\n".format(price.getId())
print "Description: {0}\n".format(price.getDescription())
print "UnitId: {0}\n".format(price.getUnitId())
#print "UnitDescription: {0}\n".format(price.getUnitDescription()) #not likey this some reason
#print "Unit Price Amount: {10.2f}\n".format(price.getPricePerUnit().getAmount()) #not likey this... :(
#print "Unit Price CurrencyCode: {0}\n".format(price.getPricePerUnit().getCurrencyCode()) #not likey this either...
print "------"
return prices
def getServicePriceQuote(serviceId, numberOfUnits, priceId): # throws WPWithinGeneralException {
tpr = wpw.selectService(serviceId, numberOfUnits, priceId)
if tpr != None:
print "Did retrieve price quote:"
print "Merchant client key: {0}\n".format(tpr.getMerchantClientKey())
print "Payment reference id: {0}\n".format(tpr.getPaymentReferenceId())
print "Units to supply: {0:10.2f}\n".format(tpr.getUnitsToSupply())
#print "Currency code: {0}\n".format(tpr.getCurrencyCode()) #TODO fix this
print "Total price: {0:10.2f}\n".format(tpr.getTotalPrice())
else:
print "Result of select service is None"
return tpr
def purchaseService(serviceId, pReq): # throws WPWithinGeneralException {
pResp = wpw.makePayment(pReq)
    if pResp != None:
        sdt = pResp.getServiceDeliveryToken()  # fetch the token only when a response exists
print 'Payment response:'
print "Total paid: {0:10.2f}\n".format(pResp.getTotalPaid())
print "ServiceDeliveryToken.issued: {0}\n".format(sdt.getIssued()) #not coming through right
print "ServiceDeliveryToken.expiry: {0}\n".format(sdt.getExpiry())
print "ServiceDeliveryToken.key: %{0}\n".format(sdt.getKey())
print "ServiceDeliveryToken.signature: {0}\n".format(sdt.getSignature())
print "ServiceDeliveryToken.refundOnExpiry: {0}\n".format(sdt.getRefundOnExpiry())
beginServiceDelivery(serviceId, sdt, 5)
else:
print 'Result of make payment is None..'
return pResp
def beginServiceDelivery(serviceID, token, unitsToSupply): # throws WPWithinGeneralException {
print 'Calling beginServiceDelivery()'
print str(token)
if token == None:
print "Token empty at runConsumer side"
else:
print "Token not empty at runConsumer side"
wpw.beginServiceDelivery(serviceID, token, unitsToSupply)
try:
print 'Sleeping 10 seconds..'
time.sleep(10)
endServiceDelivery(serviceID, token, unitsToSupply)
except InterruptedException as e:
print e
def endServiceDelivery(serviceID, token, unitsReceived): # throws WPWithinGeneralException {
print 'Calling endServiceDelivery()'
wpw.endServiceDelivery(serviceID, token, unitsReceived)
def run():
print 'Starting Consumer Example Written in Python.'
global wpw
wpw = WPWithinWrapperImpl.WPWithinWrapperImpl('127.0.0.1', 8778, False)
try:
wpw.setup("my-device", "an example consumer device")
wpwDevice = wpw.getDevice()
print "::" + wpwDevice.getUid() + ":" + wpwDevice.getName() + ":" + wpwDevice.getDescription() + ":" + str(wpwDevice.getServices()) + ":" + wpwDevice.getIpv4Address() + ":" + wpwDevice.getCurrencyCode()
if wpwDevice != None:
print "Successfully got a device"
devices = discoverDevices()
if devices != None:
onlyRunOnce = 0
for svcMsg in devices:
# Should pick the first device discovered
onlyRunOnce = onlyRunOnce + 1
connectToDevice(svcMsg)
svcDetails = getAvailableServices()
onlyRunOnce2 = 0
if svcDetails != None:
for svcDetail in svcDetails:
onlyRunOnce2 = onlyRunOnce2 + 1
svcPrices = getServicePrices(svcDetail.getServiceId())
if svcPrices != None:
onlyRunOnce3 = 0
for svcPrice in svcPrices:
onlyRunOnce3 = onlyRunOnce3 + 1
#Select the first price in the list
tpr = getServicePriceQuote(svcDetail.getServiceId(), 5, svcPrice.getId())
print 'Client ID: {0}\n'.format(tpr.getClientId())
print 'Server ID: {0}\n'.format(tpr.getServerId())
paymentResponse = purchaseService(svcDetail.getServiceId(), tpr)
if onlyRunOnce3 != 0:
break
if onlyRunOnce2 != 0:
break
if onlyRunOnce != 0:
break
else:
print "Could not get device"
wpw.stopRPCAgent()
except WWTypes.WPWithinGeneralException as wpge:
print wpge
run()
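# This example assumes a local WPWithin RPC agent listening on 127.0.0.1:8778
# and at least one producer device discoverable on the network.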
# ===== File boundary =====
# -*- coding: utf-8 -*-
import uuid
import pprint
from datetime import datetime
def DD(vars):
pprint.pprint(vars)
def get_uuid():
uuid_1 = uuid.uuid1()
uuid_4 = uuid.uuid4()
return '%s-%s' % (uuid_1, uuid_4)
def get_now_timestamp():
return datetime.now().strftime('%Y%m%d%H%M%S')
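# e.g. get_now_timestamp() -> '20190402153045' (YYYYmmddHHMMSS)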
class Common(object):
def __init__(self, *args, **kwargs):
pass
def DD(self, vars):
DD(vars)
def get_uuid(self):
return get_uuid()
def get_create_date(self):
return get_now_timestamp()
# ===== File boundary (10-100 GitHub stars) =====
"""Reproduce some plots from <NAME>'s arXiv:astro-ph/9905116v4
"""
from __future__ import absolute_import, division, print_function
import inspect
import numpy
import matplotlib.pyplot as pylab
import cosmolopy.distance as cd
import cosmolopy.constants as cc
def test_figure1():
"""Plot Hogg fig. 1: The dimensionless proper motion distance DM/DH.
The three curves are for the three world models, Einstein-de
Sitter (omega_M, omega_lambda) = (1, 0), solid; low-density,
(0.05, 0), dotted; and high lambda, (0.2, 0.8), dashed.
Hubble distance DH = c / H0
z from 0--5
DM / DH from 0--3
"""
z = numpy.arange(0, 5.05, 0.05)
cosmo = {}
cosmo['omega_M_0'] = numpy.array([[1.0],[0.05],[0.2]])
cosmo['omega_lambda_0'] = numpy.array([[0.0],[0.0],[0.8]])
cosmo['h'] = 0.5
cd.set_omega_k_0(cosmo)
linestyle = ['-', ':', '--']
dh = cd.hubble_distance_z(0, **cosmo)
dm = cd.comoving_distance_transverse(z, **cosmo)
pylab.figure(figsize=(6,6))
for i in range(len(linestyle)):
pylab.plot(z, (dm/dh)[i], ls=linestyle[i])
#pylab.plot(z, (dm_err/dh)[i], ls=linestyle[i])
pylab.xlim(0,5)
pylab.ylim(0,3)
pylab.xlabel("redshift z")
pylab.ylabel(r"proper motion distance $D_M/D_H$")
pylab.title("compare to " + inspect.stack()[0][3].replace('test_', '') +
" (astro-ph/9905116v4)")
def test_figure2():
"""Plot Hogg fig. 2: The dimensionless angular diameter distance DA/DH.
The three curves are for the three world models,
- Einstein-de Sitter (omega_M, omega_lambda) = (1, 0) [solid]
: Low-density (0.05, 0) [dotted]
-- High lambda, (0.2, 0.8) [dashed]
Hubble distance DH = c / H0
z from 0--5
DA / DH from 0--0.5
"""
z = numpy.arange(0, 5.05, 0.05)
cosmo = {}
cosmo['omega_M_0'] = numpy.array([[1.0],[0.05],[0.2]])
cosmo['omega_lambda_0'] = numpy.array([[0.0],[0.0],[0.8]])
cosmo['h'] = 0.5
cd.set_omega_k_0(cosmo)
linestyle = ['-', ':', '--']
dh = cd.hubble_distance_z(0, **cosmo)
da = cd.angular_diameter_distance(z, **cosmo)
# Also test the pathway with non-zero z0
da2 = cd.angular_diameter_distance(z, z0=1e-8, **cosmo)
pylab.figure(figsize=(6,6))
for i in range(len(linestyle)):
pylab.plot(z, (da/dh)[i], ls=linestyle[i])
pylab.plot(z, (da2/dh)[i], ls=linestyle[i])
pylab.xlim(0,5)
pylab.ylim(0,0.5)
pylab.xlabel("redshift z")
pylab.ylabel(r"angular diameter distance $D_A/D_H$")
pylab.title("compare to " + inspect.stack()[0][3].replace('test_', '') +
" (astro-ph/9905116v4)")
def test_figure3():
"""Plot Hogg fig. 3: The dimensionless luminosity distance DL/DH
The three curves are for the three world models,
- Einstein-de Sitter (omega_M, omega_lambda) = (1, 0) [solid]
: Low-density (0.05, 0) [dotted]
-- High lambda, (0.2, 0.8) [dashed]
Hubble distance DH = c / H0
z from 0--5
DL / DH from 0--16
"""
z = numpy.arange(0, 5.05, 0.05)
cosmo = {}
cosmo['omega_M_0'] = numpy.array([[1.0],[0.05],[0.2]])
cosmo['omega_lambda_0'] = numpy.array([[0.0],[0.0],[0.8]])
cosmo['h'] = 0.5
cd.set_omega_k_0(cosmo)
linestyle = ['-', ':', '--']
dh = cd.hubble_distance_z(0, **cosmo)
dl = cd.luminosity_distance(z, **cosmo)
pylab.figure(figsize=(6,6))
for i in range(len(linestyle)):
pylab.plot(z, (dl/dh)[i], ls=linestyle[i])
pylab.xlim(0,5)
pylab.ylim(0,16)
pylab.xlabel("redshift z")
pylab.ylabel(r"luminosity distance $D_L/D_H$")
pylab.title("compare to " + inspect.stack()[0][3].replace('test_', '') +
" (astro-ph/9905116v4)")
def test_figure5():
"""Plot Hogg fig. 5: The dimensionless comoving volume element (1/DH)^3(dVC/dz).
The three curves are for the three world models, (omega_M, omega_lambda) =
(1, 0), solid; (0.05, 0), dotted; and (0.2, 0.8), dashed.
"""
z = numpy.arange(0, 5.05, 0.05)
cosmo = {}
cosmo['omega_M_0'] = numpy.array([[1.0],[0.05],[0.2]])
cosmo['omega_lambda_0'] = numpy.array([[0.0],[0.0],[0.8]])
cosmo['h'] = 0.5
cd.set_omega_k_0(cosmo)
linestyle = ['-', ':', '--']
dh = cd.hubble_distance_z(0, **cosmo)
dVc = cd.diff_comoving_volume(z, **cosmo)
dVc_normed = dVc/(dh**3.)
Vc = cd.comoving_volume(z, **cosmo)
dz = z[1:] - z[:-1]
dVc_numerical = (Vc[:,1:] - Vc[:,:-1])/dz/(4. * numpy.pi)
dVc_numerical_normed = dVc_numerical/(dh**3.)
pylab.figure(figsize=(6,6))
for i in range(len(linestyle)):
pylab.plot(z, dVc_normed[i], ls=linestyle[i], lw=2.)
pylab.plot(z[:-1], dVc_numerical_normed[i], ls=linestyle[i],
c='k', alpha=0.1)
pylab.xlim(0,5)
pylab.ylim(0,1.1)
pylab.xlabel("redshift z")
pylab.ylabel(r"comoving volume element $[1/D_H^3]$ $dV_c/dz/d\Omega$")
pylab.title("compare to " + inspect.stack()[0][3].replace('test_', '') +
" (astro-ph/9905116v4)")
def test_figure6():
"""Plot Hogg fig. 6: The dimensionless lookback time t_L/t_H and age t/t_H.
The three curves are for the three world models,
- Einstein-de Sitter (omega_M, omega_lambda) = (1, 0) [solid]
: Low-density (0.05, 0) [dotted]
-- High lambda, (0.2, 0.8) [dashed]
Hubble distance DH = c / H0
z from 0--5
t/th from 0--1.2
"""
z = numpy.arange(0, 5.05, 0.05)
cosmo = {}
cosmo['omega_M_0'] = numpy.array([[1.0],[0.05],[0.2]])
cosmo['omega_lambda_0'] = numpy.array([[0.0],[0.0],[0.8]])
cosmo['h'] = 0.5
cd.set_omega_k_0(cosmo)
linestyle = ['-', ':', '--']
th = 1/ cd.hubble_z(0, **cosmo)
tl = cd.lookback_time(z, **cosmo)
age = cd.age(z, **cosmo)
pylab.figure(figsize=(6,6))
for i in range(len(linestyle)):
pylab.plot(z, (tl/th)[i], ls=linestyle[i])
pylab.plot(z, (age/th)[i], ls=linestyle[i])
pylab.xlim(0,5)
pylab.ylim(0,1.2)
pylab.xlabel("redshift z")
pylab.ylabel(r"lookback timne $t_L/t_H$")
pylab.title("compare to " + inspect.stack()[0][3].replace('test_', '') +
" (astro-ph/9905116v4)")
if __name__ == "__main__":
test_figure1()
test_figure2()
test_figure3()
test_figure5()
test_figure6()
pylab.show()
# ===== File: stats/SumM3Thresholder.py =====
#!/usr/bin/env python
# coding=utf-8
#
# ITHI Kafka prototype, consume M3 analysis as they are produced, creates and updates SumM3 files
import sys
import codecs
import datetime
from enum import Enum
import copy
import traceback
import datetime
import math
import m3name
import m3summary
from confluent_kafka import Consumer, Producer
from SumM3Lib import sumM3Message, sumM3Thresholder
# Ack function to detect whether Kafka is still running.
def m3ThresholderAcked(err, msg):
if err is not None:
print("Failed to deliver sumM3 threshold message: %s: %s" % (str(msg), str(err)))
exit(1)
# check the calling arguments
if len(sys.argv) != 3:
print("Usage: " + sys.argv[0] + " <bootstrap.servers> <nb_hours>\n")
exit(1)
try:
nb_hours = int(sys.argv[2], 10)
except:
print("Cannot parse the number of hours: " + sys.argv[2]);
exit(1)
print("bootstrap.servers: " + sys.argv[1])
print("nb hours: " + str(nb_hours))
# create a table of node instances
thr = sumM3Thresholder(nb_hours)
# Create Kafka Consumer instance
c = Consumer({
'bootstrap.servers': sys.argv[1],
'group.id': 'sumM3Consumer'
})
# Subscribe to topic 'm3Analysis'
c.subscribe(['m3Analysis'])
# Create a provider instance.
p = Producer({'bootstrap.servers': sys.argv[1]})
# Process messages
try:
while True:
try:
s3msg_in = sumM3Message()
s3msg_in.poll_kafka(c, 300.0)
if s3msg_in.topic == "":
print("No good message for 300 sec.")
else:
# Check whether this message triggers a threshold
if thr.checkList(s3msg_in):
# this message needs re-broadcasting
msg = thr.node_list[s3msg_in.node_dns].to_string()
print("Sending: " + msg)
p.produce("m3Thresholder", msg.encode(encoding='utf-8', errors='strict'), callback=m3ThresholderAcked)
thr.update(s3msg_in)
except KeyboardInterrupt:
break;
except Exception:
traceback.print_exc()
print("Cannot process m3analysis message: " + s3msg_in.to_string())
break
except KeyboardInterrupt:
pass
finally:
# Leave group and commit final offsets
p.flush()
c.close()
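# Example invocation (assuming a local Kafka broker):
#   python SumM3Thresholder.py localhost:9092 4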
# ===== File (repo: ArkGame/ArkGameFrame) =====
#!/usr/bin/python
# encoding=utf-8
# author: NickYang
# date: 2019/04/02
from openpyxl import load_workbook
from openpyxl.styles import Border, Side, Font
import time
class my_excel(object):
def __init__(self, excelPath):
        self.excelPath = excelPath
        self.workbook = load_workbook(excelPath)  # load the workbook
        self.sheet = self.workbook.active         # default to the first (active) sheet
        self.font = Font(color=None)
        self.colorDict = {"red": 'FFFF3030', "green": 'FF008B00'}
        self.sheet_names = []                     # list of sheet names

    # Get the list of sheet names
    def get_sheet_names(self):
        # workbook.get_sheet_names() is deprecated in openpyxl; use .sheetnames
        self.sheet_names = self.workbook.sheetnames
        return self.sheet_names

    # Select the sheet to operate on, looked up by index
    def set_sheet_by_index(self, sheet_index):
        sheet_name = self.workbook.sheetnames[sheet_index]
        self.sheet = self.workbook[sheet_name]
        return self.sheet

    # Get the name of the currently selected sheet
    def get_default_sheet_name(self):
        return self.sheet.title

    # Get the sheet name at the given index
    def get_sheet_name_by_index(self, sheet_index):
        return self.workbook.sheetnames[sheet_index]

    # Select the sheet to operate on, looked up by name
    def set_sheet_by_name(self, sheet_name):
        self.sheet = self.workbook[sheet_name]
        return self.sheet

    # Get the largest row number of the current sheet
    def get_max_row_no(self):
        return self.sheet.max_row

    # Get the largest column number of the current sheet
    def get_max_col_no(self):
        return self.sheet.max_column

    # Get the smallest (starting) row number of the current sheet
    def get_min_row_no(self):
        return self.sheet.min_row

    # Get the smallest (starting) column number of the current sheet
    def get_min_col_no(self):
        return self.sheet.min_column

    # Get all row objects of the current sheet
    def get_all_rows(self):
        return list(self.sheet.iter_rows())
        # list(self.sheet.rows) works as well

    # Get all column objects of the current sheet
    def get_all_cols(self):
        return list(self.sheet.iter_cols())
        # list(self.sheet.columns) works as well

    # Get a single column from the current sheet; the first column is 0
    def get_single_col(self, col_no):
        return self.get_all_cols()[col_no]

    # Get a single row from the current sheet; note the lookup is 0-based
    # (the original comment claimed rows start at 1, but the list index is 0-based)
    def get_single_row(self, row_no):
        return self.get_all_rows()[row_no]

    # Get the cell at the given row and column; row and column numbers start at 1
    def get_cell(self, row_no, col_no):
        return self.sheet.cell(row=row_no, column=col_no)

    # Get the content of the cell at the given row and column (1-based)
    def get_cell_content(self, row_no, col_no):
        return self.sheet.cell(row=row_no, column=col_no).value

    # Write content into the cell at the given row and column (1-based).
    # The workbook must not be open in Excel while calling this method.
    def write_cell_content(self, row_no, col_no, content, font=None):
        self.sheet.cell(row=row_no, column=col_no).value = content
        self.workbook.save(self.excelPath)
        return self.sheet.cell(row=row_no, column=col_no).value

    # Write the current date/time into the cell at the given row and column
    # (1-based). The workbook must not be open in Excel while calling this method.
    def write_cell_current_time(self, row_no, col_no):
        time1 = time.strftime("%Y-%m-%d %H:%M:%S")
        self.sheet.cell(row=row_no, column=col_no).value = str(time1)
        self.workbook.save(self.excelPath)
        return self.sheet.cell(row=row_no, column=col_no).value

    def save_excel_file(self):
        self.workbook.save(self.excelPath)
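# Example usage (a minimal sketch; the path is illustrative):
#   excel = my_excel('data.xlsx')
#   excel.set_sheet_by_index(0)
#   print(excel.get_cell_content(1, 1))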
# ===== File boundary =====
import enum
import time
from datetime import timedelta
from uuid import uuid4
import boto3
from celery.decorators import periodic_task
from celery.schedules import crontab
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.mail import EmailMessage
from django.template.loader import render_to_string
from django.utils.timezone import localtime, now
from hardcopy import bytestring_to_pdf
from care.facility.models.patient import PatientRegistration
from care.facility.models.patient_base import CATEGORY_CHOICES
from care.facility.models.shifting import SHIFTING_STATUS_CHOICES, ShiftingRequest
from care.users.models import District, State, User
from care.utils.whatsapp.send_media_message import generate_whatsapp_message
@periodic_task(run_every=crontab(minute="0", hour="8"))
def run_scheduled_district_reports():
AdminReports(AdminReportsMode.DISTRICT).generate_reports()
class InvalidModeException(Exception):
pass
class UploadNotSupported(Exception):
pass
class AdminReportsMode(enum.Enum):
STATE = "State"
DISTRICT = "District"
class AdminReports:
mode = None
filter_field = ""
unique_object_ids = []
start_date = None
end_date = None
def fetch_unique_districts(self) -> None:
self.unique_object_ids = list(
User.objects.filter(user_type=User.TYPE_VALUE_MAP["DistrictAdmin"], district__isnull=False)
.values_list("district_id", flat=True)
.distinct()
)
def fetch_unique_states(self) -> None:
self.unique_object_ids = list(
User.objects.filter(user_type=User.TYPE_VALUE_MAP["StateAdmin"], state__isnull=False)
.values_list("state_id", flat=True)
.distinct()
)
def __init__(self, mode) -> None:
self.mode = mode
if mode == AdminReportsMode.DISTRICT:
self.filter_field = "district_id"
self.fetch_unique_districts()
elif mode == AdminReportsMode.STATE:
self.filter_field = "state_id"
self.fetch_unique_states()
else:
raise InvalidModeException
self.start_date = (localtime(now()) - timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
self.end_date = self.start_date + timedelta(days=1)
def get_object_name(self, object_id):
if self.mode == AdminReportsMode.STATE:
return State.objects.get(id=object_id).name
elif self.mode == AdminReportsMode.DISTRICT:
return District.objects.get(id=object_id).name
def upload_file(self, file_name):
if not settings.USE_S3:
raise UploadNotSupported()
file_path = default_storage.path(file_name)
with open(file_path, "rb") as f:
file_content = f.read()
s3Client = boto3.client(
"s3",
region_name="ap-south-1",
aws_access_key_id=settings.AWS_ACCESS_KEY_ID,
aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY,
)
key = "reports/" + str(uuid4()) + str(int(time.time())) + ".pdf"
s3Client.put_object(
Bucket=settings.AWS_STORAGE_BUCKET_NAME,
Key=key,
Body=file_content,
ContentType="application/pdf",
ACL="public-read",
)
return f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/{key}"
# Summary Functions
def calculate_patient_summary(self, base_filters):
return_dict = {}
base_queryset = PatientRegistration.objects.filter(**base_filters)
return_dict["current_active"] = base_queryset.filter(is_active=True).count()
return_dict["created_today"] = base_queryset.filter(
is_active=True, created_date__gte=self.start_date, created_date__lte=self.end_date
).count()
return_dict["discharged_today"] = base_queryset.filter(
is_active=False,
last_consultation__discharge_date__gte=self.start_date,
last_consultation__discharge_date__lt=self.end_date,
).count()
return return_dict
    def calculate_patient_age_summary(self, base_filters):
        return_list = []
        base_queryset = PatientRegistration.objects.filter(**base_filters)
        age_brackets = [(0, 20), (20, 40), (40, 60), (60, 80), (80, 120)]
        for bracket in age_brackets:
            count = base_queryset.filter(
                is_active=True,
                created_date__gte=self.start_date,
                created_date__lte=self.end_date,
                age__gte=bracket[0],
                age__lt=bracket[1],
            ).count()
            return_list.append({"total_count": count, "title": f"{bracket[0]}-{bracket[1]}"})
        return return_list
    def calculate_patient_category_summary(self, base_filters):
return_list = []
base_queryset = PatientRegistration.objects.filter(**base_filters)
for category in CATEGORY_CHOICES:
count = base_queryset.filter(
is_active=True,
created_date__gte=self.start_date,
created_date__lte=self.end_date,
last_consultation__category=category[0],
).count()
return_list.append({"total_count": count, "title": category[1]})
return return_list
def calculate_shifting_summary(self, base_filters):
return_dict = {}
base_queryset = ShiftingRequest.objects.filter(**base_filters)
today_queryset = base_queryset.filter(created_date__gte=self.start_date, created_date__lte=self.end_date)
return_dict["total_up"] = today_queryset.filter(is_up_shift=True).count()
return_dict["total_down"] = today_queryset.filter(is_up_shift=False).count()
return_dict["total_count"] = return_dict["total_up"] + return_dict["total_down"]
return return_dict
def calculate_shifting_status_summary(self, base_filters):
return_list = []
base_queryset = ShiftingRequest.objects.filter(**base_filters)
today_queryset = base_queryset.filter(created_date__gte=self.start_date, created_date__lte=self.end_date)
for status in SHIFTING_STATUS_CHOICES:
total = today_queryset.filter(status=status[0]).count()
emergency = today_queryset.filter(status=status[0], emergency=True).count()
return_list.append({"total_count": total, "emergency_count": emergency, "status": status[1]})
return return_list
def generate_report_data(self, object_id):
final_data = {}
base_filters = {self.filter_field: object_id}
shifting_base_filter = {"patient__" + self.filter_field: object_id}
final_data["patients_summary"] = self.calculate_patient_summary(base_filters)
final_data["patients_age"] = self.caluclate_patient_age_summary(base_filters)
final_data["patients_categories"] = self.caluclate_patient_category_summary(base_filters)
final_data["shifting_summary"] = self.calculate_shifting_summary(shifting_base_filter)
final_data["shifting_status"] = self.calculate_shifting_status_summary(shifting_base_filter)
return final_data
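    # The resulting dict roughly looks like (values illustrative):
    #   {'patients_summary': {'current_active': 10, 'created_today': 2, ...},
    #    'patients_age': [{'total_count': 3, 'title': '0-20'}, ...],
    #    'shifting_summary': {'total_up': 1, 'total_down': 0, 'total_count': 1}, ...}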
def generate_reports(self):
for object_id in self.unique_object_ids:
data = self.generate_report_data(object_id)
data["object_type"] = self.mode.value
object_name = self.get_object_name(object_id)
data["object_name"] = object_name
data["current_date"] = str(self.start_date.date())
html_string = render_to_string("reports/daily_report.html", data)
file_name = str(int(round(time.time() * 1000))) + str(object_id) + ".pdf"
bytestring_to_pdf(
html_string.encode(),
default_storage.open(file_name, "w+"),
**{
"no-margins": None,
"disable-gpu": None,
"disable-dev-shm-usage": False,
"window-size": "2480,3508",
},
)
self.send_reports(object_name, {self.filter_field: object_id}, file_name)
default_storage.delete(file_name)
def send_email_report(self, object_name, file_name, user):
if not user.email:
return
file = default_storage.open(file_name, "rb")
msg = EmailMessage(
f"Care Summary : {self.mode.value} {object_name} : {self.start_date.date()}",
"Please find the attached report",
settings.DEFAULT_FROM_EMAIL,
(user.email,),
)
msg.content_subtype = "html"
msg.attach(f"{self.mode.value}Report.pdf", file.read(), "application/pdf")
msg.send()
def send_whatsapp_report(self, object_name, public_url, user):
if not user.alt_phone_number:
return
generate_whatsapp_message(object_name, public_url, user.alt_phone_number)
def send_reports(self, object_name, base_filters, file_name):
users = User.objects.all()
if self.mode == AdminReportsMode.STATE:
users = users.filter(user_type=User.TYPE_VALUE_MAP["StateAdmin"], **base_filters)
elif self.mode == AdminReportsMode.DISTRICT:
users = users.filter(user_type=User.TYPE_VALUE_MAP["DistrictAdmin"], **base_filters)
try:
public_url = self.upload_file(file_name)
except UploadNotSupported:
public_url = None
for user in users:
self.send_email_report(object_name, file_name, user)
if public_url:
self.send_whatsapp_report(object_name, public_url, user)
# ===== File (repo: HypoChloremic/fcsan) =====
from analyze import Analyze
import argparse
# ap = argparse.ArgumentParser()
# ap.addargument("-f", "--folder")
# opts = ap.parse_args()
run = Analyze()
run.read()
files = run.files
def indexer():
with open("FACS_INDEX.txt", "w") as file:
for i in files:
run.read(i)
meta = run.meta
str_to_save = f"File: {meta['$FIL']},Date: {meta['$DATE']},\n"
file.write(str_to_save)
indexer()

# ===== File (repo: msbentley/pds4_utils, 1-10 GitHub stars) =====
#!/usr/bin/python
"""
read.py
"""
from . import common
import os
from pathlib import Path
import pandas as pd
from pds4_tools import pds4_read
from pds4_tools.reader.table_objects import TableManifest
# only show warning or higher messages from PDS4 tools
import logging
pds4_logger = logging.getLogger('PDS4ToolsLogger')
pds4_logger.setLevel(logging.WARNING)
log = logging.getLogger(__name__)
class pds4_df(pd.DataFrame):
"""
Sub-class of pd.DataFrame adding extra meta-data:
-- filename
-- path
"""
_metadata = ['filename', 'path']
@property
def _constructor(self):
return pds4_df
def read_tables(label, label_directory='.', recursive=False, table_name=None, index_col=None, add_filename=False, quiet=False):
"""
Accepts a directory and file-pattern or list and attempts to load the specified table
(or first table, if none is specified) into a merged DataFrane. If the tables
have different columns, tables will be merged.
index_col= can be used to specify the column which will be used as an index to the frame.
add_filename= will add a new column including the label filename if True - this can be
useful to distinguish between otherwise identical records from several products.
"""
if recursive:
selectfiles = common.select_files(label, directory=label_directory, recursive=recursive)
file_list = [file for file in selectfiles]
elif type(label) == list:
if label_directory != '.':
file_list = [os.path.join(label_directory, file) for file in label]
else:
file_list = label
else:
import glob
file_list = glob.glob(os.path.join(label_directory, label))
if len(file_list) == 0:
log.warning('no files found matching pattern {:s}'.format(label))
return None
table = None
# de-dupe list
file_list = list(set(file_list))
for f in file_list:
if table is None:
table = read_table(f, table_name=table_name, index_col=index_col, quiet=quiet)
cols = table.columns
if add_filename:
table['filename'] = table.filename
else:
temp_table = read_table(f, table_name=table_name, index_col=index_col, quiet=quiet)
if temp_table is None:
continue
if set(temp_table.columns) != set(cols):
log.warning('product has different columns names, skipping')
continue
if add_filename:
temp_table['filename'] = temp_table.filename
# table = table.append(temp_table)
table = pd.concat([table, temp_table], axis=0, join='inner')
table.sort_index(inplace=True)
log.info('{:d} files read with {:d} total records'.format(len(file_list), len(table)))
return table
def read_table(label_file, table_name=None, index_col=None, quiet=True):
"""
Reads data from a PDS4 product using pds4_tools. Data are
converted to a Pandas DataFrame and any columns that are
using PDS4 time data types are converted to Timestamps.
By default the first table is read, otherwise the
table_name can be used to specify.
If index_col is set, this field will be used as an index in
the returned pandas DataFrame, otherwise if a time field
is present this will be used.
NOTE: only simple 2D tables can currently be read. Group
fields are skipped with a warning message!
"""
data = pds4_read(label_file, quiet=True)
labelpath = Path(label_file)
num_arrays = 0
tables = []
for structure in data.structures:
if structure.is_array():
num_arrays += 1
elif structure.is_table():
tables.append(structure.id)
if len(tables) == 0:
log.error('no tables found in this product')
return None
if not quiet:
log.info('product {:s} has {:d} tables and {:d} arrays'.format(labelpath.name, len(tables), num_arrays))
if table_name is not None:
if table_name in tables:
table = data[table_name]
else:
log.error('table name {:s} not found in product'.format(table_name))
return None
else:
table = data[tables[0]]
if not quiet:
log.info('using table {:s}'.format(table.id))
# clunky way to get the names of group fields to ignore for now
table_manifest = TableManifest.from_label(data[table.id].label)
time_cols = []
fields = []
group_fields = []
for i in range(len(table_manifest)):
if table_manifest[i].is_group():
continue
name = table_manifest[i].full_name()
if table_manifest.get_parent_by_idx(i):
group_fields.append(table_manifest[i].full_name())
continue
fields.append(name)
data_type = table_manifest[i]['data_type']
if 'Date' in data_type:
time_cols.append(name)
# TODO: fix nested tables (group fields)
# TODO: fix handling of masked arrays (in particular missing vals in CSVs trigger this)
data = pds4_df(table.data, columns=fields)
for field in fields:
data[field] = table.data[field]
for group_field in group_fields:
field_name = group_field.split(',')[1].strip()
field_data = table[group_field]
if field_data.shape[0] != len(data):
            log.warning('group field length does not match table length - skipping!')
continue
data[field_name] = None
for idx in range(len(data)):
data[field_name].iat[idx] = field_data[idx]
path, filename = os.path.split(label_file)
data.path = path
data.filename = filename
for col in time_cols:
data[col] = pd.to_datetime(data[col]).dt.tz_localize(None)
if index_col is not None:
if index_col in fields:
data.set_index(index_col, drop=True, inplace=True)
            log.info('data indexed with field {:s}'.format(index_col))
else:
            log.warning('requested index field {:s} not found'.format(index_col))
index_col=None
if index_col is None:
if len(time_cols)==0:
log.warning('no time-based columns found, returned data will not be time-indexed')
elif len(time_cols)==1:
data.set_index(time_cols[0], drop=True, inplace=True)
log.info('data time-indexed with field {:s}'.format(time_cols[0]))
else:
if 'TIME_UTC' in data.columns:
data.set_index('TIME_UTC', drop=True, inplace=True)
                log.info('data time-indexed with field TIME_UTC')
else:
data.set_index(time_cols[0], drop=True, inplace=True)
log.info('data time-indexed with field {:s}'.format(time_cols[0]))
return data
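# Example usage (a minimal sketch; the label paths are illustrative):
#   df = read_table('bundle/data/table.xml', quiet=False)
#   merged = read_tables('*.xml', label_directory='bundle/data', add_filename=True)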
# ===== File boundary =====
import ui_common as uic
from django.shortcuts import redirect
import django.contrib.messages
import metadata
import ezid
import form_objects
import ezidapp.models
import re
import datacite_xml
import os.path
import userauth
from django.utils.translation import ugettext as _
"""
Handles simple and advanced ID creation. If d['id_gen_result'] == 'edit_page'
the user is either about to create an ID, or there is an error condition
(typically field validation) that the user is asked to correct.
"""
def _validationErr(action):
return (
_("Identifier could not be ")
+ action
+ _(" as submitted. Please check the highlighted fields below for details.")
)
def index(request):
d = {'menu_item': 'ui_create.index'}
return redirect("ui_create.simple")
@uic.user_login_required
def simple(request):
d = {'menu_item': 'ui_create.simple'}
d["testPrefixes"] = uic.testPrefixes
user = userauth.getUser(request)
if user.isSuperuser:
shoulders = [s for s in ezidapp.models.getAllShoulders() if not s.isTest]
else:
shoulders = user.shoulders.all()
d["prefixes"] = sorted(
[{"namespace": s.name, "prefix": s.prefix} for s in shoulders],
key=lambda p: ("%s %s" % (p["namespace"], p["prefix"])).lower(),
)
if len(d['prefixes']) < 1:
return uic.render(request, 'create/no_shoulders', d)
d = simple_form(request, d)
return uic.renderIdPage(request, 'create/simple', d)
@uic.user_login_required
def advanced(request):
d = {'menu_item': 'ui_create.advanced'}
d["testPrefixes"] = uic.testPrefixes
user = userauth.getUser(request)
if user.isSuperuser:
shoulders = [s for s in ezidapp.models.getAllShoulders() if not s.isTest]
else:
shoulders = user.shoulders.all()
d["prefixes"] = sorted(
[{"namespace": s.name, "prefix": s.prefix} for s in shoulders],
key=lambda p: ("%s %s" % (p["namespace"], p["prefix"])).lower(),
)
if len(d['prefixes']) < 1:
return uic.render(request, 'create/no_shoulders', d)
d = adv_form(request, d)
return uic.renderIdPage(request, 'create/advanced', d)
def simple_form(request, d):
""" Create simple identifier code shared by 'Create ID' and 'Demo' pages.
Takes request and context object, d['prefixes'] should be set before calling.
Returns dictionary with d['id_gen_result'] of either 'method_not_allowed', 'bad_request',
'edit_page' or 'created_identifier: <new_id>'. If process is as expected, also includes
a form object containing posted data and any related errors. """
if request.method == "GET":
REQUEST = request.GET
elif request.method == "POST":
REQUEST = request.POST
else:
d['id_gen_result'] = 'method_not_allowed'
return d
# selects current_profile based on parameters or profile preferred for prefix type
d['internal_profile'] = metadata.getProfile('internal')
if 'current_profile' in REQUEST:
d['current_profile'] = metadata.getProfile(REQUEST['current_profile'])
if d['current_profile'] == None:
d['current_profile'] = metadata.getProfile('erc')
else:
if len(d['prefixes']) > 0 and d['prefixes'][0]['prefix'].startswith('doi:'):
d['current_profile'] = metadata.getProfile('datacite')
else:
d['current_profile'] = metadata.getProfile('erc')
if "form_placeholder" not in d:
d['form_placeholder'] = None
if request.method == "GET":
# Begin ID Creation (empty form)
d['form'] = form_objects.getIdForm(
d['current_profile'], d['form_placeholder'], None
)
d['id_gen_result'] = 'edit_page'
else:
if "current_profile" not in REQUEST or "shoulder" not in REQUEST:
d['id_gen_result'] = 'bad_request'
return d
d['form'] = form_objects.getIdForm(
d['current_profile'], d['form_placeholder'], REQUEST
)
pre_list = [pr['prefix'] for pr in d['prefixes']]
if not _verifyProperShoulder(request, REQUEST, pre_list):
d['id_gen_result'] = 'edit_page'
return d
if d['form'].is_valid():
d = _createSimpleId(d, request, REQUEST)
else:
django.contrib.messages.error(request, _validationErr(_("created")))
d['id_gen_result'] = 'edit_page'
return d
def adv_form(request, d):
""" Like simple_form. Takes request and context object. d['prefixes'] should be set
    before calling. Includes additional features:
custom remainder - optional
manual_profile - If true, use custom Datacite XML template
profile_names - User can choose from different profiles
"""
# selects current_profile based on parameters or profile preferred for prefix type
d['manual_profile'] = False
choice_is_doi = False
# Form will be GET request when flipping between shoulders and profiles. Otherwise it's a POST.
if request.method == "GET":
REQUEST = request.GET
elif request.method == "POST":
REQUEST = request.POST
else:
d['id_gen_result'] = 'method_not_allowed'
return d
if ('shoulder' in REQUEST and REQUEST['shoulder'].startswith("doi:")) or (
len(d['prefixes']) > 0 and d['prefixes'][0]['prefix'].startswith('doi:')
):
choice_is_doi = True
if 'current_profile' in REQUEST:
if REQUEST['current_profile'] in uic.manual_profiles:
d = _engage_datacite_xml_profile(request, d, 'datacite_xml')
else:
d['current_profile'] = metadata.getProfile(REQUEST['current_profile'])
if d['current_profile'] == None:
d['current_profile'] = metadata.getProfile('erc')
else:
if choice_is_doi == True:
d = _engage_datacite_xml_profile(request, d, 'datacite_xml')
else:
d['current_profile'] = metadata.getProfile('erc')
if d['manual_profile'] == False:
d['current_profile_name'] = d['current_profile'].name
d['internal_profile'] = metadata.getProfile('internal')
d['profiles'] = [p for p in metadata.getProfiles()[1:] if p.editable]
    profs = [
        (p.name, p.displayName,) for p in d['profiles']
    ] + list(uic.manual_profiles.items())  # list() so this also works where items() is a dict view
d['profile_names'] = sorted(profs, key=lambda p: p[1].lower())
# 'datacite_xml' used for advanced profile instead of 'datacite'
d['profile_names'].remove(('datacite', 'DataCite'))
# [TODO: Enhance advanced DOI ERC profile to allow for elements ERC + datacite.publisher or
# ERC + dc.publisher.] For now, just hide this profile.
if choice_is_doi:
d['profile_names'].remove(('erc', 'ERC'))
# Preserve remainder from GET request
if 'remainder' in REQUEST:
d['remainder'] = REQUEST['remainder']
d['remainder_box_default'] = form_objects.REMAINDER_BOX_DEFAULT
if request.method == "GET":
# Begin ID Creation (empty form)
if d['current_profile_name'] == 'datacite_xml':
d['form'] = form_objects.getIdForm_datacite_xml()
else:
d['form'] = form_objects.getAdvancedIdForm(d['current_profile'], request)
d['id_gen_result'] = 'edit_page'
if 'anchor' in REQUEST:
d['anchor'] = REQUEST['anchor']
else: # request.method == "POST"
P = REQUEST
pre_list = [p['prefix'] for p in d['prefixes'] + d['testPrefixes']]
if not _verifyProperShoulder(request, P, pre_list):
d['id_gen_result'] = 'edit_page'
return d
if d['current_profile_name'] == 'datacite_xml':
d = validate_adv_form_datacite_xml(request, d)
if 'id_gen_result' in d:
return d
d = _createAdvancedId(d, request, P)
else:
if "current_profile" not in P or "shoulder" not in P:
d['id_gen_result'] = 'bad_request'
return d
d['form'] = form_objects.getAdvancedIdForm(d['current_profile'], request)
if not (
d['form']['form'].is_valid() and d['form']['remainder_form'].is_valid()
):
django.contrib.messages.error(request, _validationErr(_("created")))
d['id_gen_result'] = 'edit_page'
else:
d = _createAdvancedId(d, request, P)
return d
def _engage_datacite_xml_profile(request, d, profile_name):
# Hack: For now, this is the only manual profile
d['current_profile'] = metadata.getProfile('datacite')
d['manual_profile'] = True
d['current_profile_name'] = profile_name
d['manual_template'] = 'create/_' + d['current_profile_name'] + '.html'
d['polygon_view'] = 'view'
return d
def validate_adv_form_datacite_xml(request, d):
""" Creates/validates datacite advanced (xml) form object using request.POST
from both create/demo and edit areas
Either sets d['id_gen_result'] = 'edit_page', (due to validation issue)
or successfully generates XML (sets d['generated_xml'])
"""
P = request.POST
assert P is not None
identifier = None
if P['action'] == 'create':
action_result = _("created")
else: # action='edit'
action_result = _("modified")
if not P['identifier']:
django.contrib.messages.error(
request, _("Unable to edit. Identifier not supplied.")
)
d['id_gen_result'] = 'edit_page'
return d
identifier = P['identifier']
d['form'] = form_objects.getIdForm_datacite_xml(None, request)
if not form_objects.isValidDataciteXmlForm(d['form']):
django.contrib.messages.error(request, _validationErr(action_result))
d['accordions_open'] = 'open'
d['id_gen_result'] = 'edit_page'
else:
# Testing:
# temp_formElements = datacite_xml.temp_mockFormElements()
# d['generated_xml'] = datacite_xml.temp_mock()
d['generated_xml'] = datacite_xml.formElementsToDataciteXml(
P.dict(),
# temp_formElements,
(P['shoulder'] if 'shoulder' in P else None),
identifier,
)
return d
def _createSimpleId(d, request, P):
s = ezid.mintIdentifier(
request.POST["shoulder"],
userauth.getUser(request, returnAnonymous=True),
uic.assembleUpdateDictionary(
request, d["current_profile"], {"_target": P["target"], "_export": "yes"}
),
)
if s.startswith("success:"):
new_id = s.split()[1]
django.contrib.messages.success(request, _("Identifier Created."))
d["id_gen_result"] = "created_identifier: " + new_id
else:
err = _("Identifier could not be created as submitted") + ": " + s
django.contrib.messages.error(request, err)
d["id_gen_result"] = "edit_page"
return d
def _createAdvancedId(d, request, P):
""" Like _createSimpleId, but also checks for elements on advanced create page:
_status and _export variables; Adds datacite_xml if present. If no remainder
is supplied, simply mints an ID """
# ToDo: Clean this up
if d['current_profile'].name == 'datacite' and 'generated_xml' in d:
to_write = {
"_profile": 'datacite',
'_target': P['target'],
"_status": ("public" if P["publish"] == "True" else "reserved"),
"_export": ("yes" if P["export"] == "yes" else "no"),
"datacite": d['generated_xml'],
}
else:
to_write = uic.assembleUpdateDictionary(
request,
d['current_profile'],
{
'_target': P['target'],
"_status": ("public" if P["publish"] == "True" else "reserved"),
"_export": ("yes" if P["export"] == "yes" else "no"),
},
)
if P['remainder'] == '' or P['remainder'] == form_objects.REMAINDER_BOX_DEFAULT:
s = ezid.mintIdentifier(
P['shoulder'], userauth.getUser(request, returnAnonymous=True), to_write
)
else:
s = ezid.createIdentifier(
P['shoulder'] + P['remainder'],
userauth.getUser(request, returnAnonymous=True),
to_write,
)
if s.startswith("success:"):
new_id = s.split()[1]
django.contrib.messages.success(request, _("Identifier Created."))
d['id_gen_result'] = 'created_identifier: ' + new_id
else:
if "-" in s:
err_msg = re.search(r'^error: .+?- (.+)$', s).group(1)
else:
err_msg = re.search(r'^error: (.+)$', s).group(1)
django.contrib.messages.error(
request, _("There was an error creating your identifier") + ": " + err_msg
)
d['accordions_open'] = 'open'
d['id_gen_result'] = 'edit_page'
return d
def _verifyProperShoulder(request, P, pre_list):
if P['shoulder'] not in pre_list:
django.contrib.messages.error(
request,
_("Unauthorized to create with this identifier prefix")
+ ": "
+ P['shoulder'],
)
return False
return True
| StarcoderdataPython |
3285387 | <reponame>yaowenlong/clique
from pypai import PAI
# Create a PAI cluster
pai = PAI(username='ywl1918', passwd='<PASSWORD>')
# Submit job
pai.submit()
| StarcoderdataPython |
3249425 | <reponame>Pzqqt/MaoMiAV_Videos_Downloader<filename>m3u8_downloader.py
#!/usr/bin/env python3
# encoding: utf-8
import os
import re
import shutil
import tempfile
from time import sleep
from concurrent.futures import ThreadPoolExecutor
from argparse import ArgumentParser
import requests
REQ_HEADERS = {
"User-Agent": ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
"(KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36")
}
class M3u8Downloader:
req_timeout = 15
def __init__(self, url, jobs, proxies, output):
self.url = url
self.jobs = self.set_jobs(jobs)
self.proxies = proxies
self.output = self.adj_file_name(output)
@staticmethod
def set_jobs(jobs):
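        """ Clamp jobs to the range [1, 32], rounding down to a power of two
        (defaulting to 8 otherwise). """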
if jobs <= 1:
return 1
if jobs >= 32:
return 32
jobs_list = [2**x for x in range(6)]
for x in range(5):
if jobs_list[x] <= jobs < jobs_list[x+1]:
return jobs_list[x]
return 8
def run(self):
urls, key_method, key_content = self.get_m3u8(self.url, self.proxies)
temp_dir = tempfile.mkdtemp(prefix="m3u8_")
try:
print("\nDownload ts files...")
file_list = download_file_all(
max_threads_num=self.jobs,
temp_dir=temp_dir,
proxies=self.proxies,
urls=urls,
key_method=key_method,
key_content=key_content,
)
print("\nMerge files...")
self.merge_ts(
temp_dir=temp_dir,
src_files=file_list,
dst_file=self.output
)
file2dir(os.path.join(temp_dir, self.output), os.getcwd())
finally:
remove_path(temp_dir)
print("\nDone!")
@classmethod
def get_m3u8(cls, url, proxies=""):
base_url = url.rsplit("/", 1)[0]
req = requests.get(
url,
headers=REQ_HEADERS, timeout=10, proxies={"http": proxies, "https": proxies}
)
req.raise_for_status()
req_text = req.text
if "#EXT-X-STREAM-INF" in req_text:
return cls.get_m3u8(base_url+"/"+cls.select_m3u8_stream(req_text), proxies)
if "#EXT-X-KEY" in req_text:
re_pattern = r'#EXT-X-KEY:METHOD=(.*?),URI="(.*?)"'
re_result = re.search(re_pattern, req_text)
key_method = re_result.group(1)
key_url = re_result.group(2)
if not key_url.startswith("http"):
key_url = base_url + "/" + key_url
key_content = requests.get(
key_url,
headers=REQ_HEADERS, timeout=10, proxies={"http": proxies, "https": proxies}
).content
else:
key_method = None
key_content = None
urls = []
for line in req_text.splitlines():
line = line.strip()
if not line or line.startswith("#"):
continue
if line.startswith("http"):
urls.append(line)
else:
urls.append(base_url + "/" + line)
return urls, key_method, key_content
@staticmethod
def select_m3u8_stream(m3u8_text):
stream_info = {}
stream_list = []
for line in m3u8_text.splitlines():
line = line.strip()
if not line:
continue
if line.startswith("#EXT-X-STREAM-INF"):
stream_info = {"info": line.split(":", 1)[1]}
continue
if stream_info:
stream_info["sub_url"] = line
stream_list.append(stream_info.copy())
stream_info = {}
print("\n- Found %s stream:" % len(stream_list))
for index, stream_info_ in enumerate(stream_list, 1):
print(" - %s. %s" % (index, stream_info_["info"]))
input_str = input("\n- Input the item number you want to download(default: 1):")
if input_str in [str(x) for x in range(1, len(stream_list)+1)]:
index = int(input_str) - 1
else:
index = 0
return stream_list[index]["sub_url"]
@classmethod
def merge_ts(cls, temp_dir, src_files, dst_file):
pwd_bak = os.getcwd()
os.chdir(temp_dir)
try:
if len(src_files) > 100:
                # Split the file list into chunks of 100
names_split = [
src_files[i:100+i]
for i in range(0, len(src_files), 100)
]
files_split = []
for i, names in enumerate(names_split):
files_split.append("tmp_%s.mp4" % i)
cls.merge_files(names, files_split[-1])
cls.merge_files(files_split, dst_file)
else:
cls.merge_files(src_files, dst_file)
finally:
os.chdir(pwd_bak)
@staticmethod
def merge_files(files, dst):
        # Concatenate the files with a platform-specific shell command
if os.name == "nt":
cmd_str = "copy /b %s %s >nul" % ("+".join(files), dst)
else:
cmd_str = "cat %s > %s" % (" ".join(files), dst)
os.system(cmd_str)
@staticmethod
def adj_file_name(file_name):
""" 调整文件名 过滤不规范的字符 """
for char in (" ", "?", "/", "\\", ":", "*", "\"", "<", ">", "|"):
file_name = file_name.replace(char, "")
return file_name
def download_file_all(max_threads_num, temp_dir, proxies, urls, key_method, key_content):
def download_file(url, file_name):
while True:
print(" - Request %s..." % file_name)
try:
r = requests.get(
url,
headers=REQ_HEADERS, timeout=15, proxies={"http": proxies, "https": proxies}
)
except Exception as error:
if isinstance(error, requests.exceptions.ReadTimeout):
print(" ! Request %s failed, timeout! Try again after 5s.." % file_name)
elif isinstance(error, (requests.exceptions.SSLError, requests.exceptions.ProxyError)):
print(" ! Request %s failed, proxy error! Try again after 5s.." % file_name)
else:
print(" ! %s: %s" % (type(error), error))
print(" ! Request %s failed! Try again after 5s.." % file_name)
sleep(5)
continue
if not r.ok:
print(" ! Request %s %s! Try again after 5s..." % (file_name, r.status_code))
sleep(5)
continue
with open(os.path.join(temp_dir, file_name), "wb") as f:
if cryptor:
f.write(cryptor.decrypt(r.content))
else:
f.write(r.content)
print(" - Download %s OK!" % file_name)
return
if key_method and key_content:
from Crypto.Cipher import AES
cryptor = AES.new(key_content, AES.MODE_CBC, key_content)
else:
cryptor = None
file_list = [str(x) + ".ts" for x in range(len(urls))]
with ThreadPoolExecutor(max_threads_num) as executor1:
executor1.map(download_file, urls, file_list)
return file_list
def mkdir(path):
""" 创建目录 """
if os.path.exists(path):
if not os.path.isdir(path):
try:
os.remove(path)
except:
pass
else:
return
os.makedirs(path)
def file2file(src, dst, move=False):
""" 复制文件到文件
move为True时移动文件而不是复制文件
"""
mkdir(os.path.split(dst)[0])
if move:
shutil.move(src, dst)
else:
shutil.copyfile(src, dst)
return dst
def file2dir(src, dst, move=False):
""" 复制文件到目录(不修改文件名)
move为True时复制后删除原文件
"""
mkdir(dst)
shutil.copy(src, dst)
if move:
os.remove(src)
return os.path.join(dst, os.path.split(src)[1])
def remove_path(path):
""" 移除文件/目录(如果存在的话) """
if os.path.isdir(path):
shutil.rmtree(path)
elif os.path.exists(path):
os.remove(path)
def main():
parser = ArgumentParser()
parser.add_argument("url", help="url for m3u8 file")
parser.add_argument(
"-j", "--jobs", type=int, default=8,
help="number of recipes (jobs)(available: 1~32)(default: 8)")
parser.add_argument(
"-p", "--proxies", default="",
help="use HTTP proxy (address:port)(default: None)"
)
parser.add_argument(
"-d", "--tmpdir",
help="Custom temp dir(default: read from environment variables)"
)
parser.add_argument(
"-o", "--output", default="output.mp4",
help="output file name (default: output.mp4)"
)
args = parser.parse_args()
tempfile.tempdir = args.tmpdir
mkdir(tempfile.gettempdir())
M3u8Downloader(args.url, args.jobs, args.proxies, args.output).run()
if __name__ == '__main__':
main()
| StarcoderdataPython |
3299721 | <filename>PyRods/test/test_rodsInfo.py
# Copyright (c) 2013, University of Liverpool
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# Author : <NAME>
#
import unittest
from common import *
from irods import *
class testRodsInfo(iRODSTestCase):
def test_dataObjInfo_t(self):
v1 = create_dataObjInfo_t()
v2 = create_dataObjInfo_t()
self.assertTrue(test_dataObjInfo_t(v1, v2))
def test_miscSvrInfo_t(self):
v1 = create_miscSvrInfo_t()
v2 = create_miscSvrInfo_t()
self.assertTrue(test_miscSvrInfo_t(v1, v2))
def test_rodsObjStat_t(self):
v1 = create_rodsObjStat_t()
v2 = create_rodsObjStat_t()
self.assertTrue(test_rodsObjStat_t(v1, v2))
def test_tagStruct_t(self):
v1 = create_tagStruct_t()
v2 = create_tagStruct_t()
self.assertTrue(test_tagStruct_t(v1, v2))
def test_specColl_t(self):
v1 = create_specColl_t()
v2 = create_specColl_t()
self.assertTrue(test_specColl_t(v1, v2))
def test_subFile_t(self):
v1 = create_subFile_t()
v2 = create_subFile_t()
self.assertTrue(test_subFile_t(v1, v2))
def test_keyValPair_t(self):
v1 = create_keyValPair_t()
v2 = create_keyValPair_t()
self.assertTrue(test_keyValPair_t(v1, v2))
def test_inxIvalPair_t(self):
v1 = create_inxIvalPair_t()
v2 = create_inxIvalPair_t()
self.assertTrue(test_inxIvalPair_t(v1, v2))
def test_rescInfo_t(self):
v1 = create_rescInfo_t()
v2 = create_rescInfo_t()
self.assertTrue(test_rescInfo_t(v1, v2))
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.TestLoader().loadTestsFromTestCase(testRodsInfo))
return suite
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=2).run(suite())
| StarcoderdataPython |
193505 | <filename>lib/vtk_opener.py
import os
import numpy as np
import scipy.misc
import vtk
from vtk import vtkStructuredPointsReader
from vtk.util import numpy_support as VN
from vtk.util.numpy_support import vtk_to_numpy
#%%
# load a vtk file as input
reader = vtk.vtkPolyDataReader()
reader.SetFileName("/home/tkdrlf9202/Datasets/snuh_HCC_sample_1807/MEDIP/HCC_1104.vtk")
reader.ReadAllScalarsOn()
reader.ReadAllVectorsOn()
reader.Update()
#%%
polydata = reader.GetOutput()
points = polydata.GetPoints()
array = points.GetData()
numpy_nodes = vtk_to_numpy(array)
#%%
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
ax.scatter(numpy_nodes[:, 0], numpy_nodes[:, 1], numpy_nodes[:, 2])
plt.show()
| StarcoderdataPython |
42804 | # ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.11.3
# kernelspec:
# display_name: Python 3
# name: python3
# ---
# + [markdown] id="view-in-github" colab_type="text"
# <a href="https://colab.research.google.com/github/always-newbie161/probml-notebooks/blob/jax_vdvae/notebooks/vdvae_jax_cifar_demo.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a>
# + [markdown] id="cTSe7I6g45v8"
# This notebook shows a demo of working with VDVAE in JAX; the code used is from [vdvae-jax](https://github.com/j-towns/vdvae-jax) by [<NAME>](https://j-towns.github.io/)
# + [markdown] id="PxtpxTPEMS4C"
# ## Setup
# + id="ipHVirxUHTDJ"
from google.colab import auth
auth.authenticate_user()
# + colab={"base_uri": "https://localhost:8080/"} id="Z6gM2ytSHnO0" outputId="3e63de9d-6808-4cd9-eb1f-08996a6a7fed"
project_id = 'probml'
# !gcloud config set project {project_id}
# + id="a3__DVx74sso" colab={"base_uri": "https://localhost:8080/", "height": 52} outputId="579bc832-9028-49f3-c164-c426d32f66a6"
'''
this should be the format of the checkpoint filetree:
checkpoint_path >> model(optimizer)_checkpoint_file.
checkpoint_path_ema >> ema_checkpoint_file
'''
checkpoint_path='/content/vdvae_cifar10_2.86/latest_cifar10'
# checkpoints are downloaded at these paths.
# vdvae_cifar10_2.86/latest_cifar10 - optimizer+mode
# vdvae_cifar10_2.86/latest_cifar10_ema - ema_params'
# + id="4_RnWXhwIV85" colab={"base_uri": "https://localhost:8080/"} cellView="form" outputId="de8dedaf-bdd3-4fb7-99ee-7cfe96229d1c"
#@title Download checkpoints
# !gsutil cp -r gs://gsoc_bucket/vdvae_cifar10_2.86 ./
# !ls -l /content/vdvae_cifar10_2.86/latest_cifar10
# !ls -l /content/vdvae_cifar10_2.86/latest_cifar10_ema
# + colab={"base_uri": "https://localhost:8080/"} id="z3fThb8PIYHG" outputId="8406f5b2-cb50-42f5-aa78-4dc4f85afb02"
# !git clone https://github.com/j-towns/vdvae-jax.git
# + colab={"base_uri": "https://localhost:8080/"} id="053XPypoMobJ" outputId="0e415f07-00a4-4815-c2c5-288236ac2c98"
# %cd vdvae-jax
# + colab={"base_uri": "https://localhost:8080/"} id="X1hY6VqmNApP" outputId="41014f01-32bf-4377-85e5-e18328d2161a"
# !pip install --quiet flax
# + id="y013geSvWQUg"
import os
try:
os.environ['COLAB_TPU_ADDR']
import jax.tools.colab_tpu
jax.tools.colab_tpu.setup_tpu()
except:
pass
# + colab={"base_uri": "https://localhost:8080/"} id="XDzBF1uZXOlu" outputId="929c368c-4610-49b0-bc94-76b891bc9b0e"
import jax
jax.local_devices()
# + [markdown] id="KrFas8alNwJ0"
# ## Model
# (for cifar10)
# + [markdown] id="4Mr89HhnTbaF"
# ### Setting up hyperparams
# + id="B0QZ6aKoP08z"
from hps import HPARAMS_REGISTRY, Hyperparams, add_vae_arguments
from train_helpers import setup_save_dirs
import argparse
import dataclasses
H = Hyperparams()
parser = argparse.ArgumentParser()
parser = add_vae_arguments(parser)
parser.set_defaults(hps= 'cifar10',conv_precision='highest')
H = dataclasses.replace(H, **vars(parser.parse_args([])))
hparam_sets = [x for x in H.hps.split(',') if x]
for hp_set in hparam_sets:
hps = HPARAMS_REGISTRY[hp_set]
parser.set_defaults(**hps)
H = dataclasses.replace(H, **vars(parser.parse_args([])))
H = setup_save_dirs(H)
# + [markdown] id="NisrtOPlfmef"
# This model is hierarchical, with multiple stochastic blocks, each containing multiple deterministic layers. You can understand the model skeleton by reading the encoder and decoder "strings".
#
# **How to understand the string:**
# * blocks are comma separated
# * `axb` means there are `b` res blocks (sets of Conv layers) at resolution `a`x`a`
# * `amb` means a mixin block which increases the inter-image dims from `a` to `b` using **nearest neighbour upsampling** (used in the decoder)
# * `adb` means a block with an avg-pooling layer which reduces the dims from `a` to `b` (used in the encoder)
#
# for more details, refer to this [paper](https://arxiv.org/abs/2011.10650)
#
#
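# As an illustrative, made-up example (not the actual CIFAR-10 config), a decoder
# string like `"1x1,1m4,4x2,4m8,8x5"` would read as: one res block at 1x1, a mixin
# upsampling from 1 to 4, two res blocks at 4x4, a mixin from 4 to 8, and finally
# five res blocks at 8x8.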
# + colab={"base_uri": "https://localhost:8080/"} id="-OyvG1KbP2qT" outputId="bc0a16e1-0cbb-4951-c5ef-e8310bc9deb4"
hparams = dataclasses.asdict(H)
for k in ['enc_blocks','dec_blocks','zdim','n_batch','device_count']:
print(f'{k}:{hparams[k]}')
# + id="FGD3wwRxvF3Y"
from utils import logger
from jax.interpreters.xla import DeviceArray
log = logger(H.logdir)
if H.log_wandb:
import wandb
def logprint(*args, pprint=False, **kwargs):
if len(args) > 0: log(*args)
wandb.log({k: np.array(x) if type(x) is DeviceArray else x for k, x in kwargs.items()})
wandb.init(config=dataclasses.asdict(H))
else:
logprint = log
# + colab={"base_uri": "https://localhost:8080/"} id="cABtXQvqSG2Z" outputId="2c43dea8-4c53-44cc-dd91-0c7577d07a7e"
import numpy as np
from jax import lax
import torch
import imageio
from PIL import Image
import glob
from torch.utils.data import DataLoader
from torchvision import transforms
np.random.seed(H.seed)
torch.manual_seed(H.seed)
H = dataclasses.replace(
H,
conv_precision = {'default': lax.Precision.DEFAULT,
'high': lax.Precision.HIGH,
'highest': lax.Precision.HIGHEST}[H.conv_precision],
seed_init =H.seed,
seed_sample=H.seed + 1,
seed_train =H.seed + 2 + H.host_id,
seed_eval =H.seed + 2 + H.host_count + H.host_id,
)
print('training model on ', H.dataset)
# + [markdown] id="Gs8bNNXpTMxZ"
# ### Downloading cifar10 dataset
# + colab={"base_uri": "https://localhost:8080/"} id="4An20_C-SvCT" outputId="023f5c9a-87fd-4ad8-abc3-0945b9fe4374"
# !./setup_cifar10.sh
# + [markdown] id="Js-LK-vojdSw"
# ### Setting up the model, data and the preprocess fn.
# + id="AylLXttfTSca"
from data import set_up_data
H, data_train, data_valid_or_test, preprocess_fn = set_up_data(H)
# + colab={"base_uri": "https://localhost:8080/"} id="GWsr1xszZ_te" outputId="a5ba8d4e-b088-46ec-ac31-b4fbd250618d"
from train_helpers import load_vaes
H = dataclasses.replace(H, restore_path=checkpoint_path)
optimizer, ema_params, start_epoch = load_vaes(H, logprint)
# + colab={"base_uri": "https://localhost:8080/"} id="PEH8BtbmaK4O" outputId="f32e3fa2-746e-404b-bbae-aaca80078568"
start_epoch # no.of.epochs trained
# + colab={"base_uri": "https://localhost:8080/"} id="9nAJ3EGLICEh" outputId="6a47c0b6-aaf0-45a3-8a1c-b0c6bb6b3d40"
# Hparams for the current model
hparams = dataclasses.asdict(H)
for i, k in enumerate(sorted(hparams)):
logprint(f'type=hparam, key={k}, value={getattr(H, k)}')
# + [markdown] id="HS2o9uFqjgyv"
# ### Evaluation
# + colab={"base_uri": "https://localhost:8080/"} id="jhiF_NjEuWQv" outputId="b0d88a47-5af0-4452-d1c0-88d90ef1a71e"
from train import run_test_eval
run_test_eval(H, ema_params, data_valid_or_test, preprocess_fn, logprint)
# + [markdown] id="tppWoc_hypdn"
# ### Function to save and show a batch of images given as a numpy array.
#
#
# + id="AJbKzeuzzGcS"
def zoom_in(fname, shape):
im = Image.open(fname)
resized_im = im.resize(shape)
resized_im.save(fname)
def save_n_show(images, order, image_shape, fname, zoom=True, show=False):
n_rows, n_images = order
im = images.reshape((n_rows, n_images, *image_shape))\
.transpose([0, 2, 1, 3, 4])\
.reshape([n_rows * image_shape[0],
n_images * image_shape[1], 3])
print(f'printing samples to {fname}')
imageio.imwrite(fname, im)
if zoom:
zoom_in(fname, (640, 64)) # w=640, h=64
if show:
display(Image.open(fname))
# + [markdown] id="9TlNptkdd5ME"
# ## Generations
# + id="EcnvaTn3iJfo"
n_images = 10
num_temperatures = 3
image_shape = [H.image_size,H.image_size,H.image_channels]
H = dataclasses.replace(H, num_images_visualize=n_images, num_temperatures_visualize=num_temperatures)
# + [markdown] id="LDHUzIgBbjuX"
# Images will be saved in the following dir
# + colab={"base_uri": "https://localhost:8080/", "height": 0} id="EhJ17q1dfSNu" outputId="fb923dee-dc4d-4e68-e2c5-20f3f41874c1"
H.save_dir
# + [markdown] id="Xm_BYJYjiuzt"
# Because the model params are replicated across multiple devices, an unreplicated copy of them is made for sampling and generation.
# + id="VJbqZRxWilR9"
from jax import random
from vae import VAE
from flax import jax_utils
from functools import partial
rng = random.PRNGKey(H.seed_sample)
ema_apply = partial(VAE(H).apply,{'params': jax_utils.unreplicate(ema_params)})
forward_uncond_samples = partial(ema_apply, method=VAE(H).forward_uncond_samples)
# + colab={"base_uri": "https://localhost:8080/"} id="XF5dvNqeRcIC" outputId="477884a0-d016-43c3-96ac-26b3cfd65d55"
temperatures = [1.0, 0.9, 0.8, 0.7]
for t in temperatures[:H.num_temperatures_visualize]:
im = forward_uncond_samples(n_images, rng, t=t)
im = np.asarray(im)
save_n_show(im, [1,n_images], image_shape, f'{H.save_dir}/generations-tem-{t}.png')
# + colab={"base_uri": "https://localhost:8080/", "height": 0} id="RdypV3PJfyfN" outputId="bc5042cf-54c7-4380-e2f2-d36ab4951d65"
for t in temperatures[:H.num_temperatures_visualize]:
print("="*25)
print(f"Generation of {n_images} new images for t={t}")
print("="*25)
fname = f'{H.save_dir}/generations-tem-{t}.png'
display(Image.open(fname))
# + [markdown] id="89M1-l8Ogd2k"
# ## Reconstructions
# + id="014yXaJfgfhq"
n_images = 10
image_shape = [H.image_size,H.image_size,H.image_channels]
# + [markdown] id="z5xtClDEYTI-"
# Preprocessing images before getting the latents
# + id="81EExYe0glPu"
from train import get_sample_for_visualization
viz_batch_original, viz_batch_processed = get_sample_for_visualization(
data_valid_or_test, preprocess_fn, n_images, H.dataset)
# + [markdown] id="eDENCERSiMm6"
# Getting the partial functions from the model methods
# + id="vPpzIoM_hQHK"
forward_get_latents = partial(ema_apply, method=VAE(H).forward_get_latents)
forward_samples_set_latents = partial(
ema_apply, method=VAE(H).forward_samples_set_latents)
# + [markdown] id="AnNFN7S7YZe1"
# Getting latents of different levels.
# + id="nt2_Zjqlha1U"
zs = [s['z'] for s in forward_get_latents(viz_batch_processed, rng)]
# + [markdown] id="7RA8e6qJYcqF"
# The number of latent variables used depends on `H.num_variables_visualize`; altering it gives reconstructions at different resolutions.
# + id="ThgwoF6ihe9e"
recons = []
lv_points = np.floor(np.linspace(0, 1, H.num_variables_visualize + 2) * len(zs)).astype(int)[1:-1]
for i in lv_points:
recons.append(forward_samples_set_latents(n_images, zs[:i], rng, t=0.1))
# + [markdown] id="iawVwy7XYp9Z"
# Original Images
# + colab={"base_uri": "https://localhost:8080/", "height": 115} id="ih0D1sfRhy6F" outputId="8696bbaf-2a7c-4d89-9d7d-ebea19d37e7a"
orig_im = np.array(viz_batch_original)
print("Original test images")
save_n_show(orig_im, [1, n_images], image_shape, f'{H.save_dir}/orig_test.png', show=True)
# + [markdown] id="vbFgprJuYr7R"
# Reconstructions.
# + colab={"base_uri": "https://localhost:8080/", "height": 809} id="Ol7rNCgfh57R" outputId="e8d562cf-206e-42ae-a84b-5a5fd02489e8"
for i,r in enumerate(recons):
r = np.array(r)
print("="*25)
print(f"Generation of {n_images} new images for {i+1}x resolution")
print("="*25)
fname = f'{H.save_dir}/recon_test-res-{i+1}x.png'
save_n_show(r, [1, n_images], image_shape, fname, show=True)
| StarcoderdataPython |
3253431 | <gh_stars>0
from django.db import models
from django.contrib.auth.models import AbstractUser
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
class TypeOfMembership(models.Model):
membership_type = models.CharField(max_length=150)
#member_entry = models.CharField(max_length=150)
def __str__(self):
return self.membership_type
class User(AbstractUser):
username = models.CharField(blank=True, null=True, max_length=20, unique=True)
phone_number = models.CharField(blank=True, null=True, max_length=14, unique=True)
email = models.EmailField(_('email address'), unique=True)
USERNAME_FIELD = 'phone_number'
REQUIRED_FIELDS = ['username', 'first_name', 'last_name','email']
def __str__(self):
return "{}".format(self.email)
class UserProfile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='profile')
title = models.CharField(max_length=5)
dob = models.DateField()
address = models.CharField(max_length=255)
country = models.CharField(max_length=50)
city = models.CharField(max_length=50)
zip = models.CharField(max_length=5)
payment_status = models.CharField(max_length=100)
balance = models.CharField(max_length=100)
membership_type = models.CharField(max_length=10)
photo = models.ImageField(upload_to='uploads', blank=True)
| StarcoderdataPython |
61941 | import tensorflow as tf
import sys
from configs import DEFINES
# Builds a single-layer LSTM cell
def make_lstm_cell(mode, hiddenSize, index):
cell = tf.nn.rnn_cell.BasicLSTMCell(hiddenSize, name = "lstm"+str(index))
if mode == tf.estimator.ModeKeys.TRAIN:
        # Add dropout in training mode
cell = tf.contrib.rnn.DropoutWrapper(cell, output_keep_prob=DEFINES.dropout_width)
return cell
# Estimator model function
def model(features, labels, mode, params):
TRAIN = mode == tf.estimator.ModeKeys.TRAIN
EVAL = mode == tf.estimator.ModeKeys.EVAL
PREDICT = mode == tf.estimator.ModeKeys.PREDICT
    # Encoder embedding (whether to use a trainable embedding matrix)
    if params['embedding'] == True:
        # Initialize the embedding weight matrix
        initializer = tf.contrib.layers.xavier_initializer()
        embedding = tf.get_variable(name = "embedding", # name
                                    shape=[params['vocabulary_length'], params['embedding_size']], # shape
                                    dtype=tf.float32, # dtype
                                    initializer=initializer, # initial value
                                    trainable=True) # trainable
else:
        # Use tf.eye to build an identity (one-hot) matrix the size of the vocabulary
        embedding = tf.eye(num_rows = params['vocabulary_length'], dtype = tf.float32)
        embedding = tf.get_variable(name = "embedding", # name
                                    initializer = embedding, # initial value
                                    trainable = False) # not trained
    # Look up embeddings for the encoder input batch
    embedding_encoder = tf.nn.embedding_lookup(params = embedding, ids = features['input'])
    # Look up embeddings for the decoder input batch
    embedding_decoder = tf.nn.embedding_lookup(params = embedding, ids = features['output'])
with tf.variable_scope('encoder_scope', reuse=tf.AUTO_REUSE):
        # If True, build the model with multiple stacked layers;
        # if False, use a single layer
if params['multilayer'] == True:
encoder_cell_list = [make_lstm_cell(mode, params['hidden_size'], i) for i in range(params['layer_size'])]
rnn_cell = tf.contrib.rnn.MultiRNNCell(encoder_cell_list)
else:
rnn_cell = make_lstm_cell(mode, params['hidden_size'], "")
        # Unroll the recurrent network with dynamic_rnn using rnn_cell
        # encoder_states: the final state, [batch_size, cell.state_size]
        encoder_outputs, encoder_states = tf.nn.dynamic_rnn(cell=rnn_cell, # RNN cell
                                                            inputs=embedding_encoder, # inputs
                                                            dtype=tf.float32) # dtype
with tf.variable_scope('decoder_scope', reuse=tf.AUTO_REUSE):
if params['multilayer'] == True:
decoder_cell_list = [make_lstm_cell(mode, params['hidden_size'], i) for i in range(params['layer_size'])]
rnn_cell = tf.contrib.rnn.MultiRNNCell(decoder_cell_list)
else:
rnn_cell = make_lstm_cell(mode, params['hidden_size'], "")
decoder_initial_state = encoder_states
        decoder_outputs, decoder_states = tf.nn.dynamic_rnn(cell=rnn_cell, # RNN cell
                                                            inputs=embedding_decoder, # inputs
                                                            initial_state=decoder_initial_state, # initialized with the final encoder state
                                                            dtype=tf.float32) # dtype
    # logits: the output after the final dense (hidden) layer
logits = tf.layers.dense(decoder_outputs, params['vocabulary_length'], activation=None)
    # Take the argmax over the vocabulary dimension
predict = tf.argmax(logits, 2)
if PREDICT:
        predictions = { # predicted values are stored here as a dictionary
            'indexs': predict, # predicted index for each sequence step
}
return tf.estimator.EstimatorSpec(mode, predictions=predictions)
    # One-hot encode the labels to match the logits shape and compute the loss
labels_ = tf.one_hot(labels, params['vocabulary_length'])
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=logits, labels=labels_))
    # Measure accuracy as the frequency with which predictions match the labels
accuracy = tf.metrics.accuracy(labels=labels, predictions=predict,name='accOp')
    # accuracy is maintained as a running ratio over all values
metrics = {'accuracy': accuracy}
tf.summary.scalar('accuracy', accuracy[1])
if EVAL:
        # Return the loss and the accuracy metric (eval_metric_ops)
return tf.estimator.EstimatorSpec(mode, loss=loss, eval_metric_ops=metrics)
    # Defensive check: execution should only reach here
    # in TRAIN mode (tf.estimator.ModeKeys.TRAIN)
assert TRAIN
optimizer = tf.train.AdamOptimizer(learning_rate=DEFINES.learning_rate)
train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
    # Return the loss and the gradient update op (train_op)
return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)
| StarcoderdataPython |
1641682 | <filename>tools/auth/backends.py
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.backends import ModelBackend
class SingleUserBackend(ModelBackend):
"""
Authenticate against only one user defined in settings.LOGIN_USER.
"""
supports_inactive_user = True
def __init__(self, single_user_name=None):
self.single_user_name = single_user_name or settings.LOGIN_USER
super().__init__()
def authenticate(self, request, password=None):
user = self.get_or_create_single_user()
pw_valid = user.check_password(password)
return user if pw_valid else None
def get_user(self, user_id):
return self.get_or_create_single_user()
def get_or_create_single_user(self):
try:
return User.objects.get(username=self.single_user_name)
except User.DoesNotExist:
# Create the single user and return it
# to permit first login with an empty password.
user = User(username=self.single_user_name)
user.set_password('')
user.save()
return user
def password_set(self):
return not self.get_or_create_single_user().check_password('')
| StarcoderdataPython |
3237179 | from __future__ import print_function
import sys
from pyc4 import c4
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def test_sum():
assert sum([1, 2, 3]) == 6, "Should be 6"
def test_encoding():
tests = [ { 'in': "", 'exp': "<KEY>" } ]
for test in tests:
actual = c4.Identify(test['in'])
if actual.string() != test['exp']:
eprint("IDs don't match, got ", actual, " expected ", test["exp"])
return False
return True
def test_all_ffff():
b = []
for i in range(64):
b.append(chr(0xFF))
data = ''.join(b)
    id = c4.ID(data)
    if id.string() != "<KEY>":
        eprint("IDs don't match, got ", id.string(), " expected ", "<KEY>")
        return False
id2, err = c4.parse("<KEY>")
if err:
eprint("Unexpected error ", err)
return False
for bb in id2.value:
if bb != chr(0xFF):
eprint(bb, "incorrect Parse results")
return False
return True
def test_all_0000():
b = []
for i in range(64):
b.append(chr(0))
data = ''.join(b)
    id = c4.ID(data)
    if id.string() != "c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111111":
        eprint("IDs don't match, got ", id.string(), " expected ",
               "c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111111")
        return False
id2, err = c4.parse(
"c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111111")
if err:
eprint("Unexpected error ", err)
return False
for bb in id2.value:
if bb != chr(0):
eprint(bb, "incorrect Parse results")
return False
return True
def test_append_order():
byteData = [
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 58],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0d, 0x24],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0xfa, 0x28],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xac, 0xad, 0x10]
]
expectedIDs = ["c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111121", "c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111211", "c41111111111111111111111111111111111111111111111111111111111111111111111111111111111112111", "c41111111111111111111111111111111111111111111111111111111111111111111111111111111111121111"]
k = 0
for num in byteData:
b = []
for c in num:
b.append(chr(c))
id = c4.ID(''.join(b))
if id.string() != expectedIDs[k]:
eprint("IDs don't match, got ", id.string(), " , expcted ", expectedIDs[k])
return False
id2, err = c4.parse(expectedIDs[k])
if err:
eprint("Unexpected error ", err)
return False
i = 0
for bb in id2.value:
if bb != chr(byteData[k][i]):
eprint(bb, "incorrect Parse results")
return False
i = i + 1
k = k + 1
return True
def test_parse():
tests = [
{ "in": "<KEY>",
"err": None,
"exp": "This is a pretend asset file, for testing asset id generation.\n"
},
{
"in": "<KEY>",
"err": "invalid character at 3",
"exp": ""
},
{
"in": "c430cjRutKqZSCrW43QGU1uwRZTGoVD7A7kPHKQ1z4X<KEY>",
"err": "is not 90 characters long",
"exp": ""
}
]
i = 0
for test in tests:
id, err = c4.parse(test["in"])
if test["err"] is not None:
if not err:
eprint("Expected error but got none")
return False
elif err != test["err"]:
eprint("incorrect error got ", err, " expected ", test["err"])
return False
continue
elif err is not None:
eprint("Unexpected error ", err)
return False
expectedID = c4.Identify(test["exp"])
if expectedID != id:
eprint("IDs don't match, got ", _stringOf(id), ", expcted ", _stringOf(expectedID))
return False
return True
def _stringOf(id):
if not id:
return ""
return id.string()
def test_id_less():
id1 = c4.Identify("1") # <KEY>
id2 = c4.Identify("2") # c42i2hTBA9Ej4nqEo9iUy3pJRRE53KAH9RwwMSWjmfaQN7LxCymVz1zL9hEjqeFYzxtxXz2wRK7CBtt71AFkRfHodu
if id1.less(id2) != False:
eprint("expected %q to be less than %q", id2, id1)
return False
return True
def test_id_cmp():
id1 = c4.Identify("1") # c42yrSHMvUcscrQBssLhrRE28YpGUv9Gf95uH8KnwTiBv4odDbVqNnCYFs3xpsLrgVZfHebSaQQsvxgDGmw5CX1fVy
id2 = c4.Identify("2") # c42i2hTBA9Ej4nqEo9iUy3pJRRE53KAH9RwwMSWjmfaQN7LxCymVz1zL9hEjqeFYzxtxXz2wRK7CBtt71AFkRfHodu
# is.Equal(id1.Cmp(id2), 1)
if id1.Cmp(id2) != 1:
eprint("Incorrect comparison between %q, %q", id1, id2)
return False
if id2.Cmp(id1) != -1:
eprint("Incorrect comparison between %q, %q", id2, id1)
return False
if id1.Cmp(id1) != 0:
eprint("Incorrect comparison between %q, %q", id1, id1)
return False
return True
def TestCompareIDs():
tests = [
        {
            "id_a": c4.Identify("Test string"),
            "id_b": c4.Identify("Test string"),
            "exp": 0
        },
        {
            "id_a": c4.Identify("Test string A"),
            "id_b": c4.Identify("Test string B"),
            "exp": -1
        },
        {
            "id_a": c4.Identify("Test string B"),
            "id_b": c4.Identify("Test string A"),
            "exp": 1
        }
]
for test in tests:
if test["id_a"].Cmp(test["id_b"]) != test["exp"]:
eprint("Incorrect comparison between %q, %q", test["id_a"], test["id_b"])
return False
return True
def test_bytes_to_id():
byteData = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 58]
b = []
    for num in byteData:
        b.append(chr(num))
id = c4.ID(''.join(b))
if id.string() != "c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111121":
eprint("IDs don't match, got %q, expcted %q", id.string(), "c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111121")
return False
return True
def test_nil_id():
# ID of nothing constant
nilid = c4.Identify("")
if nilid.string() != "c459dsjfscH38cYeXXYogktxf4Cd9ibshE3BHUo6a58hBXmRQdZrAkZzsWcbWtDg5oQstpDuni4Hirj75GEmTc1sFT":
eprint("IDs don't match, got %q, expcted %q", nilid.string(), "c459dsjfscH38cYeXXYogktxf4Cd9ibshE3BHUo6a58hBXmRQdZrAkZzsWcbWtDg5oQstpDuni4Hirj75GEmTc1sFT")
return False
return True
test_vectors = ["alfa", "bravo", "charlie", "delta", "echo", "foxtrot", "golf", "hotel", "india"]
test_vector_ids = [
# Initial list (unsorted).
[
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>"
],
# After round 1
[
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>",
"<KEY>"
],
# After round 2
[
"<KEY>",
"<KEY>",
"<KEY>"
],
# After round 3
[
"<KEY>",
"<KEY>"
],
# Final ID
[
"<KEY>"
]
]
def test_identification():
    for i, test in enumerate(test_vectors):
c4id = c4.Identify(test)
if c4id.string() != test_vector_ids[0][i]:
eprint("IDs don't match, got %q expected %q", c4id.string(), test_vector_ids[0][i])
return False
return True
def view_bytes(b):
    # Debug helper: show the first, middle and last few byte values of b.
    # Elements may be ints or 1-char strings (as produced by ''.join(chr(...))).
    def _val(x):
        return x if isinstance(x, int) else ord(x)
    length = 4
    out = "(%d)[" % len(b)
    for j in range(length):
        num = str(_val(b[j]))
        out += " %s%s" % (" " * (3 - len(num)), num)
    out += " ... "
    offset = 64 - length
    if len(b) >= 128:
        for j in range(64 - length, 64 + length):
            if j == 64:
                out += " |"
            num = str(_val(b[j]))
            out += " %s%s" % (" " * (3 - len(num)), num)
        offset = 128 - length
        out += " ... "
    for j in range(offset, offset + length):
        num = str(_val(b[j]))
        out += " %s%s" % (" " * (3 - len(num)), num)
    return out + " ]"
class testDataType:
def __init__(self, value, id, c4id):
self.value = value
self.id = id
self.c4id = c4id
def TestDigestSum():
test_data = []
    for i, s in enumerate(test_vectors):
        dig = c4.Identify(s)
        id, err = c4.parse(test_vector_ids[0][i])
        if err:
            eprint("unexpected error %q", err)
        if id.string() != dig.string():
            eprint("IDs don't match, got %q expected %q", id, dig)
        if id.string() != test_vector_ids[0][i]:
            eprint("IDs don't match, got %q expected %q", id.string(), test_vector_ids[0][i])
        test_data.append(testDataType(s, id, test_vector_ids[0][i]))
# pair := make([]byte, 0, 128)
# var l, r c4.ID
# var key string
# var id c4.ID
# lbytes, rbytes := make([]byte, 64), make([]byte, 64)
# for i, dta := range test_data {
# pair = append(pair, dta.Id[:]...)
# key = dta.Value
# id = dta.Id
# if i > 0 && i%2 == 1 {
# // right hand side
# t.Logf("%d: \"%s\"\n %s %s\n", i, key, id, viewBytes(dta.Id[:]))
# t.Logf("\tpair: %s\n", viewBytes(pair))
# r = dta.Id
# copy(rbytes, r[:])
# data := make([]byte, 64)
# switch r.Cmp(l) {
# case -1:
# copy(data, r[:])
# data = append(data, l[:]...)
# case 0:
# copy(data, l[:])
# case 1:
# copy(data, l[:])
# data = append(data, r[:]...)
# }
# t.Logf("\t l: %s\n\t r: %s\n", viewBytes(l[:]), viewBytes(r[:]))
# t.Logf("\tdata: %s\n", viewBytes(data))
# testsum1 := c4.Identify(bytes.NewReader(data))
# sum := l.Sum(r)
# // Check Sum produces the expected ID
# if testsum1.Cmp(sum) != 0 {
# t.Errorf("Digests don't match, got %q expected %q", testsum1, sum)
# }
# // Check that Sum did not alter l, or r
# if bytes.Compare(r[:], rbytes[:]) != 0 {
# t.Error("Sum altered source r")
# }
# if bytes.Compare(l[:], lbytes) != 0 {
# t.Errorf("Sum altered source l")
# }
# t.Logf("\t testsum1: %s\n\t sum: %s\n", viewBytes(testsum1[:]), viewBytes(sum[:]))
# var id1, id2 c4.ID
# copy(id1[:], pair[:64])
# copy(id2[:], pair[64:])
# testsum2 := id1.Sum(id2)
# if testsum2.Cmp(sum) != 0 {
# t.Errorf("IDs don't match, got %q expected %q", testsum2, sum)
# }
# pair = pair[:0]
# continue
# }
# // left hand side
# l = dta.Id
# copy(lbytes, l[:])
# t.Logf("%d: \"%s\"\n %s %s\n", i, key, id, viewBytes(dta.Id[:]))
# }
# }
# func TestDigestSlice(t *testing.T) {
# ids := make(c4.IDs, len(test_vectors))
# for i, s := range test_vectors {
# ids[i] = c4.Identify(strings.NewReader(s))
# }
# sort.Sort(ids)
# n := set.Uniq(ids)
# ids = ids[:n]
# t.Run("Order", func(t *testing.T) {
# if len(ids) != len(test_vectors) {
# t.Errorf("lengths do not match got %d, expected %d", len(ids), len(test_vectors))
# }
# sorted_test_vector_ids := make([]string, len(test_vector_ids[0]))
# copy(sorted_test_vector_ids, test_vector_ids[0])
# sort.Strings(sorted_test_vector_ids)
# for i, idstring := range sorted_test_vector_ids {
# if idstring != ids[i].String() {
# t.Errorf("IDs don't match, got %q expected %q", idstring, ids[i].String())
# }
# }
# c4id := ids.Tree().ID()
# if c4id.String() != "<KEY>" {
# t.Errorf("IDs don't match, got %q expected %q", c4id.String(), "<KEY>")
# }
# })
# }
# func TestDigest(t *testing.T) {
# var b []byte
# for i := 0; i < 64; i++ {
# b = append(b, 0xFF)
# }
# var id c4.ID
# copy(id[:], b)
# if id.String() != `<KEY>` {
# t.Errorf("IDs don't match, got %q expected %q", id.String(), `c467rpwLCuS5DGA8KGZXKsVQ7dnPb9goRLoKfgGbLfQg9WoLUgNY77E2jT11fem3coV9nAkguBACzrU1iyZM4B8roQ`)
# }
# id2, err := c4.Parse(`c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111121`)
# tb2 := []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 58}
# if err != nil {
# t.Errorf("unexpected error %q", err)
# }
# b2 := id2.Digest()
# for i, bb := range b2 {
# if bb != tb2[i] {
# t.Errorf("error parsing")
# }
# }
# for _, test := range []struct {
# Bytes []byte
# IdStr string
# }{
# {
# Bytes: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 58},
# IdStr: `c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111121`,
# },
# {
# Bytes: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0d, 0x24},
# IdStr: `c41111111111111111111111111111111111111111111111111111111111111111111111111111111111111211`,
# },
# {
# Bytes: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0xfa, 0x28},
# IdStr: `c41111111111111111111111111111111111111111111111111111111111111111111111111111111111112111`,
# },
# {
# Bytes: []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xac, 0xad, 0x10},
# IdStr: `c41111111111111111111111111111111111111111111111111111111111111111111111111111111111121111`,
# },
# } {
# id, err := c4.Parse(test.IdStr)
# if err != nil {
# t.Errorf("unexpected error %q", err)
# }
# for i, bb := range id.Digest() {
# if bb != test.Bytes[i] {
# t.Errorf("error parsing")
# }
# }
# }
# }
# func TestIdentify(t *testing.T) {
# id := c4.Identify(iotest.DataErrReader(strings.NewReader("foo")))
# if id.String() != "<KEY>" {
# t.Errorf("C4 IDs don't match, got %q, expected %q", id.String(), "c45xZeXwMSpqXjpDumcHMA6mhoAmGHkUo7r9WmN2UgSEQzj9KjgseaQdkEJ11fGb5S1WEENcV3q8RFWwEeVpC7Fjk2")
# }
# }
# // returns error on read for testing the negative case
# type errorReader bool
# func (e errorReader) Read(p []byte) (int, error) {
# if e == true {
# return 0, errors.New("errorReader triggered error.")
# }
# return 0, nil
# }
# func TestIOFailure(t *testing.T) {
# id := c4.Identify(errorReader(true))
# if !id.IsNil() {
# t.Errorf("expected id to be nil but got: %s", id)
# }
# }
# func TestMarshalJSON(t *testing.T) {
# var empty c4.ID
# type testType struct {
# Name string `json:"name"`
# ID c4.ID `json:"id"`
# }
# nilID := c4.Identify(strings.NewReader(""))
# for _, test := range []struct {
# In testType
# Exp string
# }{
# {
# In: testType{"Test", nilID},
# Exp: `{"name":"Test","id":"c459dsjfscH38cYeXXYogktxf4Cd9ibshE3BHUo6a58hBXmRQdZrAkZzsWcbWtDg5oQstpDuni4Hirj75GEmTc1sFT"}`,
# },
# {
# In: testType{"Test", empty},
# Exp: `{"name":"Test","id":""}`,
# },
# {
# In: testType{"Test", empty},
# Exp: `{"name":"Test","id":""}`,
# },
# } {
# actual, err := json.Marshal(test.In)
# if err != nil {
# t.Errorf("unexpected error %q", err)
# }
# if string(actual) != test.Exp {
# t.Errorf("results do not match got %q, expected %q", string(actual), test.Exp)
# }
# }
# }
# func TestUnarshalJSON(t *testing.T) {
# type testType struct {
# Name string `json:"name"`
# Id c4.ID `json:"id"`
# }
# var unset c4.ID
# nilID := c4.Identify(strings.NewReader(""))
# for i, test := range []struct {
# In []byte
# Exp testType
# }{
# {
# In: []byte(`{"name":"Test","id":"<KEY>"}`),
# Exp: testType{"Test", nilID},
# },
# {
# In: []byte(`{"name":"Test","id":""}`),
# Exp: testType{"Test", unset},
# },
# } {
# var testObject testType
# err := json.Unmarshal([]byte(test.In), &testObject)
# if err != nil {
# t.Errorf("unexpected error %q", err)
# }
# t.Logf("> %d: %v", i, testObject)
# if testObject.Id.IsNil() {
# if !test.Exp.Id.IsNil() {
# t.Errorf("%d results do not match got %v, expected %v", i, testObject, test.Exp)
# }
# } else if testObject.Name != test.Exp.Name || testObject.Id.String() != test.Exp.Id.String() {
# t.Errorf("results do not match got %v, expected %v", testObject, test.Exp)
# }
# }
# }
if __name__ == "__main__":
# test_sum()
allpass = True
if not test_encoding():
allpass = False
print("test_encoding FAILED")
if not test_all_ffff():
allpass = False
print("test_all_ffff FAILED")
if not test_all_0000():
allpass = False
print("test_all_0000 FAILED")
if not test_append_order():
allpass = False
print("test_append_order FAILED")
if not test_parse():
allpass = False
print("test_parse FAILED")
if not test_id_less():
allpass = False
print("test_id_less FAILED")
if allpass:
print("Everything passed")
| StarcoderdataPython |
4812694 | <filename>src/octonote/core/format.py
import textwrap
import pastel
printer = pastel.Pastel(True)
printer.add_style("header", options=["bold"])
printer.add_style("location", "light_blue")
# printer.add_style("code", "white")
printer.add_style("notice", options=["bold"])
printer.add_style("warning", "yellow", options=["bold"])
printer.add_style("error", "red", options=["bold"])
class BaseFormat:
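    """Base class for rendering a title and a list of annotations.

    Subclasses override the _print_* hooks to produce a concrete output format.
    """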
_github = False
_title = None
_annotations = []
def __init__(self, title, annotations):
self._printer = printer
self._title = title
self._annotations = annotations
def _print(self, str):
print(printer.colorize(str))
def _print_prologue(self, group_name=None):
pass
def _print_annotation(self, annotation):
pass
def _print_annotations(self):
for annotation in self._annotations:
self._print_annotation(annotation)
def _print_epilogue(self, group_name=None):
pass
def print(self, group_name=None):
self._print_prologue(group_name)
self._print_annotations()
self._print_epilogue(group_name)
class CommandFormat(BaseFormat):
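    """Renders annotations as GitHub Actions workflow commands,
    e.g. ::group::, and ::notice/::warning/::error file=...,line=...
    """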
def _print_prologue(self, group_name=None):
if group_name:
group = f"::group::{group_name}"
print(group)
def _print_annotation(self, annotation):
severity_name = annotation["severity_name"]
file = f"file={annotation['file']}"
line = f"line={int(annotation['line'])}"
end_line = f"endLine={int(annotation['end-line'])}"
title = f"title={annotation['title']}"
message = annotation["message"]
print(f"::{severity_name} {file},{line},{end_line},{title}::{message}")
def _print_epilogue(self, group_name=None):
if group_name:
print("::endgroup::")
class ConsoleFormat(BaseFormat):
def _print_prologue(self, group_name=None):
self._print(f"<header>{self._title}</header>")
def _print_annotation(self, annotation):
print("")
file = annotation["file"]
line = int(annotation["line"])
end_line = int(annotation["end-line"])
end_line = "" if end_line == line else f":{end_line}"
location = f" <location>{file}:{line}{end_line}</location>"
self._print(location)
# with open(filename, "r") as file:
# lines = file.readlines()
# line = lines[line_num - 1].rstrip("\n")
# line = style.print(f"<code>{line}</code>")
# print(line)
severity_name = annotation["severity_name"]
severity_len = " " * len(severity_name)
if annotation["severity_name"] == "notice":
severity_name = annotation["severity_name"]
severity_name = f"<notice>{severity_name.capitalize()}:</notice>"
if annotation["severity_name"] == "warning":
severity_name = annotation["severity_name"]
severity_name = f"<warning>{severity_name.capitalize()}:</warning>"
if annotation["severity_name"] == "error":
severity_name = annotation["severity_name"]
severity_name = f"<error>{severity_name.capitalize()}:</error>"
initial_indent = " " + severity_len
msg = annotation["message"]
msg = textwrap.fill(
msg, 79, initial_indent=initial_indent, subsequent_indent=" "
)
msg = msg.lstrip()
self._print(f" {severity_name} {msg}")
| StarcoderdataPython |
1763334 | <filename>src/test/data_structure/dictionary_exercise_test.py
import unittest
from src.main.data_structure.dictionary_exercise import *
class DictTest(unittest.TestCase):
def test_add_element(self):
_dict = Dict({"a": 1, "b": 2, "c": 3})
self.assertEqual(_dict.add_element("d", 4), {"a": 1, "b": 2, "c": 3, "d": 4})
def test_get_element_by_key(self):
_dict = Dict({("a", "b"): 12})
self.assertEqual(_dict.get_element_by_key(("a", "b")), 12)
def test_delete_element_by_key(self):
_dict = Dict({"a": 0, "b": 1})
self.assertEqual(_dict.delete_element_by_key("a"), {"b": 1})
def test_clear(self):
_dict = Dict({"a": "b", "b": "c", "c": "a"})
self.assertEqual(
Dict({1: "Google", 2: "Facebook", 3: "Twitter"}).clear(), _dict.clear()
)
def test_keys(self):
_dict = Dict({"Key1": 1, "Key2": 2})
self.assertEqual(list(_dict.keys()), list(["Key1", "Key2"]))
def test_values(self):
_dict = Dict({"Key1": 1, "Key2": 2})
self.assertEqual(list(_dict.values()), list([1, 2]))
def test_mapping_list_into_dictionary(self):
_dict = Dict({})
key_list = ["red", "green", "blue"]
value_list = ["#FF0000", "#008000", "#0000FF"]
self.assertEqual(
_dict.mapping_list_into_dictionary(key_list, value_list),
{"red": "#FF0000", "green": "#008000", "blue": "#0000FF"},
)
def test_find_max_value(self):
_dict = Dict({"a": 1, "b": 22, "c": 3})
self.assertEqual(_dict.find_max_value(), 22)
def test_find_min_value(self):
_dict = Dict({"a": 1, "b": 22, "c": 30, "d": 2})
self.assertEqual(_dict.find_min_value(), 1)
def test_sort_dict_by_key(self):
_dict = Dict({"b": 2, "f": 30, "d": 34, "c": 3, "a": 1})
self.assertEqual(
_dict.sort_dict_by_key(), {"a": 1, "b": 2, "c": 3, "d": 34, "f": 30}
)
def test_tutorial_marks_average(self):
_dict = Dict({})
self.assertEqual(
_dict.tutorial_marks_average(),
[
{"id": 101, "subject": "C", "First + Second": 22.5},
{"id": 102, "subject": "Python", "First + Second": 23.5},
{"id": 103, "subject": "Java", "First + Second": 20.5},
],
)
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
1677795 | <reponame>juliolugo96/projex-api
import json
from users.models import CustomUser
from api.models import *
from api.serializers import *
from .authBase import AuthBaseTestCase
class BoardTestCase(AuthBaseTestCase):
url = '/api/v1/assignees'
def setUp(self):
super().setUp() # Create 4 users(user,jefe,emp1,emp2) and Login jefe
self.data_project = {
"title": "black Mesa",
"description": "particle accelerator",
"creator": self.jefe,
}
self.project = Project.objects.create(**self.data_project)
self.boards = Board.objects.filter(project=self.project.id)
self.board = self.boards[0]
print(self.boards)
print(self.board)
self.data_task = {
"title": "task 1",
"description": "first task",
"priority": 1,
"due_date": "2019-07-27",
"board": self.board,
}
self.task = Task.objects.create(**self.data_task)
#self.data_task['board'] = self.board.id
self.data_userProject = {
"user": self.emp1,
"project": self.project,
"role": "Backend",
"status": "active",
}
self.emp1Proj = UserProject.objects.create(**self.data_userProject)
data = {
"user": self.emp1,
"task": self.task,
}
self.assigTask1_Emp1 = Assignee.objects.create(**data)
def test_read_assignees_get(self):
"""
Test to verify GET assignees valid (Model and Serializer)
"""
url = self.url + '?task={id}'.format(id=self.task.id)
response = self.client.get(url)
print(response.status_code, response.content)
self.assertEqual(200, response.status_code)
response_data = json.loads(response.content)
print(response_data)
assigneeSerial = AssigneeSerializer(instance=self.assigTask1_Emp1)
print(assigneeSerial.data)
self.assertEqual(assigneeSerial.data, response_data['results'][0])
| StarcoderdataPython |
3208036 | <filename>flow/utils/trafficlights.py
import numpy as np
def get_phase(name, ts):
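    """Return the phase list for `name`, assigning durations cyclically from `ts`."""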
phases = all_phases[name]
for i, phase in enumerate(phases):
phase['duration'] = phase['minDur'] = phase['maxDur'] = str(ts[i % len(ts)])
return phases
def get_uniform_random_phase(name, means, noises, T=500):
i, acc_t = 0, 0
phases = []
while acc_t <= T:
mean, noise = means[i % len(means)], noises[i % len(noises)]
phase = all_phases[name][i % len(all_phases[name])]
t = np.random.rand() * 2 * noise * mean + mean * (1 - noise)
phase['duration'] = phase['minDur'] = phase['maxDur'] = str(t)
phases.append(phase)
acc_t += t
i += 1
return phases
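# Each state string below has one character per signal connection, following the
# SUMO convention: G/g = green (with/without priority), y = yellow, r = red.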
all_phases = {}
all_phases['center'] = [{
"state": "GGggrrrrGGggrrrr"
}, {
"state": "yyyyrrrryyyyrrrr"
}, {
"state": "rrrrGGggrrrrGGgg"
}, {
"state": "rrrryyyyrrrryyyy"
}]
all_phases['bottom'] = [{
"state": "rrrGGgGgg",
}, {
"state": "rrryyyyyy",
}, {
"state": "GGgGrrrrr",
}, {
"state": "yyyyrrrrr",
}]
all_phases['top'] = [{
"state": "GggrrrGGg",
}, {
"state": "yyyrrryyy",
}, {
"state": "rrrGGgGrr",
}, {
"state": "rrryyyyrr",
}]
all_phases['right'] = [{
"state": "GGgGggrrr",
}, {
"state": "yyyyyyrrr",
}, {
"state": "GrrrrrGGg",
}, {
"state": "yrrrrryyy",
}]
all_phases['left'] = [{
"state": "GggrrrGGg",
}, {
"state": "yyyrrryyy",
}, {
"state": "rrrGGgGrr",
}, {
"state": "rrryyyyrr",
}]
all_phases['left_in'] = [{
"state": "GggrrrGGgrr",
}, {
"state": "yyyrrryyyrr",
}, {
"state": "rrrGGgGrrrr",
}, {
"state": "rrryyyyrrrr",
}, {
"state": "rrgGrrrrrGG",
}, {
"state": "rryyrrrrryy"
}]
all_phases['top_in'] = [{
"state": "rrrrrGGgGrr",
}, {
"state": "rrrrryyyyrr",
}, {
"state": "rrGggrrrGGg",
}, {
"state": "rryyyrrryyy",
}, {
"state": "GGrrrrrrrrG",
}, {
"state": "yyrrrrrrrry"
}]
| StarcoderdataPython |
1623548 | <filename>tests/generate/test_generate_copy_without_render.py
"""Verify correct work of `_copy_without_render` context option."""
import os
import pytest
import tackle.utils.paths
from tackle.main import tackle
from tackle.utils.paths import rmtree
@pytest.fixture
def remove_test_dir():
"""Fixture. Remove the folder that is created by the test."""
if os.path.exists('test_copy_without_render'):
rmtree('test_copy_without_render')
yield
if os.path.exists('test_copy_without_render'):
rmtree('test_copy_without_render')
@pytest.mark.usefixtures('clean_system', 'remove_test_dir')
def test_generate_copy_without_render_extensions(change_dir):
"""Verify correct work of `_copy_without_render` context option.
Some fixtures/files/directories should be rendered during invocation,
some just copied, without any modification.
"""
tackle('test-generate-copy-without-render', no_input=True)
dir_contents = os.listdir('test_copy_without_render')
assert 'test_copy_without_render-not-rendered' in dir_contents
assert 'test_copy_without_render-rendered' in dir_contents
with open('test_copy_without_render/README.txt') as f:
assert '{{cookiecutter.render_test}}' in f.read()
with open('test_copy_without_render/README.rst') as f:
assert 'I have been rendered!' in f.read()
with open(
'test_copy_without_render/test_copy_without_render-rendered/README.txt'
) as f:
assert '{{cookiecutter.render_test}}' in f.read()
with open(
'test_copy_without_render/test_copy_without_render-rendered/README.rst'
) as f:
assert 'I have been rendered' in f.read()
with open(
'test_copy_without_render/'
'test_copy_without_render-not-rendered/'
'README.rst'
) as f:
assert '{{cookiecutter.render_test}}' in f.read()
with open('test_copy_without_render/rendered/not_rendered.yml') as f:
assert '{{cookiecutter.render_test}}' in f.read()
with open(
'test_copy_without_render/' 'test_copy_without_render-rendered/' 'README.md'
) as f:
assert '{{cookiecutter.render_test}}' in f.read()
| StarcoderdataPython |
3225464 | '''
Statistical Computing for Scientists and Engineers
Homework 2
Fall 2018
University of Notre Dame
'''
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import expon
from sklearn.metrics import mean_squared_error
x = np.linspace(1,501,500)
print (x.shape)
main_MLE = []
main_MAP = []
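# Setup recap: A ~ Exponential with scale 5, i.e. true rate lambda = 1/5 = 0.2
# (the value used in the MSE below).
# - MLE: maximizing the likelihood gives lambda_MLE = n / sum(A) = 1 / mean(A).
# - MAP: with a Gamma(alpha, beta) prior on the rate, the posterior is
#   Gamma(alpha + n, beta + sum(A)), whose mode gives
#   lambda_MAP = (alpha + n - 1) / (beta + sum(A)).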
for i in range (1,501):
A = np.random.exponential(scale=5,size=i)
##############################################################
# INSERT CODE BELOW
##############################################################
    lambda_MLE = 1.0 / np.mean(A)  # closed-form MLE: n / sum(A)
##############################################################
# INSERT CODE ABOVE
##############################################################
mse_MLE = ((0.2 - lambda_MLE) ** 2).mean(axis=None)
main_MLE.append(mse_MLE)
alpha = 30
beta = 100
n = len(A)
##############################################################
# INSERT CODE BELOW
##############################################################
    # MAP under a Gamma(alpha, beta) prior on the rate (shape/rate
    # parameterization assumed): the posterior is Gamma(alpha + n, beta + sum(A)),
    # whose mode gives the estimate below.
    lambda_MAP = (alpha + n - 1.0) / (beta + np.sum(A))
##############################################################
# INSERT CODE ABOVE
##############################################################
mse_MAP = ((0.2 - lambda_MAP) ** 2).mean(axis=None)
main_MAP.append(mse_MAP)
print(lambda_MLE)
print(lambda_MAP)
main_MLE_value = np.array(main_MLE)
main_MAP_value = np.array(main_MAP)
print (main_MLE_value.shape)
plt.plot(x,main_MLE_value)
plt.plot(x,main_MAP_value)
plt.legend(['MLE','MAP'])
plt.xlabel('N', fontsize = 16)
plt.ylabel('MSE', fontsize = 16)
plt.savefig('Solution-6C.png')
plt.show()
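# A rough expectation for the plot (an assumption about this setup, not part
# of the assignment): the Gamma(30, 100) prior pulls lambda_MAP toward its
# prior mode (alpha - 1) / beta = 0.29 while the true rate is 1 / scale = 0.2,
# so for small N the MAP estimate trades bias for lower variance; both MSE
# curves should converge toward zero as N grows.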
| StarcoderdataPython |
1604001 | from django.contrib.contenttypes.models import ContentType
from django.contrib.gis.db.models import Union
from django.db.models import DurationField, Q
from django.db.models.functions import Cast
from django.utils.translation import ugettext_lazy as _
from enumfields.drf import EnumField, EnumSupportSerializerMixin
from rest_framework import serializers
from field_permissions.serializers import FieldPermissionsSerializerMixin
from leasing.enums import LeaseRelationType
from leasing.models import (
AreaNote, BasisOfRent, EmailLog, InfillDevelopmentCompensation, RelatedLease, ReservationProcedure)
from leasing.serializers.debt_collection import (
CollectionCourtDecisionSerializer, CollectionLetterSerializer, CollectionNoteSerializer)
from leasing.serializers.invoice import InvoiceNoteCreateUpdateSerializer, InvoiceNoteSerializer
from users.models import User
from users.serializers import UserSerializer
from ..models import (
Contact, District, Financing, Hitas, IntendedUse, Lease, LeaseIdentifier, LeaseType, Municipality, NoticePeriod,
Regulation, SpecialProject, StatisticalUse, SupportiveHousing)
from .contact import ContactSerializer
from .contract import ContractCreateUpdateSerializer, ContractSerializer
from .decision import DecisionCreateUpdateNestedSerializer, DecisionSerializer
from .inspection import InspectionSerializer
from .land_area import (
LeaseAreaCreateUpdateSerializer, LeaseAreaListSerializer, LeaseAreaSerializer, LeaseAreaWithGeometryListSerializer)
from .rent import (
LeaseBasisOfRentCreateUpdateSerializer, LeaseBasisOfRentSerializer, RentCreateUpdateSerializer, RentSerializer)
from .tenant import TenantCreateUpdateSerializer, TenantSerializer
from .utils import InstanceDictPrimaryKeyRelatedField, NameModelSerializer, UpdateNestedMixin
class DistrictSerializer(serializers.ModelSerializer):
class Meta:
model = District
fields = '__all__'
class FinancingSerializer(serializers.ModelSerializer):
class Meta:
model = Financing
fields = '__all__'
class HitasSerializer(serializers.ModelSerializer):
class Meta:
model = Hitas
fields = '__all__'
class IntendedUseSerializer(serializers.ModelSerializer):
class Meta:
model = IntendedUse
fields = '__all__'
class LeaseTypeSerializer(EnumSupportSerializerMixin, serializers.ModelSerializer):
class Meta:
model = LeaseType
fields = '__all__'
class MunicipalitySerializer(NameModelSerializer):
class Meta:
model = Municipality
fields = '__all__'
class NoticePeriodSerializer(EnumSupportSerializerMixin, serializers.ModelSerializer):
class Meta:
model = NoticePeriod
fields = '__all__'
class RegulationSerializer(NameModelSerializer):
class Meta:
model = Regulation
fields = '__all__'
class StatisticalUseSerializer(NameModelSerializer):
class Meta:
model = StatisticalUse
fields = '__all__'
class SupportiveHousingSerializer(NameModelSerializer):
class Meta:
model = SupportiveHousing
fields = '__all__'
class SpecialProjectSerializer(NameModelSerializer):
class Meta:
model = SpecialProject
fields = '__all__'
class ReservationProcedureSerializer(NameModelSerializer):
class Meta:
model = ReservationProcedure
fields = '__all__'
class LeaseIdentifierSerializer(serializers.ModelSerializer):
type = LeaseTypeSerializer()
municipality = MunicipalitySerializer()
district = DistrictSerializer()
class Meta:
model = LeaseIdentifier
fields = ('type', 'municipality', 'district', 'sequence')
class LeaseSuccinctSerializer(EnumSupportSerializerMixin, FieldPermissionsSerializerMixin, serializers.ModelSerializer):
id = serializers.ReadOnlyField()
type = LeaseTypeSerializer()
municipality = MunicipalitySerializer()
district = DistrictSerializer()
identifier = LeaseIdentifierSerializer(read_only=True)
class Meta:
model = Lease
fields = ('id', 'deleted', 'created_at', 'modified_at', 'type', 'municipality', 'district', 'identifier',
'start_date', 'end_date', 'state', 'is_rent_info_complete', 'is_invoicing_enabled',
'reference_number', 'note', 'preparer', 'is_subject_to_vat')
class LeaseSuccinctWithGeometrySerializer(LeaseSuccinctSerializer):
lease_areas = LeaseAreaWithGeometryListSerializer(many=True, required=False, allow_null=True)
class Meta:
model = Lease
fields = ('id', 'deleted', 'created_at', 'modified_at', 'type', 'municipality', 'district', 'identifier',
'start_date', 'end_date', 'state', 'is_rent_info_complete', 'is_invoicing_enabled',
'reference_number', 'note', 'preparer', 'is_subject_to_vat', 'lease_areas')
class RelatedToLeaseSerializer(EnumSupportSerializerMixin, serializers.ModelSerializer):
to_lease = LeaseSuccinctSerializer()
class Meta:
model = RelatedLease
fields = '__all__'
class RelatedLeaseSerializer(EnumSupportSerializerMixin, serializers.ModelSerializer):
def validate(self, data):
if data['from_lease'] == data['to_lease']:
raise serializers.ValidationError(_("from_lease and to_lease cannot be the same Lease"))
return data
class Meta:
model = RelatedLease
fields = '__all__'
class RelatedFromLeaseSerializer(EnumSupportSerializerMixin, serializers.ModelSerializer):
from_lease = LeaseSuccinctSerializer()
class Meta:
model = RelatedLease
fields = '__all__'
class LeaseSerializerBase(EnumSupportSerializerMixin, FieldPermissionsSerializerMixin, serializers.ModelSerializer):
id = serializers.ReadOnlyField()
type = LeaseTypeSerializer()
municipality = MunicipalitySerializer()
district = DistrictSerializer()
identifier = LeaseIdentifierSerializer(read_only=True)
tenants = TenantSerializer(many=True, required=False, allow_null=True)
lease_areas = LeaseAreaSerializer(many=True, required=False, allow_null=True)
lessor = ContactSerializer(required=False, allow_null=True)
contracts = ContractSerializer(many=True, required=False, allow_null=True)
decisions = DecisionSerializer(many=True, required=False, allow_null=True)
inspections = InspectionSerializer(many=True, required=False, allow_null=True)
rents = RentSerializer(many=True, required=False, allow_null=True)
basis_of_rents = LeaseBasisOfRentSerializer(many=True, required=False, allow_null=True)
collection_court_decisions = CollectionCourtDecisionSerializer(many=True, required=False, allow_null=True)
collection_letters = CollectionLetterSerializer(many=True, required=False, allow_null=True)
collection_notes = CollectionNoteSerializer(many=True, required=False, allow_null=True)
invoice_notes = InvoiceNoteSerializer(many=True, required=False, allow_null=True)
class Meta:
model = Lease
exclude = ('related_leases', )
class LeaseListSerializer(LeaseSerializerBase):
basis_of_rents = None
contracts = None
decisions = None
inspections = None
rents = None
related_leases = None
lease_areas = LeaseAreaListSerializer(many=True, required=False, allow_null=True)
collection_court_decisions = None
collection_letters = None
collection_notes = None
def get_related_lease_predecessors(to_lease_id, accumulator=None):
if accumulator is None:
accumulator = []
accumulator.append(to_lease_id)
result = set()
predecessors = RelatedLease.objects.filter(to_lease=to_lease_id).select_related('to_lease', 'from_lease')
if predecessors:
for predecessor in predecessors:
result.add(predecessor)
if predecessor.from_lease_id == predecessor.to_lease_id:
continue
if predecessor.from_lease_id in accumulator:
continue
result.update(get_related_lease_predecessors(predecessor.from_lease_id, accumulator))
return result
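# Illustration (hypothetical lease ids): given RelatedLease rows A -> B and
# B -> C, get_related_lease_predecessors(C) returns {B -> C, A -> B}; the
# accumulator of visited lease ids guards against cycles such as A -> B
# combined with B -> A, and self-references are skipped explicitly.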
def get_related_leases(obj):
# Immediate successors
related_to_leases = set(RelatedLease.objects.filter(from_lease=obj).select_related('to_lease', 'from_lease'))
# All predecessors
related_from_leases = get_related_lease_predecessors(obj.id)
return {
'related_to': RelatedToLeaseSerializer(related_to_leases, many=True).data,
'related_from': RelatedFromLeaseSerializer(related_from_leases, many=True).data,
}
class LeaseRetrieveSerializer(LeaseSerializerBase):
related_leases = serializers.SerializerMethodField()
preparer = UserSerializer()
infill_development_compensations = serializers.SerializerMethodField()
email_logs = serializers.SerializerMethodField()
area_notes = serializers.SerializerMethodField()
matching_basis_of_rents = serializers.SerializerMethodField()
def get_related_leases(self, obj):
return get_related_leases(obj)
def override_permission_check_field_name(self, field_name):
if field_name == 'infill_development_compensations':
return 'infill_development_compensation_leases'
if field_name in ('area_notes', 'email_logs'):
return 'lease_areas'
return field_name
def get_infill_development_compensations(self, obj):
infill_development_compensations = InfillDevelopmentCompensation.objects.filter(
infill_development_compensation_leases__lease__id=obj.id)
return [{'id': idc.id, 'name': idc.name} for idc in infill_development_compensations]
def get_email_logs(self, obj):
from leasing.serializers.email import EmailLogSerializer
lease_content_type = ContentType.objects.get_for_model(obj)
email_logs = EmailLog.objects.filter(content_type=lease_content_type, object_id=obj.id)
return EmailLogSerializer(email_logs, many=True).data
def get_area_notes(self, obj):
from leasing.serializers.area_note import AreaNoteSerializer
area_notes = None
combined_area = obj.lease_areas.aggregate(union=Union("geometry"))["union"]
if combined_area:
area_notes = AreaNote.objects.filter(geometry__intersects=combined_area)
return AreaNoteSerializer(area_notes, many=True).data
def get_matching_basis_of_rents(self, obj):
from leasing.serializers.basis_of_rent import BasisOfRentSerializer
q = Q()
property_identifiers = obj.lease_areas.values_list("identifier", flat=True)
if property_identifiers:
q = Q(property_identifiers__identifier__in=property_identifiers)
combined_area = obj.lease_areas.aggregate(union=Union("geometry"))["union"]
if combined_area:
q |= Q(geometry__intersects=combined_area)
if not q:
return []
return BasisOfRentSerializer(BasisOfRent.objects.filter(q), many=True).data
class Meta:
model = Lease
fields = '__all__'
exclude = None
class LeaseUpdateSerializer(UpdateNestedMixin, EnumSupportSerializerMixin, FieldPermissionsSerializerMixin,
serializers.ModelSerializer):
id = serializers.ReadOnlyField()
identifier = LeaseIdentifierSerializer(read_only=True)
tenants = TenantCreateUpdateSerializer(many=True, required=False, allow_null=True)
lease_areas = LeaseAreaCreateUpdateSerializer(many=True, required=False, allow_null=True)
lessor = InstanceDictPrimaryKeyRelatedField(instance_class=Contact, queryset=Contact.objects.filter(is_lessor=True),
related_serializer=ContactSerializer, required=False, allow_null=True)
contracts = ContractCreateUpdateSerializer(many=True, required=False, allow_null=True)
decisions = DecisionCreateUpdateNestedSerializer(many=True, required=False, allow_null=True)
inspections = InspectionSerializer(many=True, required=False, allow_null=True)
rents = RentCreateUpdateSerializer(many=True, required=False, allow_null=True)
basis_of_rents = LeaseBasisOfRentCreateUpdateSerializer(many=True, required=False, allow_null=True)
preparer = InstanceDictPrimaryKeyRelatedField(instance_class=User, queryset=User.objects.all(),
related_serializer=UserSerializer, required=False, allow_null=True)
related_leases = serializers.SerializerMethodField()
notice_period = serializers.PrimaryKeyRelatedField(
required=False, allow_null=True, queryset=NoticePeriod.objects.all().annotate(
duration_as_interval=Cast('duration', DurationField())).order_by('duration_as_interval'))
invoice_notes = InvoiceNoteCreateUpdateSerializer(many=True, required=False, allow_null=True)
def get_related_leases(self, obj):
return get_related_leases(obj)
class Meta:
model = Lease
fields = '__all__'
read_only_fields = ('is_invoicing_enabled', 'is_rent_info_complete')
class LeaseCreateSerializer(LeaseUpdateSerializer):
relate_to = serializers.PrimaryKeyRelatedField(required=False, allow_null=True, queryset=Lease.objects.all())
relation_type = EnumField(required=False, allow_null=True, enum=LeaseRelationType)
def override_permission_check_field_name(self, field_name):
if field_name in ('relate_to', 'relation_type'):
return 'related_leases'
return field_name
class Meta:
model = Lease
fields = '__all__'
read_only_fields = ('is_invoicing_enabled', 'is_rent_info_complete')
| StarcoderdataPython |
3356401 | <gh_stars>10-100
"""
Clean HTML
"""
from relevanceai.operations_new.apibase import OperationAPIBase
from relevanceai.operations_new.processing.text.html_clean.base import (
CleanTextBase,
)
class CleanTextOps(CleanTextBase, OperationAPIBase):
"""
Clean text operations
"""
| StarcoderdataPython |
1611471 | <reponame>clembu/MenuCreator
# Mustard Menu Creator addon
# https://github.com/Mustard2/MenuCreator
bl_info = {
"name": "Menu Creator",
"description": "Create a custom menu for each Object. To add properties or collections, just right click on the properties and hit Add property to the Menu",
"author": "Mustard",
"version": (0, 0, 3),
"blender": (2, 91, 0),
"warning": "",
"wiki_url": "https://github.com/Mustard2/MenuCreator",
"category": "User Interface",
}
import bpy
import addon_utils
import sys
import os
import re
import time
import math
from bpy.types import Header, Menu, Panel
from bpy.props import *
from bpy.app.handlers import persistent
from mathutils import Vector, Color
import webbrowser
# CLASSES
# Arrays for ENUM properties
# Array to store different section type
mc_section_type_list = [
("DEFAULT","Standard","A simple collection of properties that can be added right clicking on fields -> Add Property to the Menu"),
("COLLECTION","Collection List","Right clicking on them in the Outliner, you can add collections whose elements can be shown/hidden in the Menu. Only one collection will be shown at the same time.\nIdeal for: Outfit lists","OUTLINER_COLLECTION",1)
]
# Array to store possible icons to be used by properties and sections
mc_icon_list = [
("NONE","No Icon","No Icon"),
("USER", "Face", "Face","USER",1),
("HAIR", "Hair", "Hair","HAIR",2),
("MOD_CLOTH", "Cloth", "Cloth","MOD_CLOTH",3),
("MATERIAL", "Material", "Material","MATERIAL",4),
("ARMATURE_DATA", "Armature", "Armature","ARMATURE_DATA",5),
("MOD_ARMATURE", "Armature", "Armature","MOD_ARMATURE",6),
("EXPERIMENTAL", "Experimental", "Experimental","EXPERIMENTAL",7),
("WORLD", "World", "World","WORLD",8),
("PARTICLEMODE", "Comb", "Comb","PARTICLEMODE",9)
]
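# Each entry above follows Blender's EnumProperty item convention: either a
# 3-tuple (identifier, name, description) or a 5-tuple that additionally
# carries an icon name and a unique integer index.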
# Class with all the settings variables
class MC_Settings(bpy.types.PropertyGroup):
# Update functions for settings
    # Function to disable per-object edit mode on all objects when the Edit Mode tools are turned off
def mc_ms_editmode_update(self, context):
if not self.ms_editmode:
for obj in bpy.data.objects:
obj.mc_edit_enable = False
return
# Function to save the fixed object pointer to be used until the object is released
def mc_em_fixobj_update(self, context):
if self.em_fixobj:
self.em_fixobj_pointer = context.active_object
return
# Main Settings definitions
ms_editmode: bpy.props.BoolProperty(name="Enable Edit Mode Tools",
description="Unlock tools to customize the menu.\nDisable when the Menu is complete",
default=False,
update = mc_ms_editmode_update)
ms_advanced: bpy.props.BoolProperty(name="Advanced Options",
description="Unlock advanced options",
default=False)
ms_debug: bpy.props.BoolProperty(name="Debug mode",
description="Unlock debug mode.\nMore messaged will be generated in the console.\nEnable it only if you encounter problems, as it might degrade general Blender performance",
default=False)
# Menu Specific properties
mss_name: bpy.props.StringProperty(name="Name",
description="Name of the menu.\nChoose the name of the menu to be shown before the properties",
default="Object: ")
mss_obj_name: bpy.props.BoolProperty(name="Show the Object Name",
description="Show the Object name after the Name.\nFor instance, if the Name is \"Object: \", the shown name will be \"Object: name_of_object\"",
default=True)
# Edit mode properties
em_fixobj: bpy.props.BoolProperty(name="Pin Object",
description="Pin the Object you are using to edit the menu.\nThe object you pin will be considered as the target of all properties addition, and only this Object menu will be shown",
default=False,
update = mc_em_fixobj_update)
em_fixobj_pointer : bpy.props.PointerProperty(type=bpy.types.Object)
bpy.utils.register_class(MC_Settings)
bpy.types.Scene.mc_settings = bpy.props.PointerProperty(type=MC_Settings)
# Object specific properties
bpy.types.Object.mc_enable = bpy.props.BoolProperty(name="", default=False)
bpy.types.Object.mc_edit_enable = bpy.props.BoolProperty(name="Edit Mode", default=False, description="Enable edit mode in this menu.\nWhile active, you will have access to various tools to modify properties and sections")
# Class to store collections for section informations
class MCCollectionItem(bpy.types.PropertyGroup):
collection : bpy.props.PointerProperty(name="Collection",type=bpy.types.Collection)
bpy.utils.register_class(MCCollectionItem)
# Class to store section informations
class MCSectionItem(bpy.types.PropertyGroup):
# Properties and update functions
    # Function to reset the collapsed status when the section is made non-collapsable
def mc_sections_collapsed_update(self, context):
if not self.collapsable:
self.collapsed = False
return
# Function to create an array of tuples for enum collections
def mc_collections_list(self, context):
items = []
for el in self.collections:
if hasattr(el.collection, 'name'):
items.append( (el.collection.name,el.collection.name,el.collection.name) )
return sorted(items)
# Function to update global collection properties
def mc_collections_list_update(self, context):
for collection in self.collections:
if collection.collection.name == self.collections_list:
collection.collection.hide_viewport = False
collection.collection.hide_render = False
else:
collection.collection.hide_viewport = True
collection.collection.hide_render = True
def mc_collections_global_options_update(self, context):
for el in self.collections:
for obj in el.collection.objects:
if obj.type == "MESH":
obj.data.use_auto_smooth = self.collections_global_normalautosmooth
for modifier in obj.modifiers:
if modifier.type == "CORRECTIVE_SMOOTH":
modifier.show_viewport = self.collections_global_smoothcorrection
modifier.show_render = self.collections_global_smoothcorrection
elif modifier.type == "MASK":
modifier.show_viewport = self.collections_global_mask
modifier.show_render = self.collections_global_mask
elif modifier.type == "SHRINKWRAP":
modifier.show_viewport = self.collections_global_shrinkwrap
modifier.show_render = self.collections_global_shrinkwrap
if self.outfit_enable:
for modifier in self.outfit_body.modifiers:
if modifier.type == "MASK":
if not self.collections_global_mask:
modifier.show_viewport = False
modifier.show_render = False
else:
for el in self.collections:
for obj in el.collection.objects:
if obj.name in modifier.name and not obj.hide_viewport:
modifier.show_viewport = True
modifier.show_render = True
return
    # Poll function restricting pointer properties to mesh objects
def mc_poll_mesh(self, object):
return object.type == 'MESH'
# Global section options
id : bpy.props.IntProperty(name="Section ID")
name : bpy.props.StringProperty(name="Section Name")
icon : bpy.props.StringProperty(name="Section Icon", default="")
type : bpy.props.StringProperty(name="Section Type", default="DEFAULT")
collapsable : bpy.props.BoolProperty(name="Section Collapsable", default=False, update=mc_sections_collapsed_update)
# Global section option enforcer
collapsed : bpy.props.BoolProperty(name="", default = False, description="")
# COLLECTION type options
collections_enable_global_smoothcorrection: bpy.props.BoolProperty(default=False)
collections_enable_global_shrinkwrap: bpy.props.BoolProperty(default=False)
collections_enable_global_mask: bpy.props.BoolProperty(default=False)
collections_enable_global_normalautosmooth: bpy.props.BoolProperty(default=False)
# COLLECTION type data
collections: bpy.props.CollectionProperty(name="Section Collection List", type=MCCollectionItem)
collections_list: bpy.props.EnumProperty(name="Section Collection List", items = mc_collections_list, update=mc_collections_list_update)
collections_global_smoothcorrection: bpy.props.BoolProperty(name="Smooth Correction", default=True, update=mc_collections_global_options_update)
collections_global_shrinkwrap: bpy.props.BoolProperty(name="Shrinkwrap", default=True, update=mc_collections_global_options_update)
collections_global_mask: bpy.props.BoolProperty(name="Mask", default=True, update=mc_collections_global_options_update)
collections_global_normalautosmooth: bpy.props.BoolProperty(name="Normals Auto Smooth", default=True, update=mc_collections_global_options_update)
# Outfit variant
outfit_enable : bpy.props.BoolProperty(name="Outfit", default=False)
    outfit_body : bpy.props.PointerProperty(name="Outfit Body", description = "The Mask modifiers of this object will be toggled together with the visibility of the collection elements", type=bpy.types.Object, poll=mc_poll_mesh)
bpy.utils.register_class(MCSectionItem)
bpy.types.Object.mc_sections = bpy.props.CollectionProperty(type=MCSectionItem)
# Class to store linked properties informations
class MCLinkedPropertyItem(bpy.types.PropertyGroup):
path: bpy.props.StringProperty(name="Property Path")
id : bpy.props.StringProperty(name="Property Identifier")
bpy.utils.register_class(MCLinkedPropertyItem)
# Class to store properties informations
class MCPropertyItem(bpy.types.PropertyGroup):
mc_id : bpy.props.IntProperty(name="Section ID")
name : bpy.props.StringProperty(name="Property Name")
path: bpy.props.StringProperty(name="Property Path")
id : bpy.props.StringProperty(name="Property Identifier")
icon : bpy.props.EnumProperty(name="Property Icon", default="NONE",items=mc_icon_list)
section : bpy.props.StringProperty(name="Section", default="Unsorted")
hide : bpy.props.BoolProperty(name="Hide Property", default=False)
linked_props: bpy.props.CollectionProperty(name="Linked properties", type=MCLinkedPropertyItem)
bpy.utils.register_class(MCPropertyItem)
bpy.types.Object.mc_properties = bpy.props.CollectionProperty(type=MCPropertyItem)
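# Each MCPropertyItem records where a property lives ('path', e.g. the
# hypothetical 'bpy.data.objects["Body"]') and how it is addressed on that
# datablock ('id', e.g. 'location' for a standard property or '["MyProp"]'
# for a custom one), so the UI can later draw it with row.prop(eval(path), id).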
# COLLECTION MANAGEMENT FUNCTIONS
# ---- Properties only functions
# Function to remove a specific property from the collection
# Return 1 if the property was found and deleted
def mc_remove_property_item(collection, item):
i=-1
for el in collection:
i=i+1
if el.path == item[1] and el.id == item[2]:
break
if i>=0:
collection.remove(i)
return i>=0
# Function to add a specific property to the collection, if not already there
# Return 0 if the property has not been added because already in the properties list
def mc_add_property_item(collection, item):
i=True
for el in collection:
if el.path == item[1] and el.id == item[2]:
i=False
break
if i:
add_item = collection.add()
add_item.name = item[0]
add_item.path = item[1]
add_item.id = item[2]
add_item.mc_id = mc_len_collection(collection)
return i
# Function to find the index of a property
def mc_find_index(collection, item):
i=-1
for el in collection:
i=i+1
if el.path == item[1] and el.id == item[2]:
break
return i
# Function to clean properties of a single object
def mc_clean_single_properties(obj):
obj.mc_properties.clear()
# Function to clean all the properties of every object
def mc_clean_properties():
for obj in bpy.data.objects:
obj.mc_properties.clear()
# Function to print the properties
def mc_print_properties():
for obj in bpy.data.objects:
for el in obj.mc_properties:
print(el.id + " : property" + el.name + " with path "+el.path)
# Function to output the ID of the element
def mc_prop_ID(elem):
return elem.mc_id
# ---- Sections only functions
# Function to create an array of tuples for enum properties
def mc_section_list(scene, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
items = []
i = 0
for el in obj.mc_sections:
if el.type == "DEFAULT":
items.append( (el.name,el.name,el.name,el.icon,i) )
i = i + 1
return items
# Function to clean sections of a single object
def mc_clean_single_sections(obj):
obj.mc_sections.clear()
# Function to clean the sections of every object
def mc_clean_sections():
for obj in bpy.data.objects:
obj.mc_sections.clear()
# Function to find the index of a section from the name
def mc_find_index_section(collection, item):
i=-1
for el in collection:
i=i+1
if el.name == item:
break
return i
# Function to find the index of a section from the ID
def mc_find_index_section_fromID(collection, item):
i=-1
for el in collection:
i=i+1
if el.id == item:
break
return i
# Function to output the ID of the element
def mc_sec_ID(elem):
return elem.id
# ---- Sections and properties functions
# Function to find the length of a collection
def mc_len_collection(collection):
i=0
for el in collection:
i=i+1
return i
# OPERATORS
# Right click functions and operators
def dump(obj, text):
print('-'*40, text, '-'*40)
for attr in dir(obj):
if hasattr( obj, attr ):
print( "obj.%s = %s" % (attr, getattr(obj, attr)))
# Operator to add the right click button on properties
class MC_AddProperty(bpy.types.Operator):
"""Add the property to the menu"""
bl_idname = "mc.add_property"
bl_label = "Add property to Menu"
@classmethod
def poll(cls, context):
return context.active_object is not None
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
#if hasattr(context, 'button_pointer'):
# btn = context.button_pointer
# dump(btn, 'button_pointer')
if hasattr(context, 'button_prop'):
prop = context.button_prop
#dump(prop, 'button_prop')
try:
bpy.ops.ui.copy_data_path_button(full_path=True)
except:
self.report({'WARNING'}, 'Menu Creator - Invalid selection.')
return {'FINISHED'}
            # Split the copied full data path into the owning datablock ('rna')
            # and the property identifier ('path'); custom properties keep
            # their '["name"]' subscript form, while array-indexed properties
            # drop their subscript.
            rna, path = context.window_manager.clipboard.rsplit('.', 1)
if '][' in path:
path, rem = path.rsplit('[', 1)
rna = rna + '.' + path
path = '[' + rem
elif '[' in path:
path, rem = path.rsplit('[', 1)
if obj.mc_enable:
if mc_add_property_item(obj.mc_properties, [prop.name,rna,path]):
self.report({'INFO'}, 'Menu Creator - Property added to the \'' + obj.name + '\' menu.')
else:
self.report({'WARNING'}, 'Menu Creator - Property of \'' + obj.name + '\' was already added.')
else:
self.report({'ERROR'}, 'Menu Creator - Can not add property \'' + obj.name + '\'. No menu has been initialized.')
#if hasattr(context, 'button_operator'):
# op = context.button_operator
# dump(op, 'button_operator')
return {'FINISHED'}
# Operator to link a property to another one
class MC_LinkProperty(bpy.types.Operator):
"""Link the selected property to this one"""
bl_idname = "mc.link_property"
bl_label = "Link Property"
prop_id: bpy.props.StringProperty()
prop_path: bpy.props.StringProperty()
@classmethod
def poll(cls, context):
return context.active_object is not None
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
if hasattr(context, 'button_prop'):
prop = context.button_prop
#dump(prop, 'button_prop')
try:
bpy.ops.ui.copy_data_path_button(full_path=True)
except:
self.report({'WARNING'}, 'Menu Creator - Invalid selection.')
return {'FINISHED'}
rna, path = context.window_manager.clipboard.rsplit('.', 1)
if '][' in path:
path, rem = path.rsplit('[', 1)
rna = rna + '.' + path
path = '[' + rem
elif '[' in path:
path, rem = path.rsplit('[', 1)
if obj.mc_enable:
i = mc_find_index(obj.mc_properties, ['',self.prop_path,self.prop_id])
                # Custom properties are stored as '["name"]' subscripts and must
                # be joined to their path without a dot before evaluation.
                if '].[' in obj.mc_properties[i].path + '.' + obj.mc_properties[i].id:
                    prop_type = type(eval(obj.mc_properties[i].path + obj.mc_properties[i].id))
                else:
                    prop_type = type(eval(obj.mc_properties[i].path + '.' + obj.mc_properties[i].id))
if '].[' in rna + '.' + path:
link_type = type(eval(rna + path))
else:
link_type = type(eval(rna + '.' + path))
if prop_type == link_type:
already_added = False
for el in obj.mc_properties[i].linked_props:
if el.path == rna and el.id == path:
already_added = True
break
if not already_added:
add_item = obj.mc_properties[i].linked_props.add()
add_item.id = path
add_item.path = rna
self.report({'INFO'}, 'Menu Creator - Property \'' + path + '\' linked to \'' + obj.mc_properties[i].name + '\'')
else:
self.report({'WARNING'}, 'Menu Creator - Property \'' + path + '\' already linked to \'' + obj.mc_properties[i].name + '\'')
else:
self.report({'ERROR'}, 'Menu Creator - Property \'' + path + '\' can not be linked to \'' + obj.mc_properties[i].name + '\'')
if settings.ms_debug:
print('MenuCreator - Property \'' + path + '\' can not be linked to \'' + obj.mc_properties[i].name + '\'')
print(' Data types are ' + str(link_type) + ' and ' + str(prop_type) + '.')
else:
self.report({'ERROR'}, 'Menu Creator - Can not link property in \'' + obj.name + '\'. No menu has been initialized.')
return {'FINISHED'}
# Operator to add the collection to the selected section
class MC_AddCollection(bpy.types.Operator):
"""Add the collection to the selected section"""
bl_idname = "mc.add_collection"
bl_label = "Add collection to Menu"
section: bpy.props.StringProperty()
@classmethod
def poll(cls, context):
return context.active_object is not None
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
add_coll = bpy.context.collection
sec_index = mc_find_index_section(obj.mc_sections, self.section)
i=True
for el in obj.mc_sections[sec_index].collections:
if el.collection == add_coll:
i=False
break
if i:
add_item = obj.mc_sections[sec_index].collections.add()
add_item.collection = add_coll
self.report({'INFO'}, 'Menu Creator - Collection has been added to section \''+self.section+'\'.')
else:
self.report({'WARNING'}, 'Menu Creator - Collection was already added to section \''+self.section+'\'.')
return {'FINISHED'}
class WM_MT_button_context(Menu):
bl_label = "Custom Action"
def draw(self, context):
pass
def menu_func(self, context):
if hasattr(context, 'button_prop'):
layout = self.layout
layout.separator()
layout.operator(MC_AddProperty.bl_idname)
def menu_func_link(self, context):
if hasattr(context, 'button_prop'):
layout = self.layout
#layout.label(text="Try")
self.layout.menu(OUTLINER_MT_link_mcmenu.bl_idname)
class OUTLINER_MT_collection(Menu):
bl_label = "Custom Action Collection"
def draw(self, context):
pass
# Operator to create the list of sections when right clicking on the property -> Link to property
class OUTLINER_MT_link_mcmenu(bpy.types.Menu):
bl_idname = 'mc.menu_link'
bl_label = 'Link to Property'
def draw(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
layout = self.layout
no_prop = True
for prop in obj.mc_properties:
op = layout.operator(MC_LinkProperty.bl_idname, text=prop.name, icon=prop.icon)
op.prop_id = prop.id
op.prop_path = prop.path
no_prop = False
if no_prop:
layout.label(text="No properties found")
# Operator to create the list of sections when right clicking on the collection -> Add collection to Section
class OUTLINER_MT_collection_mcmenu(bpy.types.Menu):
bl_idname = 'mc.menu_collection'
bl_label = 'Add Collection to Section'
def draw(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
layout = self.layout
no_col_sec = True
for sec in obj.mc_sections:
if sec.type == "COLLECTION":
layout.operator(MC_AddCollection.bl_idname, text=sec.name, icon=sec.icon).section = sec.name
no_col_sec = False
if no_col_sec:
layout.label(text="No Collection List sections found")
def mc_collection_menu(self, context):
self.layout.separator()
self.layout.menu(OUTLINER_MT_collection_mcmenu.bl_idname)
# Operator to clean all properties and sections from all objects
class MC_CleanAll(bpy.types.Operator):
"""Clean all the menus.\nIf you choose reset, it will also delete all Menu options from all objects"""
bl_idname = "mc.cleanprop"
bl_label = "Clean all the properties"
reset : BoolProperty(default=False)
def execute(self, context):
mc_clean_properties()
mc_clean_sections()
if self.reset:
for obj in bpy.data.objects:
obj.mc_enable = False
        self.report({'INFO'}, 'Menu Creator - All the objects have been reset.')
return {'FINISHED'}
# Operator to clean all properties and sections from an objects. If reset is on, it will also disable the menu for that object
class MC_CleanObject(bpy.types.Operator):
"""Clean all the object properties.\nIf you choose reset, it will also delete all Menu options from the object"""
bl_idname = "mc.cleanpropobj"
bl_label = "Clean the object"
reset : BoolProperty(default=False)
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
mc_clean_single_properties(obj)
mc_clean_single_sections(obj)
if self.reset:
obj.mc_enable = False
self.report({'INFO'}, 'Menu Creator - \'' + obj.name + '\' menu has been reset.')
return {'FINISHED'}
# Operator to remove a linked property (button in UI)
class MC_RemoveLinkedProperty(bpy.types.Operator):
"""Remove the linked property"""
bl_idname = "mc.removelinkedproperty"
bl_label = ""
prop_index : bpy.props.IntProperty()
link_path : bpy.props.StringProperty()
link_id : bpy.props.StringProperty()
@classmethod
def poll(cls, context):
return context.active_object is not None
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
props = obj.mc_properties
i=-1
for el in obj.mc_properties[self.prop_index].linked_props:
i=i+1
if el.path == self.link_path and el.id == self.link_id:
break
if i>=0:
obj.mc_properties[self.prop_index].linked_props.remove(i)
return {'FINISHED'}
# Single Property settings
class MC_PropertySettings(bpy.types.Operator):
"""Modify some of the property settings"""
bl_idname = "mc.propsettings"
bl_label = "Property settings"
bl_icon = "PREFERENCES"
bl_options = {'UNDO'}
name : bpy.props.StringProperty(name='Name',
description="Choose the name of the property")
path : bpy.props.StringProperty()
id : bpy.props.StringProperty()
icon : bpy.props.EnumProperty(name='Icon',
description="Choose the icon.\nNote that the icon name MUST respect Blender convention. All the icons can be found in the Icon Viewer default Blender addon.",items=mc_icon_list)
    section : bpy.props.EnumProperty(name='Section',
                        description="Choose the section where the property will be shown",items=mc_section_list)
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
i = mc_find_index(obj.mc_properties,[self.name,self.path,self.id])
if i>=0:
obj.mc_properties[i].name = self.name
obj.mc_properties[i].icon = self.icon
obj.mc_properties[i].section = self.section
return {'FINISHED'}
def invoke(self, context, event):
settings = bpy.context.scene.mc_settings
if settings.ms_debug:
return context.window_manager.invoke_props_dialog(self, width=650)
else:
return context.window_manager.invoke_props_dialog(self, width=550)
def draw(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
i = mc_find_index(obj.mc_properties,[self.name,self.path,self.id])
layout = self.layout
layout.prop(self, "name")
layout.prop(self, "icon")
layout.prop(self, "section")
layout.separator()
layout.label(text="Property info", icon="INFO")
box = layout.box()
box.label(text="Identifier: "+self.id)
if settings.ms_debug:
layout.label(text="Full path", icon="RNA")
box = layout.box()
box.label(text=self.path+'.'+self.id)
if len(obj.mc_properties[i].linked_props)>0:
layout.separator()
layout.label(text="Linked Properties", icon="LINKED")
box = layout.box()
for prop in obj.mc_properties[i].linked_props:
row = box.row()
row.label(text=prop.path + '.' + prop.id, icon="DOT")
link_del_op = row.operator(MC_RemoveLinkedProperty.bl_idname, icon="X")
link_del_op.prop_index = i
link_del_op.link_id = prop.id
link_del_op.link_path = prop.path
# Swap Properties Operator
class MC_SwapProperty(bpy.types.Operator):
"""Change the position of the property"""
bl_idname = "mc.swapprops"
bl_label = "Change the property position"
mod : BoolProperty(default=False) # False = down, True = Up
name : bpy.props.StringProperty()
path : bpy.props.StringProperty()
id : bpy.props.StringProperty()
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
col = sorted(obj.mc_properties, key = mc_prop_ID)
col_len = mc_len_collection(col)
i = mc_find_index(col,[self.name,self.path,self.id])
if i>=0:
if self.mod:
j=i
while j>0:
j = j - 1
if col[j].section==col[i].section:
break
if j>-1:
col[i].mc_id = j
col[j].mc_id = i
else:
j=i
while j<col_len-1:
j=j+1
if col[j].section==col[i].section:
break
if j<col_len:
col[i].mc_id = j
col[j].mc_id = i
return {'FINISHED'}
# Operator to remove a property (button in UI)
class MC_RemoveProperty(bpy.types.Operator):
"""Remove the property from the current menu"""
bl_idname = "mc.removeproperty"
bl_label = "Remove the property"
path : bpy.props.StringProperty()
id : bpy.props.StringProperty()
@classmethod
def poll(cls, context):
return context.active_object is not None
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
props = obj.mc_properties
mc_remove_property_item(obj.mc_properties,['',self.path,self.id])
return {'FINISHED'}
# Operator to add a new section
class MC_AddSection(bpy.types.Operator):
"""Add a new section to the section list."""
bl_idname = "mc.addsection"
bl_label = "Add section"
bl_icon = "PREFERENCES"
bl_options = {'UNDO'}
name : bpy.props.StringProperty(name='Name',
description="Choose the name of the section", default = "Section")
icon : bpy.props.EnumProperty(name='Icon',
description="Choose the icon.\nNote that the icon name MUST respect Blender convention. All the icons can be found in the Icon Viewer default Blender addon",items=mc_icon_list)
collapsable : bpy.props.BoolProperty(name="Collapsable",
description="Add a collapse button near the name of the section")
type : bpy.props.EnumProperty(name='Type',
description="Choose the section type",items=mc_section_type_list)
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
sec_obj = obj.mc_sections
sec_len = mc_len_collection(sec_obj)
if self.name!="":
i=True
j=-1
for el in sec_obj:
j=j+1
if el.name == self.name:
i=False
break
if i:
add_item = sec_obj.add()
add_item.name = self.name
add_item.type = self.type
add_item.icon = self.icon
add_item.collapsable = self.collapsable
add_item.id = sec_len
self.report({'INFO'}, 'Menu Creator - Section \'' + self.name +'\' created.')
else:
                self.report({'WARNING'}, 'Menu Creator - Cannot create sections with the same name.')
else:
self.report({'ERROR'}, 'Menu Creator - Cannot create sections with this name.')
return {'FINISHED'}
def invoke(self, context, event):
settings = bpy.context.scene.mc_settings
if settings.ms_debug:
return context.window_manager.invoke_props_dialog(self, width=550)
else:
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
settings = bpy.context.scene.mc_settings
layout = self.layout
scale = 3.0
row=layout.row()
row.label(text="Name:")
row.scale_x=scale
row.prop(self, "name", text="")
row=layout.row()
row.label(text="Icon:")
row.scale_x=scale
row.prop(self, "icon", text="")
row=layout.row()
row.label(text="")
row.scale_x=scale
row.prop(self, "collapsable")
layout.separator()
row=layout.row()
row.label(text="Type:")
row.scale_x=scale
row.prop(self, "type", text="")
# Section Property settings
class MC_SectionSettings(bpy.types.Operator):
"""Modify the section settings."""
bl_idname = "mc.sectionsettings"
bl_label = "Section settings"
bl_icon = "PREFERENCES"
bl_options = {'UNDO'}
name : bpy.props.StringProperty(name='Name',
description="Choose the name of the section")
icon : bpy.props.EnumProperty(name='Icon',
description="Choose the icon.\nNote that the icon name MUST respect Blender convention. All the icons can be found in the Icon Viewer default Blender addon.",items=mc_icon_list)
collapsable : bpy.props.BoolProperty(name="Collapsable",
description="Add a collapse button near the name of the section")
type : bpy.props.EnumProperty(name='Type',
description="The Section type can not be changed after creation",items=mc_section_type_list)
# COLLECTION type settings
collections_enable_global_smoothcorrection : bpy.props.BoolProperty(name="Enable Global Smooth Correction")
collections_enable_global_shrinkwrap : bpy.props.BoolProperty(name="Enable Global Shrinkwrap")
collections_enable_global_mask : bpy.props.BoolProperty(name="Enable Global Mask")
collections_enable_global_normalautosmooth : bpy.props.BoolProperty(name="Enable Global Normal Auto Smooth")
# Outfit variant
outfit_enable : bpy.props.BoolProperty(name="Outfit", description="With this option a Body entry will be added to the Section. This Body's masks will be enabled when elements of the collections are shown, and viceversa, if the masks are called the same name as the element of the collection")
name_edit : bpy.props.StringProperty(name='Name',
description="Choose the name of the section")
ID : bpy.props.IntProperty()
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
prop_obj = obj.mc_properties
sec_obj = obj.mc_sections
i = mc_find_index_section(sec_obj,self.name)
if i>=0:
for el in prop_obj:
if el.section == self.name:
el.section = self.name_edit
sec_obj[i].name = self.name_edit
sec_obj[i].icon = self.icon
sec_obj[i].collapsable = self.collapsable
sec_obj[i].collections_enable_global_smoothcorrection = self.collections_enable_global_smoothcorrection
sec_obj[i].collections_enable_global_shrinkwrap = self.collections_enable_global_shrinkwrap
sec_obj[i].collections_enable_global_mask = self.collections_enable_global_mask
sec_obj[i].collections_enable_global_normalautosmooth = self.collections_enable_global_normalautosmooth
sec_obj[i].outfit_enable = self.outfit_enable
if obj.type == "MESH":
sec_obj[i].outfit_body = obj
return {'FINISHED'}
def invoke(self, context, event):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
sec_obj = obj.mc_sections
self.name_edit = self.name
self.ID = mc_find_index_section(sec_obj,self.name)
self.collapsable = sec_obj[self.ID].collapsable
self.collections_enable_global_smoothcorrection = sec_obj[self.ID].collections_enable_global_smoothcorrection
self.collections_enable_global_shrinkwrap = sec_obj[self.ID].collections_enable_global_shrinkwrap
self.collections_enable_global_mask = sec_obj[self.ID].collections_enable_global_mask
self.collections_enable_global_normalautosmooth = sec_obj[self.ID].collections_enable_global_normalautosmooth
self.outfit_enable = sec_obj[self.ID].outfit_enable
return context.window_manager.invoke_props_dialog(self)
def draw(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
sec_obj = obj.mc_sections
scale = 3.0
layout = self.layout
row=layout.row()
row.label(text="Name:")
row.scale_x=scale
row.prop(self, "name_edit", text="")
row=layout.row()
row.label(text="Icon:")
row.scale_x=scale
row.prop(self, "icon", text="")
row=layout.row()
row.label(text="")
row.scale_x=scale
row.prop(self, "collapsable")
layout.separator()
col = layout.column()
col.enabled = False
col.prop(self, "type")
if self.type == "COLLECTION":
layout.separator()
row = layout.row()
row.label(text="")
row.scale_x = 3
row.prop(self,"collections_enable_global_smoothcorrection")
row = layout.row()
row.label(text="")
row.scale_x = 3
row.prop(self,"collections_enable_global_shrinkwrap")
row = layout.row()
row.label(text="")
row.scale_x = 3
row.prop(self,"collections_enable_global_mask")
row = layout.row()
row.label(text="")
row.scale_x = 3
row.prop(self,"collections_enable_global_normalautosmooth")
layout.separator()
row = layout.row()
row.label(text="")
row.scale_x = 3
row.prop(self,"outfit_enable")
# Operator to change Section position
class MC_SwapSection(bpy.types.Operator):
"""Change the position of the section"""
bl_idname = "mc.swapsections"
bl_label = "Change the section position"
mod : BoolProperty(default=False) # False = down, True = Up
name : bpy.props.StringProperty()
icon : bpy.props.StringProperty()
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
col = obj.mc_sections
col_len = mc_len_collection(col)
sec_index = mc_find_index_section(col,self.name)
i = col[sec_index].id
if self.mod and i > 1:
j = mc_find_index_section_fromID(col, i-1)
col[sec_index].id = i-1
col[j].id = i
elif not self.mod and i < col_len-1:
j = mc_find_index_section_fromID(col, i+1)
col[sec_index].id = i+1
col[j].id = i
return {'FINISHED'}
# Delete Section
class MC_DeleteSection(bpy.types.Operator):
"""Delete Section"""
bl_idname = "mc.deletesection"
bl_label = "Section settings"
bl_options = {'UNDO'}
name : bpy.props.StringProperty(name='Name',
description="Choose the name of the section")
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
sec_obj = obj.mc_sections
i=-1
for el in sec_obj:
i=i+1
if el.name == self.name:
break
if i>=0:
j = sec_obj[i].id
for k in range(j+1,len(sec_obj)):
sec_obj[mc_find_index_section_fromID(sec_obj, k)].id = k-1
sec_obj.remove(i)
self.report({'INFO'}, 'Menu Creator - Section \'' + self.name +'\' deleted.')
return {'FINISHED'}
# Operator to switch visibility of an object
class MC_CollectionObjectVisibility(bpy.types.Operator):
    """Change the visibility of the selected object"""
bl_idname = "mc.colobjvisibility"
bl_label = "Hide/Unhide Object visibility"
bl_options = {'UNDO'}
obj : bpy.props.StringProperty()
sec : bpy.props.StringProperty()
def execute(self, context):
bpy.data.objects[self.obj].hide_viewport = not bpy.data.objects[self.obj].hide_viewport
bpy.data.objects[self.obj].hide_render = not bpy.data.objects[self.obj].hide_render
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
body_obj = settings.em_fixobj_pointer
else:
body_obj = context.active_object
sec_obj = body_obj.mc_sections
i = mc_find_index_section(sec_obj,self.sec)
if sec_obj[i].outfit_enable:
if sec_obj[i].outfit_body:
for modifier in sec_obj[i].outfit_body.modifiers:
if modifier.type == "MASK" and self.obj in modifier.name and sec_obj[i].collections_global_mask:
modifier.show_viewport = not bpy.data.objects[self.obj].hide_viewport
modifier.show_render = not bpy.data.objects[self.obj].hide_viewport
else:
self.report({'WARNING'}, 'Menu Creator - Outfit Body has not been specified.')
return {'FINISHED'}
# Operator to delete a collection
class MC_RemoveCollection(bpy.types.Operator):
"""Remove the selected collection from the Menu.\nThe collection will NOT be deleted"""
bl_idname = "mc.deletecollection"
bl_label = "Remove the selected collection from the menu"
bl_options = {'UNDO'}
col : bpy.props.StringProperty()
sec : bpy.props.StringProperty()
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
sec_obj = obj.mc_sections
sec_index = mc_find_index_section(sec_obj,self.sec)
i = 0
for el in sec_obj[sec_index].collections:
if el.collection.name == self.col:
sec_obj[sec_index].collections.remove(i)
break
i = i + 1
self.report({'INFO'}, 'Menu Creator - Collection removed from the Menu.')
return {'FINISHED'}
# Initial Configuration Operator
class MC_InitialConfiguration(bpy.types.Operator):
"""Clean all the object properties"""
bl_idname = "mc.initialconfig"
bl_label = "Clean all the properties"
def execute(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
mc_clean_single_sections(obj)
mc_clean_single_properties(obj)
add_item = obj.mc_sections.add()
add_item.id = 0
add_item.name = "Unsorted"
add_item.icon = "LIBRARY_DATA_BROKEN"
obj.mc_enable = True
self.report({'INFO'}, 'Menu Creator - Menu for \''+obj.name+'\' successfully created.')
return {'FINISHED'}
# USER INTERFACE
# Poll functions
@classmethod
def mc_panel_poll(cls, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
return obj.mc_enable
# User Interface Panels
class MainPanel:
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
bl_category = "Menu"
class PT_MenuCreator_InitialConfiguration_Panel(MainPanel, bpy.types.Panel):
bl_idname = "PT_MenuCreator_InitialConfiguration_Panel"
bl_label = "Initial Configuration"
@classmethod
def poll(cls, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
if obj is not None:
return not obj.mc_enable
else:
return False
def draw(self, context):
layout = self.layout
layout.label(text="Menu Configuration")
layout.operator('mc.initialconfig', text="Create Menu")
class PT_MenuCreator_Panel(MainPanel, bpy.types.Panel):
bl_idname = "PT_MenuCreator_Panel"
bl_label = "Menu"
@classmethod
def poll(cls, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
if obj is not None:
return obj.mc_enable
else:
return False
def draw(self, context):
settings = bpy.context.scene.mc_settings
if settings.em_fixobj:
obj = settings.em_fixobj_pointer
else:
obj = context.active_object
mc_col = obj.mc_properties
mcs_col = obj.mc_sections
mc_col_len = mc_len_collection(mc_col)
mcs_col_len = mc_len_collection(mcs_col)
layout = self.layout
row = layout.row(align=False)
        menu_name = settings.mss_name
if settings.mss_obj_name:
menu_name = menu_name+obj.name
row.label(text=menu_name)
if settings.ms_editmode:
row.prop(obj, "mc_edit_enable", text="",icon="MODIFIER")
row.operator("mc.addsection",text="",icon="ADD")
if settings.em_fixobj:
row.prop(settings,"em_fixobj",icon="PINNED", text="")
else:
row.prop(settings,"em_fixobj",icon="UNPINNED", text= "")
else:
if settings.em_fixobj:
row.prop(settings,"em_fixobj",icon="PINNED", text="")
else:
row.prop(settings,"em_fixobj",icon="UNPINNED", text= "")
if mcs_col_len>1:
for sec in sorted(mcs_col, key = mc_sec_ID):
if sec.type == "DEFAULT":
sec_empty = True
sec_hidden = True
for el in mc_col:
if el.section == sec.name:
sec_empty = False
if not el.hide:
sec_hidden = False
if (sec_empty and sec.name == "Unsorted") or (not obj.mc_edit_enable and not sec_empty and sec_hidden):
continue
else:
row = layout.row(align=False)
if sec.collapsable:
row.prop(sec, "collapsed", icon="TRIA_DOWN" if not sec.collapsed else "TRIA_RIGHT", icon_only=True, emboss=False)
if sec.icon == "NONE":
row.label(text=sec.name)
else:
row.label(text=sec.name,icon=sec.icon)
if obj.mc_edit_enable:
if sec.name != "Unsorted":
ssett_button = row.operator("mc.sectionsettings", icon="PREFERENCES", text="")
ssett_button.name = sec.name
ssett_button.icon = sec.icon
ssett_button.type = sec.type
row2 = row.row(align=True)
sup_button = row2.operator("mc.swapsections", icon="TRIA_UP", text="")
sup_button.mod = True
sup_button.name = sec.name
sup_button.icon = sec.icon
sdown_button = row2.operator("mc.swapsections", icon="TRIA_DOWN", text="")
sdown_button.mod = False
sdown_button.name = sec.name
sdown_button.icon = sec.icon
if not sec.collapsed:
box = layout.box()
if sec_empty and sec.name != "Unsorted":
row = box.row(align=False)
row.label(text="Section Empty", icon="ERROR")
row.operator("mc.deletesection",text="",icon="X").name = sec.name
if not sec.collapsed:
for el in sorted(mc_col, key = mc_prop_ID):
if el.section == sec.name:
el_index = mc_find_index(mc_col,[el.name,el.path,el.id])
if obj.mc_edit_enable:
row = box.row(align=False)
if el.icon !="NONE":
row.label(text=el.name,icon=el.icon)
else:
row.label(text=el.name)
sett_button = row.operator("mc.propsettings", icon="PREFERENCES", text="")
sett_button.name = el.name
sett_button.path = el.path
sett_button.id = el.id
sett_button.icon = el.icon
sett_button.section = el.section
row2 = row.row(align=True)
up_button = row2.operator("mc.swapprops", icon="TRIA_UP", text="")
up_button.mod = True
up_button.name = el.name
up_button.path = el.path
up_button.id = el.id
down_button = row2.operator("mc.swapprops", icon="TRIA_DOWN", text="")
down_button.mod = False
down_button.name = el.name
down_button.path = el.path
down_button.id = el.id
if el.hide:
row.prop(el, "hide", text="", icon = "HIDE_ON")
else:
row.prop(el, "hide", text="", icon = "HIDE_OFF")
del_button = row.operator("mc.removeproperty", icon="X", text="")
del_button.path = el.path
del_button.id = el.id
else:
if not el.hide:
row = box.row(align=False)
if el.icon !="NONE":
row.label(text=el.name,icon=el.icon)
else:
row.label(text=el.name)
row.scale_x=1.0
row.prop(eval(el.path), el.id, text="")
elif sec.type == "COLLECTION":
sec_empty = True
for el in sec.collections:
sec_empty = False
break
row = layout.row(align=False)
if sec.collapsable:
row.prop(sec, "collapsed", icon="TRIA_DOWN" if not sec.collapsed else "TRIA_RIGHT", icon_only=True, emboss=False)
if sec.icon == "NONE":
row.label(text=sec.name)
else:
row.label(text=sec.name,icon=sec.icon)
if obj.mc_edit_enable:
ssett_button = row.operator("mc.sectionsettings", icon="PREFERENCES", text="")
ssett_button.name = sec.name
ssett_button.icon = sec.icon
ssett_button.type = sec.type
row2 = row.row(align=True)
sup_button = row2.operator("mc.swapsections", icon="TRIA_UP", text="")
sup_button.mod = True
sup_button.name = sec.name
sup_button.icon = sec.icon
sdown_button = row2.operator("mc.swapsections", icon="TRIA_DOWN", text="")
sdown_button.mod = False
sdown_button.name = sec.name
sdown_button.icon = sec.icon
row.operator("mc.deletesection",text="",icon="X").name = sec.name
if not sec.collapsed and len(sec.collections)>0:
box = layout.box()
if sec.outfit_enable:
box.prop(sec,"outfit_body", text="Body", icon="OUTLINER_OB_MESH")
if len(sec.collections)>0:
box.label(text="Collection List", icon="OUTLINER_COLLECTION")
box = box.box()
for collection in sec.collections:
row = box.row()
row.label(text=collection.collection.name)
del_col = row.operator("mc.deletecollection",text="",icon="X")
del_col.sec = sec.name
del_col.col = collection.collection.name
else:
if not sec.collapsed:
box = layout.box()
if sec_empty:
row = box.row(align=False)
row.label(text="No Collection Assigned", icon="ERROR")
row.operator("mc.deletesection",text="",icon="X").name = sec.name
if len(sec.collections)>0:
box.prop(sec,"collections_list", text="")
box2 = box.box()
if len(bpy.data.collections[sec.collections_list].objects)>0:
for obj2 in bpy.data.collections[sec.collections_list].objects:
row = box2.row()
if obj2.hide_viewport:
vop=row.operator("mc.colobjvisibility",text=obj2.name, icon='OUTLINER_OB_'+obj2.type)
vop.obj = obj2.name
vop.sec = sec.name
else:
vop = row.operator("mc.colobjvisibility",text=obj2.name, icon='OUTLINER_OB_'+obj2.type, depress = True)
vop.obj = obj2.name
vop.sec = sec.name
else:
box2.label(text="This Collection seems empty", icon="ERROR")
if sec.collections_enable_global_smoothcorrection or sec.collections_enable_global_shrinkwrap or sec.collections_enable_global_mask or sec.collections_enable_global_normalautosmooth:
box.label(text= "Global Properties", icon="MODIFIER")
box2 = box.box()
if sec.collections_enable_global_smoothcorrection:
box2.prop(sec,"collections_global_smoothcorrection")
if sec.collections_enable_global_shrinkwrap:
box2.prop(sec,"collections_global_shrinkwrap")
if sec.collections_enable_global_mask:
box2.prop(sec,"collections_global_mask")
if sec.collections_enable_global_normalautosmooth:
box2.prop(sec,"collections_global_normalautosmooth")
else:
box = layout.box()
box.label(text="No section added.",icon="ERROR")
class PT_MenuCreator_Settings_Panel(MainPanel, bpy.types.Panel):
bl_idname = "PT_MenuCreator_Settings_Panel"
bl_label = "Settings"
def draw(self, context):
settings = bpy.context.scene.mc_settings
layout = self.layout
# Main Settings
layout.label(text="Main Settings",icon="SETTINGS")
box = layout.box()
box.prop(settings,"ms_editmode")
box.prop(settings,"ms_debug")
box.prop(settings,"ms_advanced")
# Menu specific settings
layout.label(text="Menu Settings",icon="SETTINGS")
box = layout.box()
box.prop(settings,"mss_name")
box.prop(settings,"mss_obj_name")
layout.label(text="Reset functions",icon="SETTINGS")
box = layout.box()
box.operator('mc.cleanpropobj', text="Reset Object", icon="ERROR").reset = True
box.operator('mc.cleanprop', text="Reset All Objects", icon="ERROR").reset = True
# Handlers
@persistent
def mc_scene_modification_handler(scene):
"""Called at every modification done to the scene."""
for obj in bpy.data.objects:
# Handler for linked custom properties
for prop in obj.mc_properties:
for link_prop in prop.linked_props:
                # A bracketed custom-property id (path ends with ']', id starts
                # with '[') is concatenated to the path directly; attribute ids
                # are joined with a '.' separator.
                if '].[' in link_prop.path + '.' + link_prop.id:
                    exec(link_prop.path + link_prop.id + '=' + prop.path + '.' + prop.id)
                else:
                    exec(link_prop.path + '.' + link_prop.id + '=' + prop.path + '.' + prop.id)
# Part checking for changes in the list collection
# This is needed to ensure a clean list against deletion of collections from the outliner
for sec in obj.mc_sections:
            # Iterate indices in reverse so a removal does not shift the
            # positions of entries that have not been checked yet
            for i in reversed(range(len(sec.collections))):
                if not hasattr(sec.collections[i].collection, 'name'):
                    sec.collections.remove(i)
# Register
classes = (
MC_AddProperty,
MC_LinkProperty,
WM_MT_button_context,
MC_RemoveProperty,
MC_CleanAll,
MC_CleanObject,
MC_RemoveLinkedProperty,
MC_PropertySettings,
MC_SwapProperty,
MC_AddSection,
MC_AddCollection,
MC_RemoveCollection,
MC_SectionSettings,
MC_SwapSection,
MC_DeleteSection,
MC_CollectionObjectVisibility,
MC_InitialConfiguration,
OUTLINER_MT_link_mcmenu,
OUTLINER_MT_collection_mcmenu,
PT_MenuCreator_InitialConfiguration_Panel,
PT_MenuCreator_Panel,
PT_MenuCreator_Settings_Panel
)
def register():
from bpy.utils import register_class
for cls in classes:
register_class(cls)
bpy.types.WM_MT_button_context.append(menu_func)
bpy.types.WM_MT_button_context.append(menu_func_link)
bpy.types.OUTLINER_MT_collection.append(mc_collection_menu)
# Handlers
bpy.app.handlers.depsgraph_update_post.append(mc_scene_modification_handler)
bpy.app.handlers.redo_post.append(mc_scene_modification_handler)
bpy.app.handlers.undo_post.append(mc_scene_modification_handler)
def unregister():
from bpy.utils import unregister_class
for cls in reversed(classes):
unregister_class(cls)
bpy.types.WM_MT_button_context.remove(menu_func)
bpy.types.WM_MT_button_context.remove(menu_func_link)
bpy.types.OUTLINER_MT_collection.remove(mc_collection_menu)
# Handlers
bpy.app.handlers.depsgraph_update_post.remove(mc_scene_modification_handler)
bpy.app.handlers.redo_post.remove(mc_scene_modification_handler)
bpy.app.handlers.undo_post.remove(mc_scene_modification_handler)
if __name__ == "__main__":
register()
# =============================================================================
# repo: Sahanduiuc/hrp
import numpy as np
def correlation_from_covariance(covariance):
# see https://gist.github.com/wiso/ce2a9919ded228838703c1c7c7dad13b
v = np.sqrt(np.diag(covariance))
return covariance / np.outer(v, v)
def bilinear(A, x):
return np.linalg.multi_dot((x, A, x))
def sub(A, idx):
return A[idx, :][:, idx]
def dist(cov):
cor = correlation_from_covariance(cov)
    # Floating-point error can push (1.0 - cor) slightly below zero when a
    # correlation is exactly 1.0, which would make the square root below NaN,
    # so any negative entries are clamped to zero.
    dist = ((1.0 - cor) / 2.)
    dist[dist < 0] = 0.0
return dist ** .5
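if __name__ == '__main__':
    # Quick illustrative check on a synthetic 2x2 covariance matrix
    # (numbers made up for demonstration only).
    cov = np.array([[4.0, 1.2],
                    [1.2, 1.0]])
    print(correlation_from_covariance(cov))  # off-diagonals: 1.2 / (2 * 1) = 0.6
    print(dist(cov))                          # off-diagonals: sqrt((1 - 0.6) / 2) ~ 0.447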
# =============================================================================
import torch
from torch.nn.utils.clip_grad import clip_grad_norm_, clip_grad_value_
from maml.utils import accuracy
def get_grad_norm(parameters, norm_type=2):
if isinstance(parameters, torch.Tensor):
parameters = [parameters]
parameters = list(filter(lambda p: p.grad is not None, parameters))
norm_type = float(norm_type)
total_norm = 0
for p in parameters:
param_norm = p.grad.data.norm(norm_type)
total_norm += param_norm.item() ** norm_type
total_norm = total_norm ** (1. / norm_type)
return total_norm
class MetaLearner(object):
def __init__(self, model, embedding_model, optimizers, fast_lr, loss_func,
first_order, num_updates, inner_loop_grad_clip,
collect_accuracies, device, embedding_grad_clip=0,
model_grad_clip=0):
self._model = model
self._embedding_model = embedding_model
self._fast_lr = fast_lr
self._optimizers = optimizers
self._loss_func = loss_func
self._first_order = first_order
self._num_updates = num_updates
self._inner_loop_grad_clip = inner_loop_grad_clip
self._collect_accuracies = collect_accuracies
self._device = device
self._embedding_grad_clip = embedding_grad_clip
self._model_grad_clip = model_grad_clip
self._grads_mean = []
self.to(device)
self._reset_measurements()
def _reset_measurements(self):
self._count_iters = 0.0
self._cum_loss = 0.0
self._cum_accuracy = 0.0
def _update_measurements(self, task, loss, preds):
self._count_iters += 1.0
self._cum_loss += loss.data.cpu().numpy()
if self._collect_accuracies:
self._cum_accuracy += accuracy(
preds, task.y).data.cpu().numpy()
def _pop_measurements(self):
measurements = {}
loss = self._cum_loss / (self._count_iters + 1e-32)
measurements['loss'] = loss
        if self._collect_accuracies:
            measurements['accuracy'] = self._cum_accuracy / (self._count_iters + 1e-32)
self._reset_measurements()
return measurements
def measure(self, tasks, train_tasks=None, adapted_params_list=None,
embeddings_list=None):
"""Measures performance on tasks. Either train_tasks has to be a list
of training task for computing embeddings, or adapted_params_list and
embeddings_list have to contain adapted_params and embeddings"""
if adapted_params_list is None:
adapted_params_list = [None] * len(tasks)
if embeddings_list is None:
embeddings_list = [None] * len(tasks)
for i in range(len(tasks)):
params = adapted_params_list[i]
if params is None:
params = self._model.param_dict
embeddings = embeddings_list[i]
task = tasks[i]
preds = self._model(task, params=params, embeddings=embeddings)
loss = self._loss_func(preds, task.y)
self._update_measurements(task, loss, preds)
measurements = self._pop_measurements()
return measurements
def update_params(self, loss, params):
"""Apply one step of gradient descent on the loss function `loss`,
with step-size `self._fast_lr`, and returns the updated parameters.
"""
create_graph = not self._first_order
grads = torch.autograd.grad(loss, params.values(),
create_graph=create_graph, allow_unused=True)
for (name, param), grad in zip(params.items(), grads):
if self._inner_loop_grad_clip > 0 and grad is not None:
grad = grad.clamp(min=-self._inner_loop_grad_clip,
max=self._inner_loop_grad_clip)
if grad is not None:
params[name] = param - self._fast_lr * grad
return params
def adapt(self, train_tasks, return_task_embedding=False):
adapted_params = []
embeddings_list = []
task_embeddings_list = []
for task in train_tasks:
params = self._model.param_dict
embeddings = None
if self._embedding_model:
if return_task_embedding:
embeddings, task_embedding = self._embedding_model(
task, return_task_embedding=True)
task_embeddings_list.append(task_embedding)
else:
embeddings = self._embedding_model(
task, return_task_embedding=False)
for i in range(self._num_updates):
preds = self._model(task, params=params, embeddings=embeddings)
loss = self._loss_func(preds, task.y)
params = self.update_params(loss, params=params)
if i == 0:
self._update_measurements(task, loss, preds)
adapted_params.append(params)
embeddings_list.append(embeddings)
measurements = self._pop_measurements()
if return_task_embedding:
return measurements, adapted_params, embeddings_list, task_embeddings_list
else:
return measurements, adapted_params, embeddings_list
def step(self, adapted_params_list, embeddings_list, val_tasks,
is_training):
for optimizer in self._optimizers:
optimizer.zero_grad()
post_update_losses = []
for adapted_params, embeddings, task in zip(
adapted_params_list, embeddings_list, val_tasks):
preds = self._model(task, params=adapted_params,
embeddings=embeddings)
loss = self._loss_func(preds, task.y)
post_update_losses.append(loss)
self._update_measurements(task, loss, preds)
mean_loss = torch.mean(torch.stack(post_update_losses))
if is_training:
self._optimizers[0].zero_grad()
if len(self._optimizers) > 1:
self._optimizers[1].zero_grad()
mean_loss.backward()
if len(self._optimizers) > 1:
if self._embedding_grad_clip > 0:
_grad_norm = clip_grad_norm_(
self._embedding_model.parameters(), self._embedding_grad_clip)
else:
_grad_norm = get_grad_norm(
self._embedding_model.parameters())
                # track the embedding model's gradient norm for monitoring
self._grads_mean.append(_grad_norm)
self._optimizers[1].step()
if self._model_grad_clip > 0:
_grad_norm = clip_grad_norm_(
self._model.parameters(), self._model_grad_clip)
self._optimizers[0].step()
measurements = self._pop_measurements()
return measurements
def to(self, device, **kwargs):
self._device = device
self._model.to(device, **kwargs)
if self._embedding_model:
self._embedding_model.to(device, **kwargs)
def state_dict(self):
state = {
'model_state_dict': self._model.state_dict(),
'optimizers': [optimizer.state_dict() for optimizer in self._optimizers]
}
if self._embedding_model:
state.update(
{'embedding_model_state_dict':
self._embedding_model.state_dict()})
return state
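if __name__ == '__main__':
    # Minimal illustrative check of get_grad_norm on a toy parameter
    # (a sketch only; not part of the original training pipeline).
    w = torch.nn.Parameter(torch.ones(3))
    (w * 2).sum().backward()
    print(get_grad_norm([w]))  # grad is [2, 2, 2], so the L2 norm is sqrt(12) ~ 3.46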
# =============================================================================
#!/usr/bin/env python3
import argparse
from .helper import OpenShiftDeployHelper
class OpenShiftDeployCLI(object):
def __init__(self):
self.parser = argparse.ArgumentParser()
self.subparsers = self.parser.add_subparsers()
self.parent_parser = argparse.ArgumentParser(add_help=False)
self.parent_parser.add_argument(
'--skip-update',
action='store_true',
            help='skip automatic update of the container image'
)
self.parent_parser_playbook = argparse.ArgumentParser(add_help=False)
self.parent_parser_playbook.add_argument(
'--vars-file',
required=True,
type=OpenShiftDeployHelper.file_exists,
help='path to your variables file'
)
self._add_subparser(
'shell',
help='open a shell inside of the environment'
)
self._add_subparser_playbook(
'create',
help='create the cluster',
)
self._add_subparser_playbook(
'destroy',
help='destroy the cluster'
)
self._add_subparser_playbook(
'start',
help='start the machines in your cluster'
)
self._add_subparser_playbook(
'stop',
help='stop the machines in your cluster'
)
self._add_subparser_playbook(
'info',
help='info about your cluster'
)
self._add_subparser_playbook(
'ssh',
help='ssh into the bastion for your cluster'
)
def _add_subparser(self, name, help=''):
parser = self.subparsers.add_parser(
name,
parents=[
self.parent_parser,
],
help=help,
)
parser.set_defaults(action=name)
return parser
def _add_subparser_playbook(self, name, help=''):
parser = self.subparsers.add_parser(
name,
parents=[
self.parent_parser,
self.parent_parser_playbook,
],
help=help,
)
parser.set_defaults(action=name)
return parser
def parse_known_args(self):
known_args, extra_args = self.parser.parse_known_args()
if not hasattr(known_args, 'action'):
self.parser.print_help()
exit(1)
return known_args, extra_args
# =============================================================================
from aetherling.helpers.nameCleanup import cleanName
from magma import *
from magma.frontend.coreir_ import GetCoreIRBackend
from aetherling.modules.hydrate import Dehydrate, Hydrate
from mantle.coreir.memory import DefineRAM, getRAMAddrWidth
__all__ = ['DefineRAMAnyType', 'RAMAnyType']
@cache_definition
def DefineRAMAnyType(t: Kind, n: int, read_latency = 0):
"""
    Generate a RAM that holds n entries of any type.
    RADDR : In(Bits[log2(n)]), RDATA : Out(t), WADDR : In(Bits[log2(n)]), WDATA : In(t), WE : In(Bit)
"""
class _RAM(Circuit):
name = 'RAM_{}t_{}n'.format(cleanName(str(t)), n)
addr_width = getRAMAddrWidth(n)
IO = ['RADDR', In(Bits[addr_width]),
'RDATA', Out(t),
'WADDR', In(Bits[addr_width]),
'WDATA', In(t),
'WE', In(Bit)
] + ClockInterface()
@classmethod
def definition(cls):
type_size_in_bits = GetCoreIRBackend().get_type(t).size
ram = DefineRAM(n, type_size_in_bits, read_latency=read_latency)()
type_to_bits = Dehydrate(t)
wire(cls.WDATA, type_to_bits.I)
wire(type_to_bits.out, ram.WDATA)
bits_to_type = Hydrate(t)
wire(ram.RDATA, bits_to_type.I)
wire(bits_to_type.out, cls.RDATA)
wire(cls.RADDR, ram.RADDR)
wire(ram.WADDR, cls.WADDR)
wire(cls.WE, ram.WE)
return _RAM
def RAMAnyType(t: Kind, n: int):
return DefineRAMAnyType(t, n)()
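# Illustrative use (a sketch only, assuming a configured CoreIR backend):
#   ram16x8 = DefineRAMAnyType(Bits[8], 16)   # 16 entries of 8-bit data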
# =============================================================================
#!/usr/bin/env python
import re
import unittest
import mock
import pytest
from cachet_url_monitor.configuration import HttpStatus, Regex
from cachet_url_monitor.configuration import Latency
class LatencyTest(unittest.TestCase):
def setUp(self):
self.expectation = Latency({'type': 'LATENCY', 'threshold': 1})
def test_init(self):
assert self.expectation.threshold == 1
def test_get_status_healthy(self):
def total_seconds():
return 0.1
request = mock.Mock()
elapsed = mock.Mock()
request.elapsed = elapsed
elapsed.total_seconds = total_seconds
assert self.expectation.get_status(request) == 1
def test_get_status_unhealthy(self):
def total_seconds():
return 2
request = mock.Mock()
elapsed = mock.Mock()
request.elapsed = elapsed
elapsed.total_seconds = total_seconds
assert self.expectation.get_status(request) == 2
def test_get_message(self):
def total_seconds():
return 0.1
request = mock.Mock()
elapsed = mock.Mock()
request.elapsed = elapsed
elapsed.total_seconds = total_seconds
assert self.expectation.get_message(request) == ('Latency above '
'threshold: 0.1000 seconds')
class HttpStatusTest(unittest.TestCase):
def setUp(self):
self.expectation = HttpStatus({'type': 'HTTP_STATUS', 'status_range': "200-300"})
def test_init(self):
assert self.expectation.status_range == (200, 300)
def test_init_with_one_status(self):
"""With only one value, we still expect a valid tuple"""
self.expectation = HttpStatus({'type': 'HTTP_STATUS', 'status_range': "200"})
assert self.expectation.status_range == (200, 201)
def test_init_with_invalid_number(self):
"""Invalid values should just fail with a ValueError, as we can't convert it to int."""
with pytest.raises(ValueError):
self.expectation = HttpStatus({'type': 'HTTP_STATUS', 'status_range': "foo"})
def test_get_status_healthy(self):
request = mock.Mock()
request.status_code = 200
assert self.expectation.get_status(request) == 1
def test_get_status_unhealthy(self):
request = mock.Mock()
request.status_code = 400
assert self.expectation.get_status(request) == 3
def test_get_message(self):
request = mock.Mock()
request.status_code = 400
assert self.expectation.get_message(request) == ('Unexpected HTTP '
'status (400)')
class RegexTest(unittest.TestCase):
def setUp(self):
self.expectation = Regex({'type': 'REGEX', 'regex': '.*(find stuff).*'})
def test_init(self):
assert self.expectation.regex == re.compile('.*(find stuff).*', re.UNICODE + re.DOTALL)
def test_get_status_healthy(self):
request = mock.Mock()
request.text = 'We could find stuff\n in this body.'
assert self.expectation.get_status(request) == 1
def test_get_status_unhealthy(self):
request = mock.Mock()
request.text = 'We will not find it here'
assert self.expectation.get_status(request) == 3
def test_get_message(self):
request = mock.Mock()
request.text = 'We will not find it here'
assert self.expectation.get_message(request) == ('Regex did not match '
'anything in the body')
# =============================================================================
"""
models.py
App Engine datastore models
"""
from google.appengine.ext import ndb
class Report(ndb.Model):
created_at = ndb.DateTimeProperty('c', auto_now=True)
modified_at = ndb.DateTimeProperty('m', auto_now_add=True)
google_places_id = ndb.StringProperty('g', required=True)
crowd_level = ndb.StringProperty('w', required=False)
comments = ndb.TextProperty('n', required=False)
ios_device_id = ndb.StringProperty('d', required=False)
photo_url = ndb.StringProperty('p', required=False)
tags = ndb.StringProperty('t', repeated=True, required=False)
class Search(ndb.Model):
created_at = ndb.DateTimeProperty('c', auto_now=True)
modified_at = ndb.DateTimeProperty('m', auto_now_add=True)
google_places_id = ndb.StringProperty('g', required=True)
ios_device_id = ndb.StringProperty('d', required=False)
name = ndb.StringProperty('n', required=False)
vicinity = ndb.StringProperty('v', required=False)
class Request(ndb.Model):
created_at = ndb.DateTimeProperty('c', auto_now=True)
modified_at = ndb.DateTimeProperty('m', auto_now_add=True)
google_places_id = ndb.StringProperty('g', required=True)
ios_device_id = ndb.StringProperty('d', required=False)
name = ndb.StringProperty('n', required=False)
vicinity = ndb.StringProperty('v', required=False)
questions = ndb.StringProperty('q', repeated=True, required=False)
more_questions = ndb.TextProperty('mq', required=False)
class Call(ndb.Model):
created_at = ndb.DateTimeProperty('c', auto_now=True)
modified_at = ndb.DateTimeProperty('m', auto_now_add=True)
google_places_id = ndb.StringProperty('g', required=True)
ios_device_id = ndb.StringProperty('d', required=False)
name = ndb.StringProperty('n', required=False)
vicinity = ndb.StringProperty('v', required=False)
# class Place(ndb.Model):
# created_at = ndb.DateTimeProperty('c', auto_now=True)
# modified_at = ndb.DateTimeProperty('m', auto_now_add=True)
# google_places_id = ndb.StringProperty('g', required=True) | StarcoderdataPython |
# =============================================================================
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 1 13:57:09 2019
@author: Tom
"""
import smtplib
import configparser
import logging
def send_email(config_values, logger, body, subject='Test email'):
''' Send an email from a python script '''
#ascii_body = body.encode('ascii', 'ignore')
try:
sent_from = config_values['gmail.com']['username']
send_to = [x.strip() for x in config_values['gmail.com']['destination_email'].split(',')]
except KeyError:
return False
email_text = "From: %s\r\n" \
"To: %s\r\n" \
"Subject: %s\r\n" \
"\r\n" \
"%s\n\r" % \
(sent_from, ", ".join(send_to), subject, body)
try:
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
logger.debug("Sent Hello")
server.starttls()
server.login(config_values['gmail.com']['username'],
config_values['gmail.com']['password'])
logger.debug("Logged in")
server.sendmail(sent_from, send_to, email_text.encode('utf-8'))
logger.info("Sent email")
server.close()
return True
except smtplib.SMTPException as e_value:
logger.error('Something went wrong, %s', str(e_value))
return False
if __name__ == "__main__":
CONFIG = configparser.ConfigParser()
CONFIG.read('inat_add_obs2project.ini')
LOGGER = logging.getLogger()
LOG_FORMATTER = logging.Formatter("[%(levelname)-5.5s] %(message)s")
CONSOLE_HANDLER = logging.StreamHandler()
CONSOLE_HANDLER.setFormatter(LOG_FORMATTER)
LOGGER.addHandler(CONSOLE_HANDLER)
LOGGER.setLevel(CONFIG['DEFAULT']['loggingLevel'])
send_email(CONFIG, LOGGER, "Test body", subject="test subject")
# =============================================================================
coco_file = 'yolo/darknet/coco.names'
yolo_cfg_file = 'yolo/darknet/yolov3-tiny.cfg'
yolo_weights_file = 'yolo/darknet/yolov3-tiny.weights'
img_size = (320,320)
conf_threshold = 0.5
nms_threshold = 0.3
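if __name__ == '__main__':
    # Illustrative sketch: these settings plug straight into OpenCV's DNN API.
    # The .cfg/.weights files named above must exist on disk for this to run.
    import cv2
    net = cv2.dnn.readNetFromDarknet(yolo_cfg_file, yolo_weights_file)
    print(len(net.getLayerNames()), 'layers loaded')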
# =============================================================================
from replies import bot_reply, player_reply
class Mediator:
def __init__(self):
self.number_of_players = player_reply("number of players (1 or 2): ")
if self.number_of_players == 1:
self.human = input("select marker (X or O) : ").upper()
else:
self.human = ""
def x_input(self, X_positions, O_positions):
return self.__get_input("X", X_positions, O_positions)
def o_input(self, X_positions, O_positions):
return self.__get_input("O", X_positions, O_positions)
def __get_input(self, input, X_positions, O_positions):
if self.number_of_players == 2 or self.human == input:
return player_reply(input + " move : ")
        else:
            return bot_reply(X_positions, O_positions)
# =============================================================================
# repo: osoco/better-ways-of-thinking-about-software
"""
Video player in the courseware.
"""
import logging
from bok_choy.javascript import js_defined, wait_for_js
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise
log = logging.getLogger('VideoPage')
VIDEO_BUTTONS = {
'transcript': '.language-menu',
'transcript_button': '.toggle-transcript',
'cc_button': '.toggle-captions',
'volume': '.volume',
'play': '.video_control.play',
'pause': '.video_control.pause',
'fullscreen': '.add-fullscreen',
'download_transcript': '.video-tracks > a',
'speed': '.speeds',
'quality': '.quality-control',
'do_not_show_again': '.skip-control',
'skip_bumper': '.play-skip-control',
}
CSS_CLASS_NAMES = {
'captions_closed': '.video.closed',
'captions_rendered': '.video.is-captions-rendered',
'captions': '.subtitles',
'captions_text': '.subtitles li span',
'captions_text_getter': '.subtitles li span[role="link"][data-index="{}"]',
'closed_captions': '.closed-captions',
'error_message': '.video .video-player .video-error',
'video_container': '.video',
'video_sources': '.video-player video source',
'video_spinner': '.video-wrapper .spinner',
'video_xmodule': '.xmodule_VideoBlock',
'video_init': '.is-initialized',
'video_time': '.vidtime',
'video_display_name': '.vert h3',
'captions_lang_list': '.langs-list li',
'video_speed': '.speeds .value',
'poster': '.poster',
'active_caption_text': '.subtitles-menu > li.current span',
}
VIDEO_MODES = {
'html5': '.video video',
'youtube': '.video iframe',
'hls': '.video video',
}
VIDEO_MENUS = {
'language': '.lang .menu',
'speed': '.speed .menu',
'download_transcript': '.video-tracks .a11y-menu-list',
'transcript-format': {
'srt': '.wrapper-download-transcripts .list-download-transcripts .btn-link[data-value="srt"]',
'txt': '.wrapper-download-transcripts .list-download-transcripts .btn-link[data-value="txt"]'
},
'transcript-skip': '.sr-is-focusable.transcript-start',
}
@js_defined('window.Video', 'window.jQuery', 'window.MathJax')
class VideoPage(PageObject):
"""
Video player in the courseware.
"""
url = None
current_video_display_name = None
@wait_for_js
def is_browser_on_page(self):
return self.q(css='div{}'.format(CSS_CLASS_NAMES['video_xmodule'])).present
@wait_for_js
def wait_for_video_class(self):
"""
Wait until element with class name `video` appeared in DOM.
"""
self.wait_for_ajax()
video_selector = '{}'.format(CSS_CLASS_NAMES['video_container'])
self.wait_for_element_presence(video_selector, 'Video is initialized')
@wait_for_js
def wait_for_video_player_render(self, autoplay=False):
"""
Wait until Video Player Rendered Completely.
"""
self.wait_for_video_class()
self.wait_for_element_presence(CSS_CLASS_NAMES['video_init'], 'Video Player Initialized')
self.wait_for_element_presence(CSS_CLASS_NAMES['video_time'], 'Video Player Initialized')
video_player_buttons = ['volume', 'fullscreen', 'speed']
if autoplay:
video_player_buttons.append('pause')
else:
video_player_buttons.append('play')
for button in video_player_buttons:
self.wait_for_element_visibility(VIDEO_BUTTONS[button], f'{button} button is visible')
def _is_finished_loading():
"""
Check if video loading completed.
Returns:
bool: Tells Video Finished Loading.
"""
return not self.q(css=CSS_CLASS_NAMES['video_spinner']).visible
EmptyPromise(_is_finished_loading, 'Finished loading the video', timeout=200).fulfill()
self.wait_for_ajax()
def get_video_vertical_selector(self, video_display_name=None):
"""
Get selector for a video vertical with display name specified by `video_display_name`.
Arguments:
video_display_name (str or None): Display name of a Video. Default vertical selector if None.
Returns:
str: Vertical Selector for video.
"""
if video_display_name:
video_display_names = self.q(css=CSS_CLASS_NAMES['video_display_name']).text
if video_display_name not in video_display_names:
raise ValueError(f"Incorrect Video Display Name: '{video_display_name}'")
return f'.vert.vert-{video_display_names.index(video_display_name)}'
else:
return '.vert.vert-0'
def get_element_selector(self, class_name, vertical=True):
"""
Construct unique element selector.
Arguments:
class_name (str): css class name for an element.
vertical (bool): do we need vertical css selector or not. vertical css selector is not present in Studio
Returns:
str: Element Selector.
"""
if vertical:
return '{vertical} {video_element}'.format(
vertical=self.get_video_vertical_selector(self.current_video_display_name),
video_element=class_name)
else:
return class_name
def use_video(self, video_display_name):
"""
Set current video display name.
Arguments:
video_display_name (str): Display name of a Video.
"""
self.current_video_display_name = video_display_name
def is_button_shown(self, button_id):
"""
Check if a video button specified by `button_id` is visible.
Arguments:
button_id (str): key in VIDEO_BUTTONS dictionary, its value will give us the css selector for button.
Returns:
            bool: True if the button is visible.
"""
selector = self.get_element_selector(VIDEO_BUTTONS[button_id])
return self.q(css=selector).visible
def show_captions(self):
"""
Make Captions Visible.
"""
self._captions_visibility(True)
def is_captions_visible(self):
"""
        Get the current visibility state of the captions.
Returns:
bool: True means captions are visible, False means captions are not visible
"""
self.wait_for_ajax()
caption_state_selector = self.get_element_selector(CSS_CLASS_NAMES['captions'])
return self.q(css=caption_state_selector).visible
@wait_for_js
def _captions_visibility(self, captions_new_state):
"""
Set the video captions visibility state.
Arguments:
captions_new_state (bool): True means show captions, False means hide captions
"""
states = {True: 'Shown', False: 'Hidden'}
state = states[captions_new_state]
# Make sure that the transcript button is there
EmptyPromise(lambda: self.is_button_shown('transcript_button'),
"transcript button is shown").fulfill()
# toggle captions visibility state if needed
if self.is_captions_visible() != captions_new_state:
self.click_player_button('transcript_button') # lint-amnesty, pylint: disable=no-member
# Verify that captions state is toggled/changed
EmptyPromise(lambda: self.is_captions_visible() == captions_new_state,
f"Transcripts are {state}").fulfill()
# =============================================================================
import wx
import wx.aui
from GraphicsCanvas import GraphicsCanvas
from TreeCanvas import TreeCanvas
from InputModeCanvas import InputModeCanvas
from PropertiesCanvas import PropertiesCanvas
from ObjPropsCanvas import ObjPropsCanvas
from Ribbon import Ribbon
from HeeksConfig import HeeksConfig
class Frame(wx.Frame):
def __init__(self, parent, id=-1, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE, name=wx.FrameNameStr):
wx.Frame.__init__(self, parent, id, '', pos, size, style, name)
config = HeeksConfig()
self.aui_manager = None
self.SetIcon(wx.Icon(wx.GetApp().cad_dir + "/heekscad.png", wx.BITMAP_TYPE_PNG))
self.aui_manager = wx.aui.AuiManager()
self.aui_manager.SetManagedWindow(self)
self.graphics_canvas = self.MakeGraphicsCanvas()
self.aui_manager.AddPane(self.graphics_canvas, wx.aui.AuiPaneInfo().Name('graphics').CentrePane().BestSize(wx.Size(800,800)))
self.tree_canvas = TreeCanvas(self)
self.aui_manager.AddPane(self.tree_canvas, wx.aui.AuiPaneInfo().Name('Objects').Caption('Objects').Left().BestSize(wx.Size(300,400)).Position(0))
self.input_mode_canvas = InputModeCanvas(self)
self.aui_manager.AddPane(self.input_mode_canvas, wx.aui.AuiPaneInfo().Name('Input').Caption('Input').Left().BestSize(wx.Size(300,200)).Position(0))
self.properties_canvas = ObjPropsCanvas(self)
self.aui_manager.AddPane(self.properties_canvas, wx.aui.AuiPaneInfo().Name('Properties').Caption('Properties').Left().BestSize(wx.Size(300,200)).Position(2))
wx.GetApp().AddExtraWindows(self)
self.ribbon = Ribbon(self)
self.aui_manager.AddPane(self.ribbon, wx.aui.AuiPaneInfo().ToolbarPane().Name('Ribbon').Top().Movable(False).Gripper(False))
wx.GetApp().RegisterHideableWindow(self.tree_canvas)
wx.GetApp().RegisterHideableWindow(self.input_mode_canvas)
wx.GetApp().RegisterHideableWindow(self.properties_canvas)
wx.GetApp().RegisterHideableWindow(self.ribbon)
perspective = config.Read('AuiPerspective', 'default')
if perspective != 'default':
self.aui_manager.LoadPerspective(perspective)
self.ribbon.SetHeightAndImages()
maximised = config.ReadBool('AuiMaximised', False)
if maximised:
self.Maximize()
self.aui_manager.Update()
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_MOVE, self.OnMove)
self.Bind(wx.EVT_CLOSE, self.OnClose)
self.gears = []
def MakeGraphicsCanvas(self):
return GraphicsCanvas(self)
def __del__(self):
if self.aui_manager:
str = self.aui_manager.SavePerspective()
config = HeeksConfig()
config.Write('AuiPerspective', str)
def OnSize(self, e):
size = e.GetSize()
config = HeeksConfig()
config.WriteInt('MainFrameWidth', size.GetWidth())
config.WriteInt('MainFrameHeight', size.GetHeight())
def OnMove(self, e):
pos = self.GetPosition()
config = HeeksConfig()
config.WriteInt('MainFramePosX', pos.x)
config.WriteInt('MainFramePosY', pos.y)
def OnClose(self, e):
if e.CanVeto() and wx.GetApp().CheckForModifiedDoc() == wx.CANCEL:
e.Veto()
return
e.Skip()
config = HeeksConfig()
config.WriteBool('AuiMaximised', self.IsMaximized())
self.aui_manager.UnInit()
def SetFrameTitle(self):
s = wx.GetApp().GetAppTitle() + ' - '
#s = self.GetTitle() + ' - '
if wx.GetApp().filepath:
s += wx.GetApp().filepath
else:
s += 'Untitled'
self.SetTitle(s)
# =============================================================================
# file: twitter_credentials.py
ACCESS_TOKEN = "Enter your access token here"
ACCESS_TOKEN_SECRET = "Enter your access token secret here"
CONSUMER_KEY = "Enter your consumer key here"
CONSUMER_SECRET = "Enter your consumer secret here"
# =============================================================================
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyGeocube(PythonPackage):
"""Tool to convert geopandas vector data into rasterized xarray data."""
homepage = "https://github.com/corteva/geocube"
pypi = "geocube/geocube-0.0.17.tar.gz"
maintainers = ['adamjstewart']
version('0.0.17', sha256='bf8da0fa96d772ebaea0b98bafa0ba5b8639669d5feb07465d4255af177bddc0')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-appdirs', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-datacube', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-rasterio', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-pyproj@2:', type=('build', 'run'))
# =============================================================================
# repo: sainjusajan/django-oscar
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class CommunicationsDashboardConfig(AppConfig):
label = 'communications_dashboard'
name = 'oscar.apps.dashboard.communications'
verbose_name = _('Communications dashboard')
# =============================================================================
# repo: johnnycakes79/pyops
"""
This module provides a series of time/date utilities.
"""
from __future__ import print_function
import spiceypy as spice
from datetime import datetime, timedelta
def oem_to_datetime(oem_time_string):
"""
converts oem datetime record to python datetime object
Args:
oem_time (string): datetime string
"""
date, time = oem_time_string.split('T')
year, month, day = date.split('-')
hour, minute, second_fraction = time.split(':')
second, fraction = second_fraction.split('.')
return datetime(int(year), int(month), int(day),
int(hour), int(minute), int(second),
int(fraction[0:3])*1000)
def datetime_to_et(dtime, scale='UTC'):
"""
    convert a python datetime to SPICE ephemeris seconds (TDB)
Args:
dtime (datetime): python datetime
scale (str, optional): time scale of input time (default: UTC)
Returns:
        float: SPICE ephemeris seconds (TDB)
"""
return spice.str2et(dtime.strftime(
'%m/%d/%y %H:%M:%S.%f ({})'.format(scale)))
def et_to_datetime(et, scale='TDB'):
"""
convert a SPICE ephemerides epoch (TBD seconds) to a python datetime
object. The default time scale returned will be TDB but can be set
to any of the accepted SPICE time scales.
Args:
        et (float): SPICE ephemeris seconds (TDB)
scale (str, optional): time scale of output time (default: TDB)
Returns:
datetime: python datetime
"""
t = spice.timout(et, 'YYYY-MON-DD HR:MN:SC.### ::{}'.format(scale), 41)
return datetime.strptime(t, '%Y-%b-%d %H:%M:%S.%f')
def et_to_utc(et):
"""Summary
convert SPICE epoch in Ephemerides seconds (TDB) to a
UTC time string.
Args:
et (float): SPICE epoch in Ephemerides seconds (TDB)
Returns:
string: UTC time
"""
return spice.et2utc(et, 'ISOC', 3, 30)
def itl_to_datetime(itltime):
"""
convert EPS ITL time format to python datetime object
Args:
itltime (string): EPS ITL time string formt
Returns:
datetime: python datetime
"""
return datetime.strptime(itltime, '%d-%b-%Y_%H:%M:%S')
def xldate_to_datetime(xldate):
"""
    Convert an Excel-style day count to a python datetime object.

    Note: days are anchored at 1900-01-01, which does not reproduce Excel's
    historical 1900 leap-year quirk, so results can differ from Excel's own
    serial dates by a day or two.

    Args:
        xldate (float): days since 1900-01-01

    Returns:
        datetime: python datetime
"""
temp = datetime(1900, 1, 1)
delta = timedelta(days=xldate)
return temp+delta
# def mjd20002datetime(mjd2000):
# y, m, d, fd = jdcal.jd2gcal(jdcal.MJD_0,jdcal.MJD_JD2000+float(mjd2000)-0.5)
# hour = 24* fd
# mins = 60*(hour - int(hour))
# sec = 60*(mins - int(mins))
# usec = 1000000*(sec-int(sec))
# return dt(y, m, d, int(hour), int(mins), int(sec), int(usec))
# def datetime2et(dtime):
# return spice.str2et(dtime.strftime("%m/%d/%y %H:%M:%S.%f"))
# def mjd20002et(mjd2000):
# return datetime2et(mjd20002datetime(mjd2000))
# def et2utc(et):
# return spice.et2utc(et, 'ISOC', 3, 30)
# def et2datetime(et):
# utc = et2utc(et)
# utc_date, utc_time = utc.split('T')
# y, m, d = utc_date.split('-')
# hour, mins, sec = utc_time.split(':')
# sec, usec = sec.split('.')
# return dt(int(y), int(m), int(d), int(hour), int(mins), int(sec), int(usec))
# def et2mjd2000(et):
# return float(spice.et2utc(et, 'J', 7, 30).split(' ')[1]) - jdcal.MJD_0 - jdcal.MJD_JD2000 + 0.5
# Main function
def main():
"""
    prints a note and runs a couple of illustrative conversions
"""
print('This is a random collection of functions... TBS - to be sorted.')
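    # Illustrative demos of the pure-Python converters (no SPICE kernels
    # required); the inputs below are made up for demonstration.
    print(oem_to_datetime('2019-05-21T03:15:40.250'))  # -> 2019-05-21 03:15:40.250000
    print(xldate_to_datetime(43830.5))                 # -> 2020-01-02 12:00:00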
if __name__ == "__main__":
main()
# =============================================================================
# Modified from open-mmlab/mmcv
import os
import cv2
from cv2 import VideoWriter_fourcc
from mmcv.utils import check_file_exist, track_progress
def frames2video(frame_dir,
video_file,
file_names,
fps=20,
fourcc='XVID',
show_progress=True):
"""Read the frame images from a directory and join them as a video.
Args:
frame_dir (str): The directory containing video frames.
video_file (str): Output filename.
file_names (list[str]): Image files
fps (float): FPS of the output video.
fourcc (str): Fourcc of the output video, this should be compatible
with the output file type.
show_progress (bool): Whether to show a progress bar.
"""
first_file = os.path.join(frame_dir, file_names[0])
check_file_exist(first_file, 'The start frame not found: ' + first_file)
img = cv2.imread(first_file)
height, width = img.shape[:2]
resolution = (width, height)
vwriter = cv2.VideoWriter(video_file, VideoWriter_fourcc(*fourcc), fps, resolution)
def write_frame(file_idx):
filename = os.path.join(frame_dir, file_names[file_idx])
img = cv2.imread(filename)
vwriter.write(img)
if show_progress:
track_progress(write_frame, range(len(file_names)))
else:
for i in range(len(file_names)):
write_frame(i)
vwriter.release()
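if __name__ == '__main__':
    # Minimal sketch; assumes a ./frames directory of same-size .jpg images
    # (the directory and output names here are hypothetical).
    names = sorted(f for f in os.listdir('frames') if f.endswith('.jpg'))
    frames2video('frames', 'out.avi', names, fps=24)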
# =============================================================================
from fastdtw import fastdtw
from sklearn.metrics import euclidean_distances
import pandas as pd
import numpy as np
import pickle
class DtwKnn(object):
def __init__(self, n_neighbors=1, dist=euclidean_distances):
self.n_neighbors = n_neighbors
self.dist = dist
self.templates = []
self.label = []
def add_example(self, template, label):
"""
        Add a single labelled template sequence.
        :param template: sequence of samples describing one example
        :param label: class label associated with the template
        :return: None
"""
self.templates.append(template)
self.label.append(label)
def predict(self, x):
distances = []
for template, label in zip(self.templates, self.label):
distance, path = fastdtw(template, x)
distances.append([distance, label])
# print(distances)
df_temp = pd.DataFrame(distances, columns=['distance', 'label'])
df_temp = df_temp.sort_values('distance', axis=0).values
statistic = dict()
for l in self.label:
if l not in statistic.keys():
statistic[l] = [0, np.inf]
for i in range(self.n_neighbors):
statistic[df_temp[i, 1]][0] += 1
t = statistic[df_temp[i, 1]][1]
statistic[df_temp[i, 1]][1] = t if t < df_temp[i, 0] else df_temp[i, 0]
print(statistic)
label = list()
prob = list()
for key, value in statistic.items():
label.append(key)
prob.append(value[0]/(self.n_neighbors * value[1]))
# print(label, prob)
idx_max = np.argmax(prob)
return label[idx_max]
# dt = DtwKnn(n_neighbors=3)
# dt = pickle.load(open('model_saved/dtwknn.model', 'rb'))
# for t, l in zip(dt.templates, dt.label):
# print(t, l)
# df = pd.read_csv('data/templace.csv', header=None, names=['ax', 'ay', 'az'])
# num_sample = len(df)/20
# print(num_sample)
# for i in range(int(num_sample)):
# a = df[i*20 : (i+1)*20].values
# print(a.shape)
# dt.add_example(a, 'left')
#
#
# pickle.dump(dt, open('model_saved/dtwknn.model', 'wb'))
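if __name__ == '__main__':
    # Illustrative sketch with tiny synthetic 1-D sequences (not real
    # gesture data); with n_neighbors=1 the nearest template's label wins.
    clf = DtwKnn(n_neighbors=1)
    clf.add_example([0.0, 1.0, 2.0], 'up')
    clf.add_example([2.0, 1.0, 0.0], 'down')
    print(clf.predict([0.0, 1.1, 2.2]))  # expected: 'up'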
# =============================================================================
# file: python/cgp_generic_utils/python/__init__.py
"""
python management functions
"""
# imports local
from ._module import deleteModules, import_
__all__ = ['deleteModules', 'import_']
# =============================================================================
# file: cqd/base.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
import torch.nn as nn
from torch import optim, Tensor
import math
from cqd.util import query_to_atoms
import cqd.discrete as d2
from typing import Tuple, List, Optional, Dict
class N3:
def __init__(self, weight: float):
self.weight = weight
def forward(self, factors):
norm = 0
for f in factors:
norm += self.weight * torch.sum(torch.abs(f) ** 3)
return norm / factors[0].shape[0]
class CQD(nn.Module):
MIN_NORM = 'min'
PROD_NORM = 'prod'
NORMS = {MIN_NORM, PROD_NORM}
def __init__(self,
nentity: int,
nrelation: int,
rank: int,
init_size: float = 1e-3,
reg_weight: float = 1e-2,
test_batch_size: int = 1,
method: str = 'discrete',
t_norm_name: str = 'prod',
k: int = 5,
query_name_dict: Optional[Dict] = None,
do_sigmoid: bool = False,
do_normalize: bool = False,
use_cuda: bool = False):
super(CQD, self).__init__()
self.rank = rank
self.nentity = nentity
self.nrelation = nrelation
self.method = method
self.t_norm_name = t_norm_name
self.k = k
self.query_name_dict = query_name_dict
sizes = (nentity, nrelation)
self.embeddings = nn.ModuleList([nn.Embedding(s, 2 * rank, sparse=True) for s in sizes[:2]])
self.embeddings[0].weight.data *= init_size
self.embeddings[1].weight.data *= init_size
self.init_size = init_size
self.loss_fn = nn.CrossEntropyLoss(reduction='mean')
self.regularizer = N3(reg_weight)
self.do_sigmoid = do_sigmoid
self.do_normalize = do_normalize
self.use_cuda = use_cuda
self.batch_entity_range = torch.arange(nentity).to(torch.float).repeat(test_batch_size, 1)
if self.use_cuda is True:
self.batch_entity_range = self.batch_entity_range.cuda()
def split(self,
lhs_emb: Tensor,
rel_emb: Tensor,
rhs_emb: Tensor) -> Tuple[Tuple[Tensor, Tensor], Tuple[Tensor, Tensor], Tuple[Tensor, Tensor]]:
lhs = lhs_emb[..., :self.rank], lhs_emb[..., self.rank:]
rel = rel_emb[..., :self.rank], rel_emb[..., self.rank:]
rhs = rhs_emb[..., :self.rank], rhs_emb[..., self.rank:]
return lhs, rel, rhs
def loss(self,
triples: Tensor) -> Tensor:
(scores_o, scores_s), factors = self.score_candidates(triples)
l_fit = self.loss_fn(scores_o, triples[:, 2]) + self.loss_fn(scores_s, triples[:, 0])
l_reg = self.regularizer.forward(factors)
return l_fit + l_reg
def score_candidates(self,
triples: Tensor) -> Tuple[Tuple[Tensor, Tensor], Optional[List[Tensor]]]:
lhs_emb = self.embeddings[0](triples[:, 0])
rel_emb = self.embeddings[1](triples[:, 1])
rhs_emb = self.embeddings[0](triples[:, 2])
to_score = self.embeddings[0].weight
scores_o, _ = self.score_o(lhs_emb, rel_emb, to_score)
scores_s, _ = self.score_s(to_score, rel_emb, rhs_emb)
lhs, rel, rhs = self.split(lhs_emb, rel_emb, rhs_emb)
factors = self.get_factors(lhs, rel, rhs)
return (scores_o, scores_s), factors
def score_o(self,
lhs_emb: Tensor,
rel_emb: Tensor,
rhs_emb: Tensor,
return_factors: bool = False) -> Tuple[Tensor, Optional[List[Tensor]]]:
lhs, rel, rhs = self.split(lhs_emb, rel_emb, rhs_emb)
score_1 = (lhs[0] * rel[0] - lhs[1] * rel[1]) @ rhs[0].transpose(-1, -2)
score_2 = (lhs[1] * rel[0] + lhs[0] * rel[1]) @ rhs[1].transpose(-1, -2)
factors = self.get_factors(lhs, rel, rhs) if return_factors else None
return score_1 + score_2, factors
def score_s(self,
lhs_emb: Tensor,
rel_emb: Tensor,
rhs_emb: Tensor,
return_factors: bool = False) -> Tuple[Tensor, Optional[List[Tensor]]]:
lhs, rel, rhs = self.split(lhs_emb, rel_emb, rhs_emb)
score_1 = (rhs[0] * rel[0] + rhs[1] * rel[1]) @ lhs[0].transpose(-1, -2)
score_2 = (rhs[1] * rel[0] - rhs[0] * rel[1]) @ lhs[1].transpose(-1, -2)
factors = self.get_factors(lhs, rel, rhs) if return_factors else None
return score_1 + score_2, factors
def get_factors(self,
lhs: Tuple[Tensor, Tensor],
rel: Tuple[Tensor, Tensor],
rhs: Tuple[Tensor, Tensor]) -> List[Tensor]:
factors = []
for term in (lhs, rel, rhs):
factors.append(torch.sqrt(term[0] ** 2 + term[1] ** 2))
return factors
def get_full_embeddings(self, queries: Tensor) \
-> Tuple[Optional[Tensor], Optional[Tensor], Optional[Tensor]]:
lhs = rel = rhs = None
if torch.sum(queries[:, 0]).item() > 0:
lhs = self.embeddings[0](queries[:, 0])
if torch.sum(queries[:, 1]).item() > 0:
rel = self.embeddings[1](queries[:, 1])
if torch.sum(queries[:, 2]).item() > 0:
rhs = self.embeddings[0](queries[:, 2])
return lhs, rel, rhs
def batch_t_norm(self, scores: Tensor) -> Tensor:
if self.t_norm_name == CQD.MIN_NORM:
scores = torch.min(scores, dim=1)[0]
elif self.t_norm_name == CQD.PROD_NORM:
scores = torch.prod(scores, dim=1)
else:
raise ValueError(f't_norm must be one of {CQD.NORMS}, got {self.t_norm_name}')
return scores
def batch_t_conorm(self, scores: Tensor) -> Tensor:
if self.t_norm_name == CQD.MIN_NORM:
scores = torch.max(scores, dim=1, keepdim=True)[0]
elif self.t_norm_name == CQD.PROD_NORM:
scores = torch.sum(scores, dim=1, keepdim=True) - torch.prod(scores, dim=1, keepdim=True)
else:
raise ValueError(f't_norm must be one of {CQD.NORMS}, got {self.t_norm_name}')
return scores
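    # For intuition: with membership scores a=0.9, b=0.5, the Gödel ('min')
    # pair gives t-norm min(a, b)=0.5 and t-conorm max(a, b)=0.9, while the
    # product pair gives a*b=0.45 and a+b-a*b=0.95 (the inclusion-exclusion
    # form computed above).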
def reduce_query_score(self, atom_scores, conjunction_mask, negation_mask):
batch_size, num_atoms, *extra_dims = atom_scores.shape
atom_scores = torch.sigmoid(atom_scores)
scores = atom_scores.clone()
scores[negation_mask] = 1 - atom_scores[negation_mask]
disjunctions = scores[~conjunction_mask].reshape(batch_size, -1, *extra_dims)
conjunctions = scores[conjunction_mask].reshape(batch_size, -1, *extra_dims)
if disjunctions.shape[1] > 0:
disjunctions = self.batch_t_conorm(disjunctions)
conjunctions = torch.cat([disjunctions, conjunctions], dim=1)
t_norm = self.batch_t_norm(conjunctions)
return t_norm
def forward(self,
positive_sample,
negative_sample,
subsampling_weight,
batch_queries_dict: Dict[Tuple, Tensor],
batch_idxs_dict):
all_idxs = []
all_scores = []
scores = None
for query_structure, queries in batch_queries_dict.items():
batch_size = queries.shape[0]
atoms, num_variables, conjunction_mask, negation_mask = query_to_atoms(query_structure, queries)
all_idxs.extend(batch_idxs_dict[query_structure])
            # Mask of atoms whose object slot references the target variable
target_mask = torch.sum(atoms == -num_variables, dim=-1) > 0
# Offsets identify variables across different batches
var_id_offsets = torch.arange(batch_size, device=atoms.device) * num_variables
var_id_offsets = var_id_offsets.reshape(-1, 1, 1)
# Replace negative variable IDs with valid identifiers
vars_mask = atoms < 0
atoms_offset_vars = -atoms + var_id_offsets
atoms[vars_mask] = atoms_offset_vars[vars_mask]
head, rel, tail = atoms[..., 0], atoms[..., 1], atoms[..., 2]
head_vars_mask = vars_mask[..., 0]
with torch.no_grad():
h_emb_constants = self.embeddings[0](head)
r_emb = self.embeddings[1](rel)
if 'continuous' in self.method:
h_emb = h_emb_constants
if num_variables > 1:
# var embedding for ID 0 is unused for ease of implementation
var_embs = nn.Embedding((num_variables * batch_size) + 1, self.rank * 2)
var_embs.weight.data *= self.init_size
var_embs.to(atoms.device)
optimizer = optim.Adam(var_embs.parameters(), lr=0.1)
prev_loss_value = 1000
loss_value = 999
i = 0
# CQD-CO optimization loop
while i < 1000 and math.fabs(prev_loss_value - loss_value) > 1e-9:
prev_loss_value = loss_value
h_emb = h_emb_constants.clone()
# Fill variable positions with optimizable embeddings
h_emb[head_vars_mask] = var_embs(head[head_vars_mask])
t_emb = var_embs(tail)
scores, factors = self.score_o(h_emb.unsqueeze(-2),
r_emb.unsqueeze(-2),
t_emb.unsqueeze(-2),
return_factors=True)
query_score = self.reduce_query_score(scores,
conjunction_mask,
negation_mask)
loss = - query_score.mean() + self.regularizer.forward(factors)
loss_value = loss.item()
optimizer.zero_grad()
loss.backward()
optimizer.step()
i += 1
with torch.no_grad():
# Select predicates involving target variable only
conjunction_mask = conjunction_mask[target_mask].reshape(batch_size, -1)
negation_mask = negation_mask[target_mask].reshape(batch_size, -1)
target_mask = target_mask.unsqueeze(-1).expand_as(h_emb)
emb_size = h_emb.shape[-1]
h_emb = h_emb[target_mask].reshape(batch_size, -1, emb_size)
r_emb = r_emb[target_mask].reshape(batch_size, -1, emb_size)
to_score = self.embeddings[0].weight
scores, factors = self.score_o(h_emb, r_emb, to_score)
query_score = self.reduce_query_score(scores,
conjunction_mask,
negation_mask)
all_scores.append(query_score)
scores = torch.cat(all_scores, dim=0)
elif 'discrete' in self.method:
graph_type = self.query_name_dict[query_structure]
def t_norm(a: Tensor, b: Tensor) -> Tensor:
return torch.minimum(a, b)
def t_conorm(a: Tensor, b: Tensor) -> Tensor:
return torch.maximum(a, b)
if self.t_norm_name == CQD.PROD_NORM:
def t_norm(a: Tensor, b: Tensor) -> Tensor:
return a * b
def t_conorm(a: Tensor, b: Tensor) -> Tensor:
return 1 - ((1 - a) * (1 - b))
def normalize(scores_: Tensor) -> Tensor:
scores_ = scores_ - scores_.min(1, keepdim=True)[0]
scores_ = scores_ / scores_.max(1, keepdim=True)[0]
return scores_
def scoring_function(rel_: Tensor, lhs_: Tensor, rhs_: Tensor) -> Tensor:
res, _ = self.score_o(lhs_, rel_, rhs_)
if self.do_sigmoid is True:
res = torch.sigmoid(res)
if self.do_normalize is True:
res = normalize(res)
return res
if graph_type == "1p":
scores = d2.query_1p(entity_embeddings=self.embeddings[0],
predicate_embeddings=self.embeddings[1],
queries=queries,
scoring_function=scoring_function)
elif graph_type == "2p":
scores = d2.query_2p(entity_embeddings=self.embeddings[0],
predicate_embeddings=self.embeddings[1],
queries=queries,
scoring_function=scoring_function,
k=self.k, t_norm=t_norm)
elif graph_type == "3p":
scores = d2.query_3p(entity_embeddings=self.embeddings[0],
predicate_embeddings=self.embeddings[1],
queries=queries,
scoring_function=scoring_function,
k=self.k, t_norm=t_norm)
elif graph_type == "2i":
scores = d2.query_2i(entity_embeddings=self.embeddings[0],
predicate_embeddings=self.embeddings[1],
queries=queries,
scoring_function=scoring_function, t_norm=t_norm)
elif graph_type == "3i":
scores = d2.query_3i(entity_embeddings=self.embeddings[0],
predicate_embeddings=self.embeddings[1],
queries=queries,
scoring_function=scoring_function, t_norm=t_norm)
elif graph_type == "pi":
scores = d2.query_pi(entity_embeddings=self.embeddings[0],
predicate_embeddings=self.embeddings[1],
queries=queries,
scoring_function=scoring_function,
k=self.k, t_norm=t_norm)
elif graph_type == "ip":
scores = d2.query_ip(entity_embeddings=self.embeddings[0],
predicate_embeddings=self.embeddings[1],
queries=queries,
scoring_function=scoring_function,
k=self.k, t_norm=t_norm)
elif graph_type == "2u-DNF":
scores = d2.query_2u_dnf(entity_embeddings=self.embeddings[0],
predicate_embeddings=self.embeddings[1],
queries=queries,
scoring_function=scoring_function,
t_conorm=t_conorm)
elif graph_type == "up-DNF":
scores = d2.query_up_dnf(entity_embeddings=self.embeddings[0],
predicate_embeddings=self.embeddings[1],
queries=queries,
scoring_function=scoring_function,
k=self.k, t_norm=t_norm, t_conorm=t_conorm)
else:
raise ValueError(f'Unknown query type: {graph_type}')
return None, scores, None, all_idxs
# =============================================================================
# repo: zama201bc/LanguageCards
import datetime
from LanguageDeck.models import Cards
"""Interface for Card and Deck objects. Contains the obvious observer, creator and mutator functions"""
def get_text(card):
return str(card.text)
def get_translations(card):
return card.translations
def get_score(session, user, card):
if card.scores:
s = session.query(card.score_type).filter_by(user_id=user.id, card_id=card.id).one()
return int(s.score)
else:
return None
def get_examples(card):
return card.examples
def get_creation_date(card):
if card.date_created:
d = card.date_created
return datetime.date(d.year, d.month, d.day)
return None
def get_touch_date(card):
if card.date_last_touched:
d = card.date_last_touched
return datetime.date(d.year, d.month, d.day)
return None
def get_g_type(card):
return str(card.g_type)
def get_paradigms(card):
return card.grammar
def get_paradigm_content(card):
assert type(card) == Cards.Grammar, "card must be of type Grammar"
return str(card.paradigm)
def edit_text(card, edit):
assert type(edit) == str, "edit must be a string."
card.text = edit
def edit_score(session, card, user, score):
query = session.query(card.score_type).filter_by(user_id=user.id, card_id=card.id)
if query.count() == 0:
        new_score = card.score_type(card_id=card.id, user_id=user.id, score=score)
        session.add(new_score)
else:
q = query.one()
q.score = score
def edit_date_touched(card, date):
card.date_last_touched = date
def edit_g_paradigm(card, edit):
assert type(card) == Cards.Grammar, "card type needs to be Grammar"
card.paradigm = edit
def add_example(card, example):
assert type(example) == card.example_type, "incorrect example type"
card.examples.append(example)
def add_translation(card, translation):
assert Cards.translation_types[type(card)] == type(translation), "wrong card type for translation"
card.translations.append(translation)
def add_grammar(card, grammar):
assert type(grammar) == Cards.Grammar, "Can only insert Grammar class"
assert type(card) == Cards.LanguageBVocab, "Only LanguageBVocab cards have a grammar property"
card.grammar.append(grammar)
def remove_grammar(card, grammar):
assert type(grammar) == Cards.Grammar, "Can only insert Grammar class"
assert type(card) == Cards.LanguageBVocab, "Only LanguageBVocab cards have a grammar property"
assert grammar in card.grammar, "grammar object not in card"
card.grammar.remove(grammar)
def remove_example(card, example):
assert example in card.examples, "example not in card"
card.examples.remove(example)
def remove_translation(card, translation):
assert translation in card.translations, "translation not in card"
card.translations.remove(translation)
# =============================================================================
class Regressor(object):
def __init__(self):
pass
    def predict(self, X):
        pass
    def fit(self, X, Y):
        pass
class TFRegressor(Regressor):
def __init__(self,x_plh,y_plh,output_op,train_op,session,copy_op=None):
self.x = x_plh
self.y = y_plh
self.output_op = output_op
self.train = train_op
self.session = session
self.copy_op = copy_op
def predict(self,X):
return self.session.run(self.output_op,{self.x:X})
def fit(self,X,Y):
self.session.run(self.train, {self.x:X,self.y:Y})
def copy(self):
        if self.copy_op is None:
raise RuntimeError('No copy op specified')
self.session.run(self.copy_op)
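# Illustrative wiring (TF1-style graph API; all names below are hypothetical
# placeholders/ops built elsewhere):
#   reg = TFRegressor(x_plh, y_plh, output_op, train_op, session)
#   reg.fit(X_batch, Y_batch)
#   preds = reg.predict(X_batch)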
# =============================================================================
# repo: OdatNurd/HyperHelpAuthor
import sublime
import sublime_plugin
from ..linter_base import LinterBase
###----------------------------------------------------------------------------
class MissingHelpSourceLinter(LinterBase):
"""
Lint the help index to determine if the list of help files listed in the
index matches the list of help files that exist for the package.
"""
def __init__(self, pkg_info):
super().__init__(pkg_info)
root = "Packages/%s/" % (self.pkg_info.doc_root)
d_files = {file[len(root):] for file in sublime.find_resources("*.txt")
if file.startswith(root)}
i_files = {key for key in self.pkg_info.help_files.keys()}
for file in d_files - i_files:
self.add_index(
"warning",
"Help file '%s' is in Packages/%s/ but missing from the index",
file, self.pkg_info.doc_root)
for file in i_files - d_files:
self.add_index(
"error",
"Help file '%s' is in the index but not in Packages/%s/",
file, self.pkg_info.doc_root)
###----------------------------------------------------------------------------
# =============================================================================
# repo: exyi/ILSpy
# Copyright (c) 2011 AlphaSierraPapa for the SharpDevelop Team
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
# to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from System import *
from System.Collections.Generic import *
class NavigationHistory(object):
""" <summary>
Stores the navigation history.
</summary>
"""
def __init__(self):
""" <summary>
Stores the navigation history.
</summary>
"""
self._NavigationSecondsBeforeNewEntry = 0.5
self._lastNavigationTime = DateTime.MinValue
        # The original C# fields were List<T> back/forward and a null current
        # entry; this conversion has no type parameter in scope, so untyped
        # object lists are used and the current entry starts as None.
        self._current = None
        self._back = List[object]()
        self._forward = List[object]()
def get_CanNavigateBack(self):
return self._back.Count > 0
CanNavigateBack = property(fget=get_CanNavigateBack)
def get_CanNavigateForward(self):
return self._forward.Count > 0
CanNavigateForward = property(fget=get_CanNavigateForward)
def GoBack(self):
self._forward.Add(self._current)
self._current = self._back[self._back.Count - 1]
self._back.RemoveAt(self._back.Count - 1)
return self._current
def GoForward(self):
self._back.Add(self._current)
self._current = self._forward[self._forward.Count - 1]
self._forward.RemoveAt(self._forward.Count - 1)
return self._current
def RemoveAll(self, predicate):
self._back.RemoveAll(predicate)
self._forward.RemoveAll(predicate)
def Clear(self):
self._back.Clear()
self._forward.Clear()
def UpdateCurrent(self, node):
self._current = node
def Record(self, node):
navigationTime = DateTime.Now
period = navigationTime - self._lastNavigationTime
if period.TotalSeconds < self._NavigationSecondsBeforeNewEntry:
self._current = node
else:
if self._current != None:
self._back.Add(self._current)
# We only store a record once, and ensure it is on the top of the stack, so we just remove the old record
self._back.Remove(node)
self._current = node
self._forward.Clear()
        self._lastNavigationTime = navigationTime
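if __name__ == "__main__":
    # Illustrative sketch (IronPython): records made more than
    # NavigationSecondsBeforeNewEntry apart become separate history entries.
    import time
    history = NavigationHistory()
    history.Record("page1")
    time.sleep(0.6)
    history.Record("page2")
    print(history.CanNavigateBack)  # True
    print(history.GoBack())         # "page1"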
# =============================================================================
from django.apps import AppConfig
class ScrumboardConfig(AppConfig):
    name = 'scrumboard'

# =============================================================================
import requests
import MySQLdb
import re
from bs4 import BeautifulSoup
from selenium.webdriver.support.select import Select
from selenium import webdriver
from time import sleep
import datetime
import os
class IfisScraping():
def __init__(self):
self.wadaiurl = "https://kabutan.jp/news/marketnews/?category=9"
options = webdriver.ChromeOptions()
# options.add_argument('--headless')
self.driver = webdriver.Chrome(options=options)
def main(self):
conn = MySQLdb.connect(
host='localhost',
port=3306,
user='root',
password='',
database='stockdatabase',
use_unicode=True,
charset="utf8"
)
cur = conn.cursor()
wadai_table = 'wadai_data'
        topfifteen_table = 'topfifteens'
stockchange_table = 'stockchanges'
stockdataabase_table = 'stockdatabases'
lionnote_table = 'lionnotes'
cur.execute('SELECT * FROM %s;' % wadai_table)
wadai_datas = cur.fetchall()
        cur.execute('SELECT * FROM %s;' % topfifteen_table)
        topfifteen_datas = cur.fetchall()
cur.execute('SELECT * FROM %s;' % stockchange_table)
stockchange_datas = cur.fetchall()
cur.execute('SELECT * FROM %s;' % stockdataabase_table)
stockdatabase_datas = cur.fetchall()
cur.execute('SELECT * FROM %s;' % lionnote_table)
lionnote_datas = cur.fetchall()
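        # Collects candidate 4-digit ticker codes from all five tables; each table
        # stores them in a different shape, so each loop below extracts the code
        # with its own logic before duplicates are removed further down.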
stock_number_array = []
for wadai_data in wadai_datas:
stock_number_array.append(str(wadai_data[2]))
        for topfifteen_data in topfifteen_datas:
            for i in range(2, 27):
                if i == 17 or i == 18:
                    # Columns 17 and 18 do not hold stock entries in this table layout.
                    continue
                if topfifteen_data[i] is None:
                    break
                stock = topfifteen_data[i]
                stock_number = re.search(r'\d{4}', stock)
                stock_number = stock_number.group(0)
                stock_number_array.append(stock_number)
        for stockchange_data in stockchange_datas:
            for i in range(2, 26):
                if stockchange_data[i] is None:
                    break
                stock = stockchange_data[i]
                stock_number = re.search(r'\d{4}', stock)
                stock_number = stock_number.group(0)
                stock_number_array.append(stock_number)
for stockdatabase_data in stockdatabase_datas:
stock_number_array.append(str(stockdatabase_data[1]))
for lionnote_data in lionnote_datas:
stock_number_array.append(str(lionnote_data[4]))
stock_number_array = list(dict.fromkeys(stock_number_array))
driver = self.driver
for stock_number in stock_number_array:
ifisUrl = 'https://kabuyoho.ifis.co.jp/index.php?action=tp1&sa=report&bcode=' + stock_number
print(ifisUrl)
driver.get(ifisUrl)
sleep(4)
driver.set_window_size(634, 874)
driver.execute_script("window.scrollTo(10, 144);")
sleep(2)
driver.save_screenshot('ifis_' + stock_number + '.png')
sleep(1)
conn.commit()
cur.close()
conn.close()
driver.quit()
if __name__ == "__main__":
IfisScraping().main()
<gh_stars>1000+
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.session import Session
from tests import unittest
class TestTaggedUnionsUnknown(unittest.TestCase):
def test_tagged_union_member_name_does_not_coincide_with_unknown_key(self):
        # This test ensures that operation models do not use SDK_UNKNOWN_MEMBER
        # as a member name, thereby reserving SDK_UNKNOWN_MEMBER for the parser to
        # set as a key on the response object. This is necessary when the client
        # encounters a member that it is unaware of or that is not modeled.
session = Session()
for service_name in session.get_available_services():
service_model = session.get_service_model(service_name)
for shape_name in service_model.shape_names:
shape = service_model.shape_for(shape_name)
if hasattr(shape, 'is_tagged_union') and shape.is_tagged_union:
self.assertNotIn('SDK_UNKNOWN_MEMBER', shape.members)
<reponame>leerbennett/SMPAgentryEvent
#!/usr/bin/env python3
"""
Copyright (c) 2019 <NAME>
See the LICENSE file for license rights and limitations (MIT).
This script analyzes an Agentry event.log file.
The main goal is to determine the number of occurances of known error messages.
Key features implemented include:
* When not showing all details, hide any groups with leading underscore in name
* When not showing all details, Hide all output for errors that don't occur.
Current Limitations/future improvment ideas
*
* Show error descriptions in more user oriented way.
* Show Error detail level
* Report on errors by category, e.g. Locked
* Show # of sub errors and # without more detailed break down.
* Output results to a file
* Command line switches to only consider entries within a specified time period.
*
* Allow a nesting hierarchy of error messages, e.g. Java exception, with many subtypes
* Associate input files to specific server in a cluster
* Results from different input files are not time sorted in output.
* Implement concurent processing of multiple input files
"""
import sys
import os
import re
import glob
from datetime import datetime, timedelta
#from datetime import timedelta
from collections import deque
#from operator import attrgetter
import statistics
import csv
timestampFormat = '%m/%d/%Y %H:%M:%S'
# Regex used to match relevant loglines
#Example line: 02/04/2015 12:33:35, 0, 0, 2, Thr 6480, Agentry Startup
#line_regex = re.compile(r"(\d{2}[-/]\d{2}[-/]\d{4}\s+\d{2}:\d{2}:\d{2}),\s*(\d+),\s*(\d+),\s*(\d+),\s*Thr\s*(\d+),\s*(.*)")
# Date Time , Type , Group , Id , thread, Message
#Command Line switches
showUsers = True
start = None
end = None
onlyFirst = False
eventPatternFile = "EventPatterns.csv"
showDetail = False
typeFilter = None
debug = False
hideValues = False
def line_regex():
return re.compile(r"(\d{2}[-/]\d{2}[-/]\d{4}\s+\d{2}:\d{2}:\d{2}),\s*(\d+),\s*(\d+),\s*(\d+),\s*Thr\s*(\d+),\s*(.*)")
class EventPattern:
_patterns = []
def __init__(self, name, regEx, parent, userError):
        self.patternId = name
self.name = name
self.regEx = re.compile(regEx)
self.userError = userError
self.groupNames = []
self.groupValues = []
for i in self.regEx.groupindex:
self.groupNames.append(i)
self.groupValues.append(set())
        self.parentId = parent
self.events = []
self.subPatterns = []
if len(self.parentId) > 0:
EventPattern.addSubPattern(self)
else:
EventPattern._patterns.append(self)
@staticmethod
def addSubPattern(pattern):
ret = False
for p in EventPattern._patterns:
if pattern.parentId == p.patternId:
p.subPatterns.append(pattern)
ret = True
break
else:
sp = EventPattern.findSubPatternWithId(p, pattern.parentId)
if sp != None:
sp.subPatterns.append(pattern)
ret = True
break
return ret
@staticmethod
def findSubPatternWithId(pattern, id):
for p in pattern.subPatterns:
if p.patternId == id:
return p
sp = EventPattern.findSubPatternWithId(p, id)
if sp != None:
return sp
return None
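    # loadCsvFile expects a CSV with a header row naming at least the columns
    # Name, MessageRegEx, parent and UserError. A hypothetical row (illustrative
    # only, not from any shipped EventPatterns.csv) might look like:
    #   Name,MessageRegEx,parent,UserError
    #   JavaException,.*java\.lang\.(?P<_class>\w+Exception).*,,A Java exception occurred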
@staticmethod
def loadCsvFile(file):
with open(file) as csv_file:
csv_reader = csv.DictReader(csv_file)
line_count = 0
for row in csv_reader:
if debug:
print(row)
EventPattern(row['Name'], row['MessageRegEx'], row['parent'], row['UserError'])
line_count +=1
@staticmethod
def mainMatchEvent(event):
ret = None
        if typeFilter is not None and event.type != typeFilter:
return None
for p in EventPattern._patterns:
match = p.regEx.match(event.message)
if not match:
continue
if len(p.subPatterns) > 0:
sp = EventPattern.matchEvent(p, event)
                if sp is not None:
                    return sp
ret = p
p.addEvent(event, match)
break
return ret
def matchEvent(self, event):
ret = None
match = self.regEx.match(event.message)
if not match:
return None
if len(self.subPatterns) > 0:
for p in self.subPatterns:
ret = EventPattern.matchEvent(p, event)
                if ret is not None:
#print(" "+ p.name)
return p
self.addEvent(event, match)
return self
def addEvent(self, event, match):
self.events.append(event)
for ng in match.groupdict():
index = self.groupNames.index(ng)
value = match.group(ng)
if value == None:
value = "-None-"
self.groupValues[index].add(value)
@staticmethod
def printResults():
for p in EventPattern._patterns:
print(p)
    def __str__(self):
        occurrences = len(self.events)
        if occurrences == 0 and not showDetail and len(self.subPatterns) == 0:
            return ""
        description = self.regEx.pattern
        if not debug and len(self.userError) > 0:
            description = self.userError
        if hideValues:
            ret = "{1:4d}, {0}, \"{2}\"\n".format(self.name, occurrences, description)
        else:
            ret = "*** {1:4d}x {0} - {2}\n".format(self.name, occurrences, description)
if self.groupNames != None:
for n in self.groupNames:
if n[0] == '_' and not showDetail:
continue
index = self.groupNames.index(n)
numValues = len((self.groupValues[index]))
if numValues > 0:
values = ', '.join(self.groupValues[index])
else:
values = ''
ret += " {0} ({2}): {1}\n".format(n, values, numValues)
for p in self.subPatterns:
ret += p.__str__()
return (ret)
__repr__ = __str__
class Event:
"""A parsed line from event.log """
# Date Time , Type , Group , Id , thread, Message
def __init__(self, match):
## self.timestamp = match.group(1)
## if start and self.timestamp < start:
## raise ValueError('Line pior to start time')
## if end and self.timestamp > end:
## raise ValueError('Line after end time')
self.match = match #ToDo LRB: Review need to store full match object.
self.type = int(match.group(2))
self.group = match.group(3)
self.id = match.group(4)
self.thread = match.group(5)
self.message = match.group(6).strip()
    def __str__(self):
        return "{0} {1} {2} Thr{3}-{4}".format(self.type, self.group, self.id, self.thread, self.message)
__repr__ = __str__
class Repo:
"""Repository of all information being analyzed"""
def __init__(self):
Repo._initEventPatterns()
def __repr__(self):
return ""
__str__ = __repr__
@staticmethod
def _initEventPatterns():
EventPattern.loadCsvFile(eventPatternFile)
def mainLoop(files = ['events.log'], users=[]):
"""Main processing loop"""
store = Repo();
lines = 0
matches = 0
regex = None
regex = line_regex()
for fileName in files:
if debug:
print ("********* Processing file {0}\n".format(fileName))
with open(fileName, "r") as in_file:
# Loop over each log line
for line in in_file:
# If log line matches our regex, print to console, and output file
lines += 1
match = regex.match(line)
if match:
matches += 1
try:
e = Event(match)
#print('Match Found: {0}'.format(line))
                    except ValueError:
                        print('Matching line skipped: {0}'.format(line))
else:
EventPattern.mainMatchEvent(e)
if debug:
print ('******************** Finished processing all files ***********************************')
print ("Lines found {0}".format(lines))
print ("Matches found {0}".format(matches))
print ("{0}\n".format(store))
EventPattern.printResults()
def myMain(argv):
global showUsers, start, end, onlyFirst, eventPatternFile, showDetail, typeFilter, debug, hideValues
# if len(argv) < 2:
if len(argv) < 0:
print ("Need to provide a space separated list of files (which all include a .) and (optionally) users (which don't include a .)")
return
else:
files = []
users = []
for arg in argv[1:]:
if debug:
print(arg)
            if start is True:
                start = arg
            elif end is True:
                end = arg
elif arg == '-debug':
debug = True
elif arg == '-error0':
typeFilter = 0
elif arg == '-error1':
typeFilter = 1
elif arg == '-onlyFirst':
onlyFirst = True
elif arg == '-showDetail':
showDetail = True
elif arg == '-hideValues':
hideValues = True
elif arg == '-start':
start = True
elif arg == '-end':
end = True
elif arg.lower().find('.csv') >= 0:
eventPatternFile = arg.lower()
elif arg.find('.') < 0:
users.append(arg.lower())
else:
                if arg.find('*') < 0:
                    files.append(arg)
                else:
                    files.extend(glob.glob(arg))
if eventPatternFile == None:
print ("Need to specify an EventPatterns.csv file")
return
elif len(files) < 1:
files.append('events.log')
mainLoop(files, users)
#print ("Need to specify at least one event file to process")
#return
else:
if debug:
print(files)
mainLoop(files, users)
if __name__ == '__main__':
myMain(sys.argv)
else:
mainLoop()
import logging
from pathlib import Path
import os
import time
import pytest
import yaml
import re
from kubernetes import client, config
from kubernetes.client.rest import ApiException
from jinja2 import Template
log = logging.getLogger(__name__)
meta = yaml.safe_load(Path("metadata.yaml").read_text())
KUBECONFIG = os.environ.get("TESTING_KUBECONFIG", "~/.kube/config")
config.load_kube_config(KUBECONFIG) # TODO: check how to use this with designated file
@pytest.mark.abort_on_fail
async def test_build_and_deploy(ops_test):
charm = await ops_test.build_charm(".")
# Due to: https://github.com/juju/python-libjuju/issues/515
# We have to use the k8s API to wait, we cannot use:
# ops_test.model.applications['gatekeeper'].units[0].workload_status
model_name = ops_test.model_name
role_binding_file = Path("/tmp/k8s-rolebinding.yaml")
with open("docs/gatekeeper-rb.yaml.template", "r") as fh:
template = Template(fh.read())
role_binding_file.write_text(
template.render(
service_account_user=f"system:serviceaccount:{model_name}:opa-manager-test-operator"
)
)
role_binding = yaml.load_all(role_binding_file.read_text(), Loader=yaml.FullLoader)
with client.ApiClient() as api_client:
api_instance = client.RbacAuthorizationV1Api(api_client)
try:
for k8s_obj in role_binding:
if k8s_obj["kind"] == "ClusterRoleBinding":
api_instance.create_cluster_role_binding(body=k8s_obj)
if k8s_obj["kind"] == "ClusterRole":
api_instance.create_cluster_role(body=k8s_obj)
except ApiException as err:
if err.status == 409:
# ignore "already exists" errors so that we can recover from
# partially failed setups
pass
else:
raise
resources = {"gatekeeper-image": "openpolicyagent/gatekeeper:v3.2.3"}
for series in meta["series"]:
await ops_test.model.deploy(
charm, application_name="opa-manager-test", series=series, resources=resources
)
await ops_test.model.wait_for_idle(wait_for_active=True, timeout=60 * 60)
with client.ApiClient() as api_client:
api_instance = client.CoreV1Api(api_client)
try:
while True:
pods = api_instance.list_namespaced_pod(model_name)
pod = [
pod
for pod in pods.items
if re.search(
r"opa-manager-test-(([a-z0-9]){2,}){1}-{1}(([a-z0-9]){2,}){1}",
pod.metadata.name,
)
is not None
][0]
if [
condition
for condition in pod.status.conditions
if condition.type == "ContainersReady"
][0].status == "True":
break
time.sleep(5)
except ApiException:
raise
ca_cert = ""
with client.ApiClient() as api_client:
# Create an instance of the API class
api_instance = client.CoreV1Api(api_client)
name = "gatekeeper-webhook-server-cert"
ca_cert = api_instance.read_namespaced_secret(name, model_name).data["ca.crt"]
with client.ApiClient() as api_client:
# Create an instance of the API class
api_instance = client.AdmissionregistrationV1Api(api_client)
name = f"{model_name}-gatekeeper-validating-webhook-configuration"
for i in range(1):
body = [
{
"op": "replace",
"path": f"/webhooks/{i}/clientConfig/caBundle",
"value": f"{ca_cert}",
}
]
api_instance.patch_validating_webhook_configuration(name, body)
async def test_status_messages(ops_test):
"""Validate that the status messages are correct."""
expected_messages = {}
for app, message in expected_messages.items():
for unit in ops_test.model.applications[app].units:
assert unit.workload_status_message == message
<filename>utils/file_util.py
import yaml
from simulator.card_defs import Card, Pip, Suit, PIP_CODE, SUIT_CODE
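# A deck YAML file is expected to be a plain list of card codes, where each code is
# a suit character followed by a pip character (per SUIT_CODE and PIP_CODE in
# simulator.card_defs). The exact characters depend on those mappings.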
def load_deck_from_yaml(filename):
with open(filename, "r") as f:
t = yaml.safe_load(f)
# self.logger.debug(f"Loaded deck from file: {t}.")
deck = []
for c in t:
suit = next(key for key, value in SUIT_CODE.items() if value == c[:1])
pip = next(key for key, value in PIP_CODE.items() if value == c[1])
deck.append(Card(suit, pip))
return deck
def save_deck_as_yaml(deck, filename):
a = []
for i, c in enumerate(deck):
a.append(c.code)
with open(filename, "w") as f:
yaml.safe_dump(a, f)
# create this file
# Reroutes all requests that have 'api' in the URL to apps.core.urls
from django.conf.urls import url
from django.urls import path
from rest_framework import routers
from base.src import views
from base.src.views import InitViewSet
#from base.src.views import UploadFileForm
#upload stuff
from django.conf import settings
from django.conf.urls.static import static
router = routers.DefaultRouter()
router.register(r'users', views.UserViewSet)
router.register(r'groups', views.GroupViewSet)
#router.register(r'titles', TitlesViewSet, base_name='titles')
urlpatterns = [
path(r'pawsc', InitViewSet.as_view()),
path(r'pawsc/upload', views.simple_upload, name='simple_upload'),
path(r'pawsc/home', views.home, name='home')
]
urlpatterns += router.urls
<filename>terradactile/terradactile/app.py
import tempfile
from os.path import join, splitext, dirname
from os import listdir, mkdir, environ
import urllib.request
import io
import shutil
from math import log, tan, pi
from itertools import product
import sys
import boto3
import json
import uuid
import csv
from pyproj import Proj, transform
from osgeo import gdal
s3 = boto3.resource("s3")
tile_url = "https://s3.amazonaws.com/elevation-tiles-prod/v2/geotiff/{z}/{x}/{y}.tif"
s3_bucket = environ.get("BUCKET")
def respond(err, res=None, origin=None):
return {
'statusCode': '400' if err else '200',
'body': err if err else json.dumps(res),
'headers': {
'Content-Type': 'application/json',
"Access-Control-Allow-Origin" : origin,
'Access-Control-Allow-Methods': 'OPTIONS,POST,GET',
'Access-Control-Allow-Headers': 'Content-Type'
},
}
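# Note: always_xy=True makes pyproj treat and return coordinates in (x, y) =
# (longitude, latitude) order regardless of the CRS's axis definition, which is
# why callers below can pass lon/lat pairs directly.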
def reproject_point(x1, y1, in_epsg, out_epsg):
inProj = Proj(f'epsg:{in_epsg}')
outProj = Proj(f'epsg:{out_epsg}')
return transform(inProj, outProj, x1, y1, always_xy=True)
def download(output_path, tiles, clip_bounds, verbose=True):
''' Download list of tiles to a temporary directory and return its name.
'''
dir = dirname(output_path)
_, ext = splitext(output_path)
merge_geotiff = bool(ext.lower() in ('.tif', '.tiff', '.geotiff'))
files = []
for (z, x, y) in tiles:
try:
response = urllib.request.urlopen(tile_url.format(z=z, x=x, y=y))
            if response.getcode() != 200:
                print('No such tile: {}'.format((z, x, y)))
                continue
if verbose:
print('Downloaded', response.url, file=sys.stderr)
with io.open(join(dir, '{}-{}-{}.tif'.format(z, x, y)), 'wb') as file:
file.write(response.read())
files.append(file.name)
except urllib.error.URLError as e:
ResponseData = e.read().decode("utf8", 'ignore')
print(f"ERROR: {ResponseData}")
if merge_geotiff:
if verbose:
print('Combining', len(files), 'into', output_path, '...', file=sys.stderr)
vrt = gdal.BuildVRT("/tmp/mosaic.vrt", files)
minX, minY = reproject_point(clip_bounds[0], clip_bounds[1], 4326, 3857)
maxX, maxY = reproject_point(clip_bounds[2], clip_bounds[3], 4326, 3857)
wkt = f"POLYGON(({minX} {maxY}, {maxX} {maxY}, {maxX} {minY}, {minX} {minY}, {minX} {maxY}))"
out_csv = "/tmp/out.csv"
with open(out_csv, "w") as f:
fieldnames = ['id', 'wkt']
writer = csv.DictWriter(f, fieldnames=fieldnames)
writer.writeheader()
writer.writerow({'id': 1, 'wkt': wkt})
mosaic = gdal.Warp(output_path, vrt, cutlineDSName=out_csv, cropToCutline=True, format="GTiff")
mosaic.FlushCache()
mosaic = None
else:
if verbose:
print('Moving', dir, 'to', output_path, '...', file=sys.stderr)
shutil.move(dir, output_path)
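# mercator() converts a lat/lon pair to Web Mercator tile indices at a given zoom:
# it projects to spherical-mercator radians, then scales the 2*pi world extent onto
# a 2**zoom grid. For example, mercator(0, 0, 1) returns (1, 1), the bottom-right
# of the four zoom-1 tiles.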
def mercator(lat, lon, zoom):
x1, y1 = lon * pi/180, lat * pi/180
x2, y2 = x1, log(tan(0.25 * pi + 0.5 * y1))
tiles, diameter = 2 ** zoom, 2 * pi
x3, y3 = int(tiles * (x2 + pi) / diameter), int(tiles * (pi - y2) / diameter)
return x3, y3
def tiles(z, minX, minY, maxX, maxY):
xmin, ymin = mercator(maxY, minX, z)
xmax, ymax = mercator(minY, maxX, z)
xs, ys = range(xmin, xmax+1), range(ymin, ymax+1)
tiles = [(z, x, y) for (y, x) in product(ys, xs)]
return tiles
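# tif_to_cog() rewrites a GeoTIFF as a cloud-optimized GeoTIFF: the source is copied
# into an in-memory dataset, overviews are built at 2x-64x reductions, and the copy
# is written out tiled and LZW-compressed with COPY_SRC_OVERVIEWS so the overviews
# end up inside the output file.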
def tif_to_cog(input_tif, output_cog):
data = gdal.Open(input_tif)
data_geotrans = data.GetGeoTransform()
data_proj = data.GetProjection()
data_array = data.ReadAsArray()
x_size = data.RasterXSize
y_size = data.RasterYSize
num_bands = data.RasterCount
datatype = data.GetRasterBand(1).DataType
data = None
driver = gdal.GetDriverByName('MEM')
data_set = driver.Create('', x_size, y_size, num_bands, datatype)
for i in range(num_bands):
data_set_lyr = data_set.GetRasterBand(i + 1)
if len(data_array.shape) == 2:
data_set_lyr.WriteArray(data_array)
else:
data_set_lyr.WriteArray(data_array[i])
data_set.SetGeoTransform(data_geotrans)
data_set.SetProjection(data_proj)
data_set.BuildOverviews("NEAREST", [2, 4, 8, 16, 32, 64])
driver = gdal.GetDriverByName('GTiff')
data_set2 = driver.CreateCopy(
output_cog,
data_set,
options = [
"COPY_SRC_OVERVIEWS=YES",
"TILED=YES",
"COMPRESS=LZW"
]
)
data_set = None
data_set2 = None
def translate_scale(input_tif, output_tif):
ds = gdal.Open(input_tif)
ds = gdal.Translate(
output_tif,
ds,
format='GTiff',
outputType=1,
scaleParams=[[]]
)
ds = None
def write_to_s3(tmp_path, s3_path):
s3.meta.client.upload_file(tmp_path, s3_bucket, s3_path)
def make_output(input_cog, output, s3_folder):
ds = gdal.Open(input_cog)
output_path = f'/tmp/{s3_folder}/{output}.tif'
gdal.DEMProcessing(
destName=output_path,
srcDS=ds,
processing=output,
format="GTiff",
zFactor=1,
scale=1,
azimuth=315,
altitude=45
)
ds = None
output_cog = f'/tmp/{s3_folder}/{output}_cog.tif'
tif_to_cog(output_path, output_cog)
write_to_s3(output_cog, f'{s3_folder}/{output}.tif')
def lambda_handler(event, context):
origin = event["headers"]["origin"]
allowed_origins = [x.strip(' ') for x in environ.get("ALLOWED_ORIGINS").split(",")]
if origin not in allowed_origins:
return respond("Origin not in allowed origins!", None, "*")
body = json.loads(event['body'])
x1 = body.get("x1")
x2 = body.get("x2")
y1 = body.get("y1")
y2 = body.get("y2")
z = body.get("z")
minX = min([x1, x2])
maxX = max([x1, x2])
minY = min([y1, y2])
maxY = max([y1, y2])
clip_bounds = (minX, minY, maxX, maxY)
req_tiles = tiles(z, minX, minY, maxX, maxY)
s3_folder = str(uuid.uuid4())
mkdir(f"/tmp/{s3_folder}")
mosaic_path = f'/tmp/{s3_folder}/mos.tif'
tile_limit = 50
if len(req_tiles) > tile_limit:
return respond(f"Requested too many tiles ({len(req_tiles)} in total, limit is {tile_limit}). Try a lower zoom level or smaller bbox.")
else:
download(mosaic_path, req_tiles, clip_bounds)
mosaic_cog = f'/tmp/{s3_folder}/mos_cog.tif'
tif_to_cog(mosaic_path, mosaic_cog)
write_to_s3(mosaic_cog, f'{s3_folder}/mosaic.tif')
mosaic_display = f'/tmp/{s3_folder}/mos_display_cog.tif'
translate_scale(mosaic_cog, mosaic_display)
write_to_s3(mosaic_display, f'{s3_folder}/mosaic_display.tif')
outputs = body.get("outputs", [])
outputs.append("hillshade")
for output in outputs:
make_output(mosaic_cog, output, s3_folder)
return respond(None, f"s3://{s3_bucket}/{s3_folder}", origin)
# -*- coding: utf-8 -*-
"""
An Eye Tracker can get landmarks of the eyes from an image tensor.
"""
import cv2 as cv
import numpy as np
from config import ConfigOptionMetadata, ConfigOptionPackage
from tracking.eye_tracking import EyeTrackerInput
from tracking.eye_tracking.eye_tracking import EyeTracker
class InfraredEyeTrackerCOP(ConfigOptionPackage):
@staticmethod
def get_options_metadata() -> list:
return [
ConfigOptionMetadata(int, 'eye_tracking_threshold', 88,
'The eye tracking threshold if infrared tracking is used.'),
]
class InfraredEyeTracker(EyeTracker):
@staticmethod
def get_required_option_packages() -> list:
packages = super(InfraredEyeTracker, InfraredEyeTracker).get_required_option_packages()
packages.extend([InfraredEyeTrackerCOP])
return packages
def __init__(self, config):
super().__init__(config)
self.eye_tracking_threshold = config['eye_tracking_threshold']
def track_landmarks(self, input_data: EyeTrackerInput):
super(InfraredEyeTracker, self).track_landmarks(input_data)
self.input = input_data
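        # Pipeline: equalize the histogram, cut a region spanning both eye bboxes,
        # blank the middle third, blur, inverse-threshold at eye_tracking_threshold,
        # dilate, then find the largest contour in each half to locate the pupils.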
image = np.copy(self.input.image)
bbox_eye_left = self.input.bbox_eye_left
bbox_eye_right = self.input.bbox_eye_right
x = int(bbox_eye_left["x"])
y = int(bbox_eye_left["y"])
w = int((bbox_eye_right["x"] + bbox_eye_right["width"]) - bbox_eye_left["x"])
h = int(
bbox_eye_left["height"] if bbox_eye_left["height"] > bbox_eye_right["height"] else bbox_eye_right["height"])
image = cv.equalizeHist(image)
roi = image[y:y + h, x:x + w]
image = cv.cvtColor(image, cv.COLOR_GRAY2BGR)
cv.rectangle(image, (x, y), (x + w, y + h), (0, 0, 255), 2)
roi[:, int(w / 3):int(w / 3) * 2] = 255
# print_numpy(roi, True, True)
roi = cv.GaussianBlur(roi, (11, 11), 0)
thresh = self.eye_tracking_threshold
_, roi = cv.threshold(roi, thresh, 255, cv.THRESH_BINARY_INV)
kernel = np.ones((5, 5), np.uint8)
roi = cv.dilate(roi, kernel, iterations=2)
roi_left = roi[:, 0:int(w / 2)]
roi_right = roi[:, int(w / 2):w]
roi = cv.cvtColor(roi, cv.COLOR_GRAY2BGR)
x1 = 0
y1 = 0
x2 = 0
y2 = 0
contours, _ = cv.findContours(roi_left, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
contours = sorted(contours, key=lambda x: cv.contourArea(x), reverse=True)
for cnt in contours:
(x1, y1, w1, h1) = cv.boundingRect(cnt)
cv.rectangle(roi, (x1, y1), (x1 + w1, y1 + h1), (0, 255, 0), 2)
y1 += y + int(h1 / 2)
x1 += x + w1 - 15 # *2
image[y1 - 3:y1 + 3, x1 - 3:x1 + 3] = np.array([0, 255, 0])
break
contours, _ = cv.findContours(roi_right, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
contours = sorted(contours, key=lambda x: cv.contourArea(x), reverse=True)
offset_w = int(w / 2)
for cnt in contours:
(x2, y2, w2, h2) = cv.boundingRect(cnt)
cv.rectangle(roi, (x2 + offset_w, y2), (x2 + w2 + offset_w, y2 + h2), (0, 255, 0), 2)
y2 += y + int(h2 / 2)
x2 += x + int(w / 2) + 15
image[y2 - 3:y2 + 3, x2 - 3:x2 + 3] = np.array([0, 255, 0])
break
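        # If no contour was found for a side, fall back to a fixed point at the
        # vertical center and a quarter (or three quarters) of the ROI width.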
if x1 == 0 and y1 == 0:
y1 += y + int(h / 2)
x1 += x + int(w / 4)
if x2 == 0 and y2 == 0:
y2 += y + int(h / 2)
x2 += x + int(w / 4) * 3
self.tracked_data = np.asarray([[x1, y1], [x2, y2]])
return self.tracked_data
<reponame>wgzhao/trino-admin
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import logging
from fabric import state
from fabric.context_managers import hide, settings
from fabric.decorators import hosts, parallel, roles, serial
from fabric.exceptions import NetworkError
from fabric.tasks import Task
from fudge import Fake, patched_context, with_fakes, clear_expectations
from fabric.state import env
import fabric.api
import fabric.operations
import fabric.utils
from mock import call
from mock import patch
from tests.base_test_case import BaseTestCase
from trinoadmin.util.application import Application
from trinoadmin.fabric_patches import execute
APPLICATION_NAME = 'foo'
@patch('prestoadmin.util.application.filesystem')
@patch('prestoadmin.util.application.logging.config')
class FabricPatchesTest(BaseTestCase):
def setUp(self):
# basicConfig is a noop if there are already handlers
# present on the root logger, remove them all here
self.__old_log_handlers = []
for handler in logging.root.handlers:
self.__old_log_handlers.append(handler)
logging.root.removeHandler(handler)
# Load prestoadmin so that the monkeypatching is in place
BaseTestCase.setUp(self, capture_output=True)
def tearDown(self):
# restore the old log handlers
for handler in logging.root.handlers:
logging.root.removeHandler(handler)
for handler in self.__old_log_handlers:
logging.root.addHandler(handler)
BaseTestCase.tearDown(self)
@patch('prestoadmin.fabric_patches._LOGGER')
def test_warn_api_prints_out_message(self, logger_mock, log_conf_mock,
filesystem_mock):
with Application(APPLICATION_NAME):
fabric.api.warn("Test warning.")
logger_mock.warn.assert_has_calls(
[
call('Test warning.\n\nNone\n'),
]
)
self.assertEqual(
'\nWarning: Test warning.\n\n',
self.test_stderr.getvalue()
)
@patch('prestoadmin.fabric_patches._LOGGER')
def test_warn_utils_prints_out_message(self, logger_mock, log_conf_mock,
filesystem_mock):
with Application(APPLICATION_NAME):
fabric.utils.warn("Test warning.")
logger_mock.warn.assert_has_calls(
[
call('Test warning.\n\nNone\n'),
]
)
self.assertEqual(
'\nWarning: Test warning.\n\n',
self.test_stderr.getvalue()
)
@patch('prestoadmin.fabric_patches._LOGGER')
def test_warn_utils_prints_out_message_with_host(self, logger_mock,
log_conf_mock, fs_mock):
fabric.api.env.host = 'host'
with Application(APPLICATION_NAME):
fabric.utils.warn("Test warning.")
logger_mock.warn.assert_has_calls(
[
call('[host] Test warning.\n\nNone\n'),
]
)
self.assertEqual(
'\nWarning: [host] Test warning.\n\n',
self.test_stderr.getvalue()
)
@patch('fabric.operations._run_command')
@patch('prestoadmin.fabric_patches._LOGGER')
def test_run_api_logs_stdout(self, logger_mock, run_command_mock,
logging_config_mock, filesystem_mock):
self._execute_operation_test(run_command_mock, logger_mock,
fabric.api.run)
@patch('fabric.operations._run_command')
@patch('prestoadmin.fabric_patches._LOGGER')
def test_run_op_logs_stdout(self, logger_mock, run_command_mock,
logging_config_mock, filesystem_mock):
self._execute_operation_test(run_command_mock, logger_mock,
fabric.operations.run)
@patch('fabric.operations._run_command')
@patch('prestoadmin.fabric_patches._LOGGER')
def test_sudo_api_logs_stdout(self, logger_mock, run_command_mock,
logging_config_mock, filesystem_mock):
self._execute_operation_test(run_command_mock, logger_mock,
fabric.api.sudo)
@patch('fabric.operations._run_command')
@patch('prestoadmin.fabric_patches._LOGGER')
def test_sudo_op_logs_stdout(self, logger_mock, run_command_mock,
logging_config_mock, filesystem_mock):
self._execute_operation_test(run_command_mock, logger_mock,
fabric.operations.sudo)
def _execute_operation_test(self, run_command_mock, logger_mock, func):
out = fabric.operations._AttributeString('Test warning')
out.command = 'echo "Test warning"'
out.real_command = '/bin/bash echo "Test warning"'
out.stderr = ''
run_command_mock.return_value = out
fabric.api.env.host_string = 'localhost'
with Application(APPLICATION_NAME):
func('echo "Test warning"')
pass
logger_mock.info.assert_has_calls(
[
call('\nCOMMAND: echo "Test warning"\nFULL COMMAND: /bin/bash'
' echo "Test warning"\nSTDOUT: Test warning\nSTDERR: '),
]
)
# Most of these tests were taken or modified from fabric's test_tasks.py
# Below is the license for the fabric code:
# Copyright (c) 2009-2015 <NAME>
# Copyright (c) 2008-2009 <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,
# this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
class TestExecute(BaseTestCase):
def setUp(self):
clear_expectations()
super(TestExecute, self).setUp(capture_output=True)
@with_fakes
def test_calls_task_function_objects(self):
"""
should execute the passed-in function object
"""
execute(Fake(callable=True, expect_call=True))
@with_fakes
def test_should_look_up_task_name(self):
"""
should also be able to handle task name strings
"""
name = 'task1'
commands = {name: Fake(callable=True, expect_call=True)}
with patched_context(fabric.state, 'commands', commands):
execute(name)
@with_fakes
def test_should_handle_name_of_Task_object(self):
"""
handle corner case of Task object referrred to by name
"""
name = 'task2'
class MyTask(Task):
run = Fake(callable=True, expect_call=True)
mytask = MyTask()
mytask.name = name
commands = {name: mytask}
with patched_context(fabric.state, 'commands', commands):
execute(name)
def test_should_abort_if_task_name_not_found(self):
"""
should abort if given an invalid task name
"""
self.assertRaisesRegexp(SystemExit,
"'thisisnotavalidtaskname' is not callable or"
" a valid task name",
execute, 'thisisnotavalidtaskname')
def test_should_not_abort_if_task_name_not_found_with_skip(self):
"""
should not abort if given an invalid task name
and skip_unknown_tasks in env
"""
env.skip_unknown_tasks = True
execute('thisisnotavalidtaskname')
del env['skip_unknown_tasks']
@with_fakes
def test_should_pass_through_args_kwargs(self):
"""
should pass in any additional args, kwargs to the given task.
"""
task = (
Fake(callable=True, expect_call=True)
.with_args('foo', biz='baz')
)
execute(task, 'foo', biz='baz')
@with_fakes
def test_should_honor_hosts_kwarg(self):
"""
should use hosts kwarg to set run list
"""
# Make two full copies of a host list
hostlist = ['a', 'b', 'c']
hosts = hostlist[:]
# Side-effect which asserts the value of env.host_string when it runs
def host_string():
self.assertEqual(env.host_string, hostlist.pop(0))
task = Fake(callable=True, expect_call=True).calls(host_string)
with hide('everything'):
execute(task, hosts=hosts)
def test_should_honor_hosts_decorator(self):
"""
should honor @hosts on passed-in task objects
"""
# Make two full copies of a host list
hostlist = ['a', 'b', 'c']
@hosts(*hostlist[:])
def task():
self.assertEqual(env.host_string, hostlist.pop(0))
with hide('running'):
execute(task)
def test_should_honor_roles_decorator(self):
"""
should honor @roles on passed-in task objects
"""
# Make two full copies of a host list
roledefs = {'role1': ['a', 'b', 'c'], 'role2': ['d', 'e']}
role_copy = roledefs['role1'][:]
@roles('role1')
def task():
self.assertEqual(env.host_string, role_copy.pop(0))
with settings(hide('running'), roledefs=roledefs):
execute(task)
@with_fakes
def test_should_set_env_command_to_string_arg(self):
"""
should set env.command to any string arg, if given
"""
name = "foo"
def command():
self.assert_(env.command, name)
task = Fake(callable=True, expect_call=True).calls(command)
with patched_context(fabric.state, 'commands', {name: task}):
execute(name)
@with_fakes
def test_should_set_env_command_to_name_attr(self):
"""
should set env.command to TaskSubclass.name if possible
"""
name = "foo"
def command():
self.assertEqual(env.command, name)
task = (
Fake(callable=True, expect_call=True)
.has_attr(name=name)
.calls(command)
)
execute(task)
@with_fakes
def test_should_set_all_hosts(self):
"""
should set env.all_hosts to its derived host list
"""
hosts = ['a', 'b']
roledefs = {'r1': ['c', 'd']}
roles = ['r1']
exclude_hosts = ['a']
def command():
self.assertEqual(set(env.all_hosts), set(['b', 'c', 'd']))
task = Fake(callable=True, expect_call=True).calls(command)
with settings(hide('everything'), roledefs=roledefs):
execute(
task, hosts=hosts, roles=roles, exclude_hosts=exclude_hosts
)
def test_should_print_executing_line_per_host(self):
"""
should print "Executing" line once per host
"""
state.output.running = True
def task():
pass
execute(task, hosts=['host1', 'host2'])
self.assertEqual(sys.stdout.getvalue(),
"""[host1] Executing task 'task'
[host2] Executing task 'task'
""")
def test_should_not_print_executing_line_for_singletons(self):
"""
should not print "Executing" line for non-networked tasks
"""
def task():
pass
with settings(hosts=[]): # protect against really odd test bleed :(
execute(task)
self.assertEqual(sys.stdout.getvalue(), "")
def test_should_return_dict_for_base_case(self):
"""
Non-network-related tasks should return a dict w/ special key
"""
def task():
return "foo"
self.assertEqual(execute(task), {'<local-only>': 'foo'})
def test_should_return_dict_for_serial_use_case(self):
"""
Networked but serial tasks should return per-host-string dict
"""
ports = [2200, 2201]
hosts = map(lambda x: '127.0.0.1:%s' % x, ports)
@serial
def task():
return "foo"
with hide('everything'):
self.assertEqual(execute(task, hosts=hosts), {
'127.0.0.1:2200': 'foo',
'127.0.0.1:2201': 'foo'
})
@patch('fabric.operations._run_command')
@patch('prestoadmin.fabric_patches.log_output')
def test_should_preserve_None_for_non_returning_tasks(self, log_mock,
run_mock):
"""
Tasks which don't return anything should still show up in the dict
"""
def local_task():
pass
def remote_task():
with hide('everything'):
run_mock.return_value = 'hello'
fabric.api.run('a command')
self.assertEqual(execute(local_task), {'<local-only>': None})
with hide('everything'):
self.assertEqual(
execute(remote_task, hosts=['host']),
{'host': None}
)
def test_should_use_sentinel_for_tasks_that_errored(self):
"""
Tasks which errored but didn't abort should contain an eg NetworkError
"""
def task():
fabric.api.run("whoops")
host_string = 'localhost:1234'
with settings(hide('everything'), skip_bad_hosts=True):
retval = execute(task, hosts=[host_string])
assert isinstance(retval[host_string], NetworkError)
def test_parallel_return_values(self):
"""
Parallel mode should still return values as in serial mode
"""
@parallel
@hosts('127.0.0.1:2200', '127.0.0.1:2201')
def task():
return env.host_string.split(':')[1]
with hide('everything'):
retval = execute(task)
self.assertEqual(retval, {'127.0.0.1:2200': '2200',
'127.0.0.1:2201': '2201'})
@with_fakes
def test_should_work_with_Task_subclasses(self):
"""
should work for Task subclasses, not just WrappedCallableTask
"""
class MyTask(Task):
name = "mytask"
run = Fake(callable=True, expect_call=True)
mytask = MyTask()
execute(mytask)
@patch('prestoadmin.fabric_patches.error')
def test_parallel_network_error(self, error_mock):
"""
network error should call error
"""
network_error = NetworkError('Network message')
fabric.state.env.warn_only = False
@parallel
@hosts('127.0.0.1:2200', '127.0.0.1:2201')
def task():
raise network_error
with hide('everything'):
execute(task)
error_mock.assert_called_with('Network message',
exception=network_error.wrapped,
func=fabric.utils.abort)
@patch('prestoadmin.fabric_patches.error')
def test_base_exception_error(self, error_mock):
"""
base exception should call error
"""
value_error = ValueError('error message')
fabric.state.env.warn_only = True
@parallel
@hosts('127.0.0.1:2200', '127.0.0.1:2201')
def task():
raise value_error
with hide('everything'):
execute(task)
# self.assertTrue(error_mock.is_called)
args = error_mock.call_args
self.assertEqual(args[0], ('error message',))
self.assertEqual(type(args[1]['exception']), type(value_error))
self.assertEqual(args[1]['exception'].args, value_error.args)
def test_abort_should_not_raise_error(self):
"""
base exception should call error
"""
fabric.state.env.warn_only = False
@parallel
@hosts('127.0.0.1:2200', '127.0.0.1:2201')
def task():
fabric.utils.abort('aborting')
with hide('everything'):
execute(task)
def test_abort_in_serial_should_not_raise_error(self):
"""
base exception should call error
"""
fabric.state.env.warn_only = False
@serial
@hosts('127.0.0.1:2200', '127.0.0.1:2201')
def task():
fabric.utils.abort('aborting')
with hide('everything'):
execute(task)
def test_arg_exception_should_raise_error(self):
@hosts('127.0.0.1:2200', '127.0.0.1:2201')
def task(arg):
pass
with hide('everything'):
self.assertRaisesRegexp(TypeError,
'task\(\) takes exactly 1 argument'
' \(0 given\)', execute, task)
# NOTE: It is the historian's job to make sure that keywords are not repetitive (they are
# otherwise double-counted into counts).
from collections import defaultdict
from collections import OrderedDict
import os
import pandas as pd
import re
import nltk
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
import string
from unidecode import unidecode
import csv
from bs4 import BeautifulSoup, Tag
import sys
import json
NUM_TOP_WORDS = 20 # The number of top words that we want from each file
CONTEXT_WORDS_AROUND = 50
MAX_EXCLUDE_REGEX_LENGTH = 50
punctuation = [r'\.', '/', r'\?', r'\-', '"', ',', r'\b'] # Punctuation we use within our regexes
data_dirname = os.getcwd() + "/data/"
# Writes all the original interviews that have keywords into a subdirectory.
def write_subcorpora(subcorpora_dirname, filenames, content, keyword_freq_files):
os.mkdir(subcorpora_dirname)
for i in range(len(filenames)):
file = filenames[i]
if file not in keyword_freq_files: continue
new_file = "{}/{}".format(subcorpora_dirname, file)
with open(new_file, "w", encoding = "utf-8") as f:
f.write(content[i])
# Fills in decade years
def fill_years(data, step):
all_years = []
not_given = data["Not given"] if "Not given" in data else 0
for k in data.keys():
if k != "Not given": all_years.append(int(k))
new_data = defaultdict(lambda:0)
new_data["Not given"] = not_given
all_years.sort()
for i in range(all_years[0], all_years[-1] + step, step):
if str(i) in data:
new_data[i] = data[str(i)]
elif i in data:
new_data[i] = data[i]
else:
new_data[i] = 0
return new_data
# Prints out a JSON string that is then read by the Node.js backend.
def print_message(_type, content):
message = {
"type": _type,
"content": content
}
print(json.dumps(message))
# Downloads the NLTK libraries.
def download_nltk():
print_message("progress-message", "Downloading relevant libraries...")
nltk.download('averaged_perceptron_tagger')
nltk.download('stopwords')
nltk.download('punkt')
print_message("progress", 2)
# Reads in arguments into the directories, words, and metadata file needed for the runs.
def read_arguments():
print_message("progress_message", "Reading in run data...")
data = json.loads(sys.argv[1])
runId = data['id']
runName = data['name']
runDate = data['date']
collections = data['collections']
keywords = data['keywordList']
metadata_file_interviews = data['interviews']
metadata_file_interviewees= data['interviewees']
print_message("progress", 4)
return runId, runName, runDate, collections, keywords, metadata_file_interviews, metadata_file_interviewees
# Creates a new folder to store the final data for the current run.
def create_run_directory(runId):
print_message("progress-message", "Creating a directory to store run results...")
dirname = data_dirname + "runs/" + runId
os.mkdir(dirname)
print_message("progress", 5)
return dirname
# Gets punctuation joined by bars (this is punctuation that we decide to count as separation!)
def get_punctuation_for_regex(punc):
return "|".join(punc)
# Converts the keyword list to Python regex form. Returns the full list of words and the
# included and excluded regexes.
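# For example (illustrative keyword, not from any shipped list): a keyword "farm*"
# with the punctuation list above becomes the regex
# (?:\.|/|\?|\-|"|,|\b)(farm[a-zA-Z]*)(?:\.|/|\?|\-|"|,|\b), so it matches "farm",
# "farming", "farmer", etc. only when bounded by the listed separators.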
def convert_keywords(keywords):
converted_keywords = []
for k in keywords:
# Sorts the included words backwards to make sure we get the longer words first
included_words = k["include"]
included_words = sorted(included_words, key=lambda l: (len(l), l), reverse=True)
punc = get_punctuation_for_regex(punctuation)
included_regexes = []
for w in included_words:
r = r'(?:{})({})(?:{})'.format(punc, w.replace("*", "[a-zA-Z]*"), punc)
included_regexes.append(r)
excluded_words = k["exclude"]
excluded_regexes = []
for w in excluded_words:
r = r"\b{}\b".format(w.replace("*", "[a-zA-Z]*"))
excluded_regexes.append(w)
k["included_regexes"] = included_regexes
k["include"] = included_words
k["excluded_regexes"] = excluded_regexes
converted_keywords.append(k)
return converted_keywords
# Reads all the text from each text file in the corpus directory. TODO: Resolve utf-8.
def read_corpuses(collections):
new_collections = []
for c in collections:
directory = data_dirname + "corpus-files/" + c["id"]
filenames = []
content = []
for file in os.listdir(directory):
if ".txt" not in file: continue
filenames.append(file)
# "ISO-8859-1" encoding otherwise?
with open("{}/{}".format(directory, file), "r", encoding = "utf-8", errors = "ignore") as f:
content.append(f.read())
c["filenames"] = filenames
c["content"] = content
new_collections.append(c)
return new_collections
# Gets the files for inclusion--excludes any files that are only male interviewees or
# interviews with no transcripts.
def get_included_files(collections, df1, df2, runJSON):
files_for_inclusion = {} # Final list of files for inclusion
# Statistics about file inclusion/exclusion
num_files_no_transcript = {} # Total number of files in collection with no transcript
people = {} # Information about individual people (only "Sex" == "Female" and "Sex" == "Unknown")
male_interviews = {} # Interviews that include males
male_plus_interviews = {} # Interviews with both male and non-male interviews
interview_years = {}
interview_years_by_file = {}
total_interviews = 0
    # Builds a dictionary mapping each interviewee id to that interviewee's metadata row
    interviewee_id_to_metadata = defaultdict(lambda: [])
    for i, r in df2.iterrows():
        interviewee_id_to_metadata[r["interviewee_id"]] = r
# Needed information across all collections
interview_years_all_collections = defaultdict(lambda:0)
interviewee_metadata_all_collections = defaultdict(lambda:defaultdict(lambda:0))
# Statistics about interviewees --> interviews
interviews_to_interviewees = defaultdict(lambda:[])
filenames_map = {}
for c in collections:
curr_id = c["id"]
files_for_inclusion[curr_id] = {}
num_files_no_transcript[curr_id] = 0
people[curr_id] = {}
male_interviews[curr_id] = {}
male_plus_interviews[curr_id] = {}
interview_years[curr_id] = defaultdict(lambda:0)
interview_years_by_file = defaultdict(lambda:{})
for f in c["filenames"]:
filenames_map[f] = curr_id
for i, r in df1.iterrows():
f = r["project_file_name"]
# Skips files with no project filename (shouldn't happen)
if pd.isnull(f):
continue
# SKips files not in collection
if f not in filenames_map:
continue
curr_c = filenames_map[f]
# Skips files with no transcript
no_transcript = r["no_transcript"]
if not pd.isnull(no_transcript) and no_transcript:
num_files_no_transcript[curr_c] += 1
continue
# If the interviewee is male, marks it and continues (as there may be the same file later on with a non-male interviewee)
for person_id in r["interviewee_ids"].split(";"):
            interviewee_info = interviewee_id_to_metadata[person_id]
            if len(interviewee_info) != 0:
                sex = interviewee_info["sex"]
                if not pd.isnull(sex) and sex.strip() == "Male":
                    male_interviews[curr_c][f] = 1
                    if f in files_for_inclusion[curr_c]:
                        male_plus_interviews[curr_c][f] = 1 # Means it contains both male and non-male
                    continue
# If the current interviewee is non-male and the interview has a male, mark it
if f in male_interviews[curr_c]:
male_plus_interviews[curr_c][f] = 1
male_interviews[curr_c][f] = 0
# At this point, we have a new interview (not previously added) with at least one non-male
# interviewee we want to add!
        interviewees_list = r["interviewee_ids"].split(";")
        for j in interviewees_list:
            if j == 0:
                continue
            info = interviewee_id_to_metadata[j]
            interviewee_name = str(info["interviewee_name"])
            interviews_to_interviewees[f].append(j)
            #if interviewee_name not in people:
            birth_decade = info["birth_decade"]
            education = info["education"]
            identified_race = info["identified_race"]
            interviewee_birth_country = info["interviewee_birth_country"]
            sex = info["sex"]
            curr_person = {}
            curr_person["birth_decade"] = int(birth_decade) if not pd.isnull(birth_decade) and birth_decade.isnumeric() else "Not given"
            curr_person["education"] = education if not pd.isnull(education) else "Not given"
            curr_person["identified_race"] = identified_race if not pd.isnull(identified_race) else "Not given"
            curr_person["sex"] = sex if not pd.isnull(sex) else "Not given"
            curr_person["birth_country"] = interviewee_birth_country if not pd.isnull(interviewee_birth_country) else "Not given"
            people[j] = curr_person
interviewee_metadata_all_collections["birth_decade"][curr_person["birth_decade"]] += 1
interviewee_metadata_all_collections["education"][curr_person["education"]] += 1
interviewee_metadata_all_collections["race"][curr_person["identified_race"]] += 1
interviewee_metadata_all_collections["sex"][curr_person["sex"]] += 1
interviewee_metadata_all_collections["birth_country"][curr_person["birth_country"]] += 1
files_for_inclusion[curr_c][f] = 1
date_of_first_interview = r["date_of_first_interview"]
if pd.isnull(date_of_first_interview):
interview_years[curr_c]["Not given"] += 1
interview_years_by_file[curr_c][f] = "Not given"
interview_years_all_collections["Not given"] += 1
else:
year = date_of_first_interview.split("/")[2]
# Attempts to fix the two numbered ones; assumes anything that is 00-19 is in 2000s
if len(year) == 2:
if int(year) <= 19:
year = "20{}".format(year)
else:
year = "19{}".format(year)
interview_years[curr_c][year] += 1
interview_years_by_file[curr_c][f] = year
interview_years_all_collections[year] += 1
# Calculates total number of interviews
for c in files_for_inclusion:
total_interviews += sum(files_for_inclusion[c].values())
# Updates the summary report data
runJSON["summary-report"]["total-interviewees"] = len(people)
runJSON["summary-report"]["total-interviews"] = total_interviews
runJSON["summary-report"]["time-range-interviews"] = fill_years(interview_years_all_collections, 1)
runJSON["summary-report"]["time-range-birth-year"] = fill_years(interviewee_metadata_all_collections["birth_decade"], 10)
runJSON["summary-report"]["race"] = interviewee_metadata_all_collections["race"]
runJSON["summary-report"]["sex"] = interviewee_metadata_all_collections["sex"]
runJSON["summary-report"]["education"] = interviewee_metadata_all_collections["education"]
runJSON["summary-report"]["birth_country"] = interviewee_metadata_all_collections["birth_country"]
metadata = {
"files_for_inclusion": files_for_inclusion,
"people": people,
"num_files_no_transcript": num_files_no_transcript,
"male_interviews": male_interviews,
"male_plus_interviews": male_plus_interviews,
"interview_years": interview_years,
"interview_years_by_file": interview_years_by_file,
"interviews_to_interviewees": interviews_to_interviewees,
"interviewee_ids_to_metadata": interviewee_id_to_metadata
}
return metadata
# Reads in the metadata to collect statistics and excludes any files that are only male
# interviewees or interviews with no transcripts for each collection.
def read_metadata(collections, metadata_file_interviews, metadata_file_interviewees, runJSON):
df1 = pd.read_csv(data_dirname + "metadata-files/" + metadata_file_interviews, encoding = "utf-8", header = 0)
df2 = pd.read_csv(data_dirname + "metadata-files/" + metadata_file_interviewees, encoding = "utf-8", header = 0)
return get_included_files(collections, df1, df2, runJSON)
# Downloads relevant libraries and otherwise sets us up for a successful run.
def set_up(runJSON):
print_message("progress-message", "Setting up the subcorpora run...")
# download_nltk()
runId, runName, runDate, collections, keywords, metadata_file_interviews, metadata_file_interviewees = read_arguments()
runJSON["id"] = runId
runJSON["name"] = runName
runJSON["date"] = runDate
runJSON["metadata_file_interviews"] = metadata_file_interviews
runJSON["metadata_file_interviewees"] = metadata_file_interviewees
runJSON["collections"] = [c["id"] for c in collections]
runJSON["keyword-lists"] = [k["name"] + "-" + k["version"] for k in keywords]
runDirname = create_run_directory(runId)
runJSON["runDirname"] = runDirname
runJSON["summary-report"] = {
"total-collections": len(collections),
"total-keywords": sum([len(k["include"]) for k in keywords]),
"total-collections-with-keywords": 0,
"total-interviews-with-keywords": 0,
"total-keywords-found": 0,
"keywords-over-time": defaultdict(lambda:defaultdict(lambda:0)),
"keyword-counts": defaultdict(lambda:0)
}
keyword_regexes = convert_keywords(keywords)
collections = read_corpuses(collections)
metadata = read_metadata(collections, metadata_file_interviews, metadata_file_interviewees, runJSON)
return collections, keywords, keyword_regexes, metadata, runDirname
# Gets n words before and after the match and returns them
def get_words_around(m_text, m_loc, content, n):
before_text = content[:m_loc].split(" ")
after_loc = m_loc + len(m_text)
after_text = content[after_loc:].split(" ")
before_len = len(before_text) - n
if before_len < 0:
before_len = 0
after_len = n if n <= len(after_text) else len(after_text)
return " ".join(before_text[before_len:]), m_text, " ".join(after_text[:after_len])
# Checks to see if there's anything it needs to exclude
def need_to_exclude(before, after, m_text, exclude_regexes):
m_len = len(m_text.split(" "))
if len(exclude_regexes)==1 and exclude_regexes[0]=="":
return False
for r in exclude_regexes:
r_len = len(r.split(" "))
leftover_len = r_len - m_len
if leftover_len < 0: leftover_len = 0
# Checks if the adding on the before has the regex
prev = before[(len(before)-leftover_len):]
prev_text = "{} {}".format(" ".join(prev), m_text).strip()
if re.match(r, prev_text, re.IGNORECASE): return True
# Checks if the adding on the after has the regex
af = after[:leftover_len]
af_text = "{} {}".format(m_text, " ".join(af)).strip()
if re.match(r, af_text, re.IGNORECASE): return True
return False
# Finds the keywords in each file.
def find_keywords(files_for_inclusion, filenames, content, words, included_regexes, excluded_regexes, interview_years_by_file, people, interviews_to_interviewees, runJSON, currRunJSON):
# Stores the frequency of each keyword across all files (keyword --> count)
keyword_freq = defaultdict(lambda:0)
keyword_to_dates = defaultdict(lambda:defaultdict(lambda:0))
# Basic statistics
num_with_keywords = 0
num_interviews = 0
total_keywords = 0 # Total number of keywords found in all files
all_matches = {}
time_range_interviews = defaultdict(lambda:0)
# Interviewee statistics
birth_decade_map = defaultdict(lambda:0)
sex_map = defaultdict(lambda:0)
education_map = defaultdict(lambda:0)
race_map = defaultdict(lambda:0)
birth_country_map = defaultdict(lambda:0)
interviewees_done = {}
    # Match statistics
match_birth_decade_map = defaultdict(lambda:0)
match_sex_map = defaultdict(lambda:0)
match_education_map = defaultdict(lambda:0)
match_race_map = defaultdict(lambda:0)
match_birth_country_map = defaultdict(lambda:0)
match_interviewees_done = {}
# Loops through each file, looking for keywords, and stores the matches
for i in range(len(content)):
file = filenames[i]
if file not in files_for_inclusion or files_for_inclusion[file] == 0:
continue
date_of_interview = "Not given"
if file in interview_years_by_file:
date_of_interview = interview_years_by_file[file]
c = " {}.".format(" ".join(content[i].split())) # Splits the content by spaces (combines newlines, etc.)
# Stores the file's keyword counts and matches
curr_keywords = defaultdict(lambda:0)
curr_matches = []
time_range_interviews[date_of_interview] += 1
num_interviews += 1
interviewees = interviews_to_interviewees[file]
for interviewee in interviewees:
if interviewee in interviewees_done:
continue
interviewee_info = people[interviewee]
race_map[interviewee_info["identified_race"]] += 1
birth_decade_map[interviewee_info["birth_decade"]] += 1
sex_map[interviewee_info["sex"]] += 1
education_map[interviewee_info["education"]] += 1
birth_country_map[interviewee_info["birth_country"]] += 1
interviewees_done[interviewee] = 1
# Loops through the regexes
for j in range(len(included_regexes)):
curr_r = included_regexes[j]
regex = re.compile(curr_r, re.IGNORECASE) # Currently ignores the case
for m in regex.finditer(c):
m_loc = m.start()
m_text = m.group(1)
w = words[j]
before, new_m_text, after = get_words_around(m_text, m_loc, c, MAX_EXCLUDE_REGEX_LENGTH)
if need_to_exclude(before, after, new_m_text, excluded_regexes):
continue
# Updates the statistics
keyword_freq[w] += 1
curr_keywords[w] += 1
runJSON["summary-report"]["keyword-counts"][w] += 1
keyword_to_dates[w][date_of_interview] += 1
total_keywords += 1
runJSON["summary-report"]["keywords-over-time"][w][date_of_interview] += 1
# Adds it onto the matches
curr_matches.append([m_loc, before, new_m_text, after])
interviewees = interviews_to_interviewees[file]
for interviewee in interviewees:
if interviewee in match_interviewees_done:
continue
interviewee_info = people[interviewee]
match_race_map[interviewee_info["identified_race"]] += 1
match_birth_decade_map[interviewee_info["birth_decade"]] += 1
match_sex_map[interviewee_info["sex"]] += 1
match_education_map[interviewee_info["education"]] += 1
match_birth_country_map[interviewee_info["birth_country"]] += 1
match_interviewees_done[interviewee] = 1
if len(curr_keywords) > 0:
num_with_keywords += 1
all_matches[file] = curr_matches
currRunJSON["total-keywords"] = len(included_regexes)
currRunJSON["total-keywords-found"] = total_keywords
currRunJSON["total-interviews"] = num_interviews
currRunJSON["total-interviews-with-keywords"] = num_with_keywords
currRunJSON["time-range-interviews"] = fill_years(time_range_interviews, 1)
currRunJSON["keyword-counts"] = keyword_freq
currRunJSON["sex"] = sex_map
currRunJSON["race"] = race_map
currRunJSON["time-range-birth-year"] = fill_years(birth_decade_map, 10)
currRunJSON["education"] = education_map
currRunJSON["birth_country"] = birth_country_map
# Writes keyword counts to CSV.
with open('keywordfreq.csv', 'w') as csvfile:
    # Create the writer once for the whole file, not once per row.
    data_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
    for word in keyword_freq:
        data_writer.writerow([word, keyword_freq[word]])
# Writes match statistics to CSV.
with open('match_stats.csv', 'w') as csvfile:
data_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
data_writer.writerow(["total", len(match_interviewees_done)])
for race in race_map:
data_writer.writerow([race, match_race_map[race]])
for sex in sex_map:
data_writer.writerow([sex, match_sex_map[sex]])
for education in education_map:
data_writer.writerow([education, match_education_map[education]])
for country in birth_country_map:
data_writer.writerow([country, match_birth_country_map[country]])
# Fixes up the keywords over time
keywordsOverTime = keyword_to_dates
all_years = []
for k, v in keywordsOverTime.items():
all_years += v.keys()
all_years = list(set(all_years))
all_years.sort()
newKeywordsOverTime = {}
for k, v in keywordsOverTime.items():
newKeywordsOverTime[k] = {}
for y in all_years:
newKeywordsOverTime[k][y] = v[y]
newKeywordsOverTime[k] = fill_years(newKeywordsOverTime[k], 1)
currRunJSON["keywords-over-time"] = newKeywordsOverTime
write_subcorpora(currRunJSON["runDirname"], filenames, content, all_matches.keys())
return all_matches
# Gets all the surrounding contexts for keyword matches in files.
def get_all_contexts(filenames, content, all_matches, currRunJSON):
keywordJSON = {}
for i in range(len(filenames)):
f = filenames[i]
if f not in all_matches:
continue
bolded_contexts = []
matches = all_matches[f]
c = content[i]
matches = sorted(matches, key=lambda x: x[0])
for j in range(len(matches)):
m = matches[j]
loc = m[0]
before = m[1]
word = m[2]
after = m[3]
cJSON = {
"id": str(j) + "-" + f,
"keywordContext": [before, word, after],
"flagged": False,
"falseHit": False
}
bolded_contexts.append(cJSON)
keywordJSON[f] = bolded_contexts
currRunJSON["keyword-contexts"] = keywordJSON
# Creates one new run with one collection and one keyword list
def create_new_run(c, k, metadata, runJSON):
k["id"] = k["name"] + "-" + k["version"]
print_message("progress-message", "Creating run for " + c["id"] + " and " + k["id"])
currRunId = c["id"] + "-" + k["id"]
currRunJSON = {
"id": currRunId,
"collection": c["id"],
"keyword-list": k["id"],
"runDirname": runJSON["runDirname"] + "/" + currRunId
}
all_matches = find_keywords(metadata["files_for_inclusion"][c["id"]], c["filenames"], c["content"], k["include"], k["included_regexes"], k["excluded_regexes"], metadata["interview_years_by_file"][c["id"]], metadata["people"], metadata["interviews_to_interviewees"], runJSON, currRunJSON)
get_all_contexts(c["filenames"], c["content"], all_matches, currRunJSON)
num_with_keywords = currRunJSON["total-interviews-with-keywords"]
if num_with_keywords > 0:
runJSON["summary-report"]["total-collections-with-keywords"] += 1
runJSON["summary-report"]["total-interviews-with-keywords"] += num_with_keywords
runJSON["summary-report"]["total-keywords-found"] += currRunJSON["total-keywords-found"]
runJSON["individual-reports"][currRunId] = currRunJSON
def main():
runJSON = {} # Final JSON object that contains this run information
collections, keywords, keyword_regexes, metadata, runDirname = set_up(runJSON)
runJSON["individual-reports"] = {}
progressPerRun = int(95/(len(collections) * len(keywords)))
totalProgress = 5
for c in collections:
for k in keywords:
create_new_run(c, k, metadata, runJSON)
totalProgress += progressPerRun
print_message("progress", totalProgress)
# Fixes up the summary-level keywords-over-time map (same normalization as in find_keywords).
keywordsOverTime = runJSON["summary-report"]["keywords-over-time"]
all_years = []
for k, v in keywordsOverTime.items():
all_years += v.keys()
all_years = list(set(all_years))
all_years.sort()
newKeywordsOverTime = {}
for k, v in keywordsOverTime.items():
newKeywordsOverTime[k] = {}
for y in all_years:
newKeywordsOverTime[k][y] = v[y]
newKeywordsOverTime[k] = fill_years(newKeywordsOverTime[k], 1)
runJSON["summary-report"]["keywords-over-time"] = newKeywordsOverTime
for word in newKeywordsOverTime:
with open(str(word)+'timeusage.csv', 'w') as csvfile:
data_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
for year in newKeywordsOverTime[word]:
data_writer.writerow([year, newKeywordsOverTime[word][year]])
with open(data_dirname + "run.json", "w") as f:
f.write(json.dumps(runJSON))
print_message("progress", 100)
if __name__ == '__main__':
main()

# ===== setup.py =====

from setuptools import setup
setup(name='Linear_Congruential_Generator',
version='0.1.5',
description="""The random number generator.
""",
long_description="""
# Linear Congruential Generator |  
# Installing
```console
pip install Linear-Congruential-Generator
```
# Import
```python
from lcg import LCG
```
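
# How it works
A linear congruential generator produces pseudo-random integers via the
recurrence X(n+1) = (a * X(n) + c) mod m, for a chosen multiplier a,
increment c, and modulus m.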
""",
long_description_content_type='text/markdown',
url='https://github.com/onuratakan/Linear_Congruential_Generator',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=["lcg"],
package_dir={'':'src'},
python_requires='>=3',
zip_safe=False)

# ===== Django admin (characters app) =====

from django.contrib import admin
from characters.models import Character
# Register your models here.
admin.site.register(Character)

# ===== predictive/MLManager.py =====

import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
class MLManager:
@staticmethod
def logistic_regression(data):
X_train, X_test, y_train, y_test = MLManager.split_train_test(data)
if MLManager._has_only_one_class(y_train):
return None, None, None, None, None
model = LogisticRegression(solver='lbfgs')
model.fit(X_train, y_train)
return model, X_train, X_test, y_train, y_test
@staticmethod
def compare_output(outs_1, outs_2):
    """Returns the fraction of positions at which the two outputs agree."""
    goods = 0
    for out_1, out_2 in zip(outs_1, outs_2):
        if out_1 == out_2:
            goods += 1
    return goods / len(outs_1)
@staticmethod
def split_train_test(data, x=['msm_gyro', 'msm_accelerometer'], y='PROBABLE', test_size=0.1):
return train_test_split(data[x], data[y], test_size=test_size)
@staticmethod
def _has_only_one_class(y):
return len(np.unique(y)) == 1
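

if __name__ == '__main__':
    # Minimal, self-contained smoke-test sketch. The column names mirror
    # split_train_test's defaults; the data is synthetic and purely
    # illustrative (assumes numpy >= 1.17 for default_rng).
    rng = np.random.default_rng(0)
    demo = pd.DataFrame({
        'msm_gyro': rng.normal(size=200),
        'msm_accelerometer': rng.normal(size=200),
        'PROBABLE': rng.integers(0, 2, size=200),
    })
    model, X_train, X_test, y_train, y_test = MLManager.logistic_regression(demo)
    if model is not None:
        # Accuracy on the held-out 10% test split.
        print('test accuracy:', model.score(X_test, y_test))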

# ===== Django user admin =====

from django.contrib import admin
from .models import User
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
list_display = ('username', 'email', 'is_student', 'is_teacher')
fields = ['username', 'email', 'is_student', 'is_teacher']

# ===== lds_tools.py =====

#!/usr/bin/env python
#
# MIT License
#
# Copyright (c) 2016 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# @author <NAME>
# @file lds_tools.py Module for processing the output of a Neato Botvac LDS.
import numpy as np
import serial
class Scan(object):
"""
Class to hold one scan from the LDS.
"""
def __init__(self):
# Scan data.
self._distances = np.array([], dtype=np.float32)
self._angles = np.array([], dtype=np.float32)
self._intensities = np.array([], dtype=np.int32)
self._error_codes = np.array([], dtype=np.int32)
# Dirty flag.
self._unsorted = True
# The max range for the Botvac LDS.
# TODO: Should be configurable.
self._distance_max = 5.
def _sort_measurements(self):
"""
Method to sort all of the measurements by angle.
"""
if self._unsorted:
sorted_ndxs = np.argsort(self._angles)
self._distances = self._distances[sorted_ndxs]
self._angles = self._angles[sorted_ndxs]
self._intensities = self._intensities[sorted_ndxs]
self._error_codes = self._error_codes[sorted_ndxs]
self._unsorted = False
def distances(self):
"""
Method to return the distance measurements in meters.
Returns:
np.array: Distance measurements.
"""
self._sort_measurements()
return self._distances
def angles(self):
"""
Method to return the measurement angles in radians.
Returns:
np.array: Measurement angles.
"""
self._sort_measurements()
return self._angles
def intensities(self):
"""
Method to return the measurements intensities.
Returns:
np.array: Measurement intensities.
"""
self._sort_measurements()
return self._intensities
def error_codes(self):
"""
Method to return the measurement error codes.
Returns:
np.array: Measurement error codes.
"""
self._sort_measurements()
return self._error_codes
def x(self):
"""
Method to get the X coordinates of the 2D point cloud.
Returns:
np.array: 1D array of points.
The order of the points is sorted by angle.
"""
self._sort_measurements()
return self._distances*np.cos(self._angles)
def y(self):
"""
Method to get the Y coordinates of the 2D point cloud.
Returns:
np.array: 1D array of points.
The order of the points is sorted by angle.
"""
self._sort_measurements()
return self._distances*np.sin(self._angles)
def points(self):
"""
Method to get the 2D point cloud in cartesian coordinates.
Returns:
np.array: 2D array of points whose np.shape() = (2, n)
The first row contains all of the x coordinates.
The second row contains all of the y coordinates.
The order of the points is sorted by angle.
"""
return np.vstack((self.x(), self.y()))
def add_measurement(self,
distance,
angle,
intensity=0,
error_code=0,
linear_unit='millimeters',
angular_unit='degrees'):
"""
Method to add a new measurement to the scan.
Args:
distance (float): Distance measured.
angle (float): Angle at which the measurement was taken.
intensity (int): Intensity of the reading.
error_code (int): Error code for the reading.
linear_unit (str): Unit of the distance measurement.
Default is millimeters.
Available options are:
millimeters
meters
angular_unit (str): Unit of the angle. Default is degrees.
Available options are:
degrees
radians
"""
self._unsorted = True
acceptable_lu = ['meters', 'millimeters']
acceptable_au = ['degrees', 'radians']
if linear_unit not in acceptable_lu:
raise ValueError('{0} is not an available linear unit.'
.format(linear_unit))
elif angular_unit not in acceptable_au:
raise ValueError('{0} is not an available angular unit.'
.format(angular_unit))
if linear_unit == 'millimeters':
distance = distance/1000.
if distance > self._distance_max:
distance = 0
if angular_unit == 'degrees':
angle = angle*np.pi/180.
self._distances = np.append(self._distances, distance)
self._angles = np.append(self._angles, angle)
self._intensities = np.append(self._intensities, intensity)
self._error_codes = np.append(self._error_codes, error_code)
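

# Example of the Scan API above (the measurements are made up):
#   scan = Scan()
#   scan.add_measurement(1000., 0.)   # 1 m at 0 degrees
#   scan.add_measurement(1500., 90.)  # 1.5 m at 90 degrees
#   xy = scan.points()                # 2x2 cartesian array, sorted by angle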
class ScanParser(object):
"""
Class for creating Scan objects from strings and files.
"""
@staticmethod
def file_to_strings(filename):
"""
Function to create a list of strings. Each string contains the data
from one LDS scan.
Args:
filename (str): Name of the file containing the laser scan(s).
Returns:
list: List of scan strings.
"""
# Scan output list.
scans = list()
scan_string = str()
# Parsing tokens.
# TODO: Allow changing of tokens.
start_token = '<PASSWORD>'
end_token = '<PASSWORD>'
# Parsing flags.
append_to_str = False
# Parse file into scan strings.
with open(filename, 'r') as scan_file:  # Text mode: tokens compare as str.
for line in scan_file:
if start_token in line:
# If we hit the start token, start recording a new string.
scan_string = str()
append_to_str = True
elif end_token in line and append_to_str:
# If we hit the end token and we were recording then save.
scans.append(scan_string)
append_to_str = False
elif append_to_str:
scan_string += line
return scans
@staticmethod
def from_string(scan_string):
"""
Function to parse an LDS scan string.
Args:
scan_string (str): String containing the laser scan.
Returns:
Scan: The parsed Scan object.
"""
scan_lines = scan_string.split()
scan = Scan()
prev_angle = None  # No measurement parsed yet.
for line in scan_lines:
meas = line.split(',')
# If the measurement doesn't have the right number of elements
# something went wrong.
if len(meas) != 4:
raise ValueError('{0} could not be parsed.'.format(line))
angle = float(meas[0])
distance = float(meas[1])
intense = int(meas[2])
err = int(meas[3])
# All of the elements should be positive.
if distance < 0:
raise ValueError('{0} is an invalid distance.'
.format(distance))
if angle < 0 or angle > 359:
raise ValueError('{0} is an invalid angle.'.format(angle))
if intense < 0:
raise ValueError('{0} is an invalid intensity.'
.format(intense))
if err < 0:
raise ValueError('{0} is an invalid error code.'.format(err))
# The angles should always increase in value within one scan.
if prev_angle is not None and prev_angle >= angle:
    raise ValueError(('{0} was not an increase from the previous'
                      ' angle of {1}.').format(angle, prev_angle))
prev_angle = angle  # Track the latest angle, not just the first one.
scan.add_measurement(distance, angle, intense, err)
return scan
@classmethod
def from_file(cls, filename):
"""
Function to produce scan objects from LDS scans in a file.
Args:
filename (str): Name of the file with scans.
Returns:
list: List of Scan objects.
"""
scans = list()
scan_strings = cls.file_to_strings(filename)
for scan_string in scan_strings:
try:
    scans.append(cls.from_string(scan_string))
except ValueError:
    # Skip scans that fail to parse instead of aborting the whole file.
    pass
return scans
@classmethod
def from_serial(cls, port_name, port_baud):
"""
Function to parse an LDS scan string.
Args:
port_name (str): Name of the serial port botvac is connected to.
port_baud (str): Speed of the serial port botvac is connected to.
Returns:
Scan: The parsed Scan object.
"""
scan = Scan()
# Open the serial port.
ser = serial.Serial(port_name, port_baud)
# Request a scan. pyserial expects bytes when writing under Python 3.
ser.write(b'GetLDSScan\r')
# Parsing tokens.
start_token = '<PASSWORD>'
end_token = '<PASSWORD>'
line = ''
while start_token not in line:
line = ser.readline().decode('ascii', errors='ignore')
# Parse serial port data into scan strings.
read_port = True
scan_string = str()
while read_port:
line = ser.readline().decode('ascii', errors='ignore')
if end_token in line:
read_port = False
else:
scan_string += line
return cls.from_string(scan_string)
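

if __name__ == '__main__':
    # Illustrative usage sketch only; 'scans.txt' is a hypothetical log
    # file captured from the robot's USB console.
    for scan in ScanParser.from_file('scans.txt'):
        print('parsed scan with {0} measurements'.format(len(scan.distances())))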

# ===== pyidf: "Zone Airflow" data objects =====

""" Data objects in group "Zone Airflow"
"""
from collections import OrderedDict
import logging
from pyidf.helper import DataObject
logger = logging.getLogger("pyidf")
logger.addHandler(logging.NullHandler())
class ZoneInfiltrationDesignFlowRate(DataObject):
""" Corresponds to IDD object `ZoneInfiltration:DesignFlowRate`
Infiltration is specified as a design level which is modified by a Schedule fraction, temperature difference and wind speed:
Infiltration=Idesign * FSchedule * (A + B*|(Tzone-Todb)| + C*WindSpd + D * WindSpd**2)
If you use a ZoneList in the Zone or ZoneList name field then this definition applies
to all the zones in the ZoneList.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'zone or zonelist name',
{'name': u'Zone or ZoneList Name',
'pyname': u'zone_or_zonelist_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'schedule name',
{'name': u'Schedule Name',
'pyname': u'schedule_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'design flow rate calculation method',
{'name': u'Design Flow Rate Calculation Method',
'pyname': u'design_flow_rate_calculation_method',
'default': u'Flow/Zone',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Flow/Zone',
u'Flow/Area',
u'Flow/ExteriorArea',
u'Flow/ExteriorWallArea',
u'AirChanges/Hour'],
'autocalculatable': False,
'type': 'alpha'}),
(u'design flow rate',
{'name': u'Design Flow Rate',
'pyname': u'design_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'flow per zone floor area',
{'name': u'Flow per Zone Floor Area',
'pyname': u'flow_per_zone_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'flow per exterior surface area',
{'name': u'Flow per Exterior Surface Area',
'pyname': u'flow_per_exterior_surface_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'air changes per hour',
{'name': u'Air Changes per Hour',
'pyname': u'air_changes_per_hour',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'1/hr'}),
(u'constant term coefficient',
{'name': u'Constant Term Coefficient',
'pyname': u'constant_term_coefficient',
'default': 1.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'temperature term coefficient',
{'name': u'Temperature Term Coefficient',
'pyname': u'temperature_term_coefficient',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'velocity term coefficient',
{'name': u'Velocity Term Coefficient',
'pyname': u'velocity_term_coefficient',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'velocity squared term coefficient',
{'name': u'Velocity Squared Term Coefficient',
'pyname': u'velocity_squared_term_coefficient',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 12,
'name': u'ZoneInfiltration:DesignFlowRate',
'pyname': u'ZoneInfiltrationDesignFlowRate',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_or_zonelist_name(self):
"""field `Zone or ZoneList Name`
Args:
value (str): value for IDD Field `Zone or ZoneList Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_or_zonelist_name` or None if not set
"""
return self["Zone or ZoneList Name"]
@zone_or_zonelist_name.setter
def zone_or_zonelist_name(self, value=None):
"""Corresponds to IDD field `Zone or ZoneList Name`"""
self["Zone or ZoneList Name"] = value
@property
def schedule_name(self):
"""field `Schedule Name`
Args:
value (str): value for IDD Field `Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `schedule_name` or None if not set
"""
return self["Schedule Name"]
@schedule_name.setter
def schedule_name(self, value=None):
"""Corresponds to IDD field `Schedule Name`"""
self["Schedule Name"] = value
@property
def design_flow_rate_calculation_method(self):
"""field `Design Flow Rate Calculation Method`
| The entered calculation method is used to create the maximum amount of infiltration
| for this set of attributes
| Choices: Flow/Zone => Design Flow Rate -- simply enter Design Flow Rate
| Flow/Area => Flow per Zone Floor Area - Value * Floor Area (zone) = Design Flow Rate
| Flow/ExteriorArea => Flow per Exterior Surface Area - Value * Exterior Surface Area (zone) = Design Flow Rate
| Flow/ExteriorWallArea => Flow per Exterior Surface Area - Value * Exterior Wall Surface Area (zone) = Design Flow Rate
| AirChanges/Hour => Air Changes per Hour - Value * Floor Volume (zone) adjusted for m3/s = Design Volume Flow Rate
| "Idesign" in Equation is the result.
| Default value: Flow/Zone
Args:
value (str): value for IDD Field `Design Flow Rate Calculation Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `design_flow_rate_calculation_method` or None if not set
"""
return self["Design Flow Rate Calculation Method"]
@design_flow_rate_calculation_method.setter
def design_flow_rate_calculation_method(self, value="Flow/Zone"):
"""Corresponds to IDD field `Design Flow Rate Calculation Method`"""
self["Design Flow Rate Calculation Method"] = value
@property
def design_flow_rate(self):
"""field `Design Flow Rate`
| Units: m3/s
| IP-Units: ft3/min
Args:
value (float): value for IDD Field `Design Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `design_flow_rate` or None if not set
"""
return self["Design Flow Rate"]
@design_flow_rate.setter
def design_flow_rate(self, value=None):
"""Corresponds to IDD field `Design Flow Rate`"""
self["Design Flow Rate"] = value
@property
def flow_per_zone_floor_area(self):
"""field `Flow per Zone Floor Area`
| Units: m3/s-m2
Args:
value (float): value for IDD Field `Flow per Zone Floor Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `flow_per_zone_floor_area` or None if not set
"""
return self["Flow per Zone Floor Area"]
@flow_per_zone_floor_area.setter
def flow_per_zone_floor_area(self, value=None):
"""Corresponds to IDD field `Flow per Zone Floor Area`"""
self["Flow per Zone Floor Area"] = value
@property
def flow_per_exterior_surface_area(self):
"""field `Flow per Exterior Surface Area`
| use key Flow/ExteriorArea for all exterior surface area
| use key Flow/ExteriorWallArea to include only exterior wall area
| Units: m3/s-m2
Args:
value (float): value for IDD Field `Flow per Exterior Surface Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `flow_per_exterior_surface_area` or None if not set
"""
return self["Flow per Exterior Surface Area"]
@flow_per_exterior_surface_area.setter
def flow_per_exterior_surface_area(self, value=None):
"""Corresponds to IDD field `Flow per Exterior Surface Area`"""
self["Flow per Exterior Surface Area"] = value
@property
def air_changes_per_hour(self):
"""field `Air Changes per Hour`
| Units: 1/hr
Args:
value (float): value for IDD Field `Air Changes per Hour`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `air_changes_per_hour` or None if not set
"""
return self["Air Changes per Hour"]
@air_changes_per_hour.setter
def air_changes_per_hour(self, value=None):
"""Corresponds to IDD field `Air Changes per Hour`"""
self["Air Changes per Hour"] = value
@property
def constant_term_coefficient(self):
"""field `Constant Term Coefficient`
| "A" in Equation
| Default value: 1.0
Args:
value (float): value for IDD Field `Constant Term Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `constant_term_coefficient` or None if not set
"""
return self["Constant Term Coefficient"]
@constant_term_coefficient.setter
def constant_term_coefficient(self, value=1.0):
"""Corresponds to IDD field `Constant Term Coefficient`"""
self["Constant Term Coefficient"] = value
@property
def temperature_term_coefficient(self):
"""field `Temperature Term Coefficient`
| "B" in Equation
Args:
value (float): value for IDD Field `Temperature Term Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_term_coefficient` or None if not set
"""
return self["Temperature Term Coefficient"]
@temperature_term_coefficient.setter
def temperature_term_coefficient(self, value=None):
"""Corresponds to IDD field `Temperature Term Coefficient`"""
self["Temperature Term Coefficient"] = value
@property
def velocity_term_coefficient(self):
"""field `Velocity Term Coefficient`
| "C" in Equation
Args:
value (float): value for IDD Field `Velocity Term Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `velocity_term_coefficient` or None if not set
"""
return self["Velocity Term Coefficient"]
@velocity_term_coefficient.setter
def velocity_term_coefficient(self, value=None):
"""Corresponds to IDD field `Velocity Term Coefficient`"""
self["Velocity Term Coefficient"] = value
@property
def velocity_squared_term_coefficient(self):
"""field `Velocity Squared Term Coefficient`
| "D" in Equation
Args:
value (float): value for IDD Field `Velocity Squared Term Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `velocity_squared_term_coefficient` or None if not set
"""
return self["Velocity Squared Term Coefficient"]
@velocity_squared_term_coefficient.setter
def velocity_squared_term_coefficient(self, value=None):
"""Corresponds to IDD field `Velocity Squared Term Coefficient`"""
self["Velocity Squared Term Coefficient"] = value
class ZoneInfiltrationEffectiveLeakageArea(DataObject):
""" Corresponds to IDD object `ZoneInfiltration:EffectiveLeakageArea`
Infiltration is specified as effective leakage area at 4 Pa, schedule fraction, stack and wind coefficients, and
is a function of temperature difference and wind speed:
Infiltration=FSchedule * (AL /1000) SQRT(Cs*|(Tzone-Todb)| + Cw*WindSpd**2 )
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'zone name',
{'name': u'Zone Name',
'pyname': u'zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'schedule name',
{'name': u'Schedule Name',
'pyname': u'schedule_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'effective air leakage area',
{'name': u'Effective Air Leakage Area',
'pyname': u'effective_air_leakage_area',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'cm2'}),
(u'stack coefficient',
{'name': u'Stack Coefficient',
'pyname': u'stack_coefficient',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'wind coefficient',
{'name': u'Wind Coefficient',
'pyname': u'wind_coefficient',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 6,
'name': u'ZoneInfiltration:EffectiveLeakageArea',
'pyname': u'ZoneInfiltrationEffectiveLeakageArea',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_name(self):
"""field `Zone Name`
Args:
value (str): value for IDD Field `Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_name` or None if not set
"""
return self["Zone Name"]
@zone_name.setter
def zone_name(self, value=None):
"""Corresponds to IDD field `Zone Name`"""
self["Zone Name"] = value
@property
def schedule_name(self):
"""field `Schedule Name`
Args:
value (str): value for IDD Field `Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `schedule_name` or None if not set
"""
return self["Schedule Name"]
@schedule_name.setter
def schedule_name(self, value=None):
"""Corresponds to IDD field `Schedule Name`"""
self["Schedule Name"] = value
@property
def effective_air_leakage_area(self):
"""field `Effective Air Leakage Area`
| "AL" in Equation
| units are cm2 (square centimeters)
| Units: cm2
Args:
value (float): value for IDD Field `Effective Air Leakage Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `effective_air_leakage_area` or None if not set
"""
return self["Effective Air Leakage Area"]
@effective_air_leakage_area.setter
def effective_air_leakage_area(self, value=None):
"""Corresponds to IDD field `Effective Air Leakage Area`"""
self["Effective Air Leakage Area"] = value
@property
def stack_coefficient(self):
"""field `Stack Coefficient`
| "Cs" in Equation
Args:
value (float): value for IDD Field `Stack Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `stack_coefficient` or None if not set
"""
return self["Stack Coefficient"]
@stack_coefficient.setter
def stack_coefficient(self, value=None):
"""Corresponds to IDD field `Stack Coefficient`"""
self["Stack Coefficient"] = value
@property
def wind_coefficient(self):
"""field `Wind Coefficient`
| "Cw" in Equation
Args:
value (float): value for IDD Field `Wind Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `wind_coefficient` or None if not set
"""
return self["Wind Coefficient"]
@wind_coefficient.setter
def wind_coefficient(self, value=None):
"""Corresponds to IDD field `Wind Coefficient`"""
self["Wind Coefficient"] = value
class ZoneInfiltrationFlowCoefficient(DataObject):
""" Corresponds to IDD object `ZoneInfiltration:FlowCoefficient`
Infiltration is specified as flow coefficient, schedule fraction, stack and wind coefficients, and
is a function of temperature difference and wind speed:
Infiltration=FSchedule * SQRT( (c * Cs*|(Tzone-Todb)|**n)**2 + (c* Cw*(s * WindSpd)**2n)**2 )
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'zone name',
{'name': u'Zone Name',
'pyname': u'zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'schedule name',
{'name': u'Schedule Name',
'pyname': u'schedule_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'flow coefficient',
{'name': u'Flow Coefficient',
'pyname': u'flow_coefficient',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'stack coefficient',
{'name': u'Stack Coefficient',
'pyname': u'stack_coefficient',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'pressure exponent',
{'name': u'Pressure Exponent',
'pyname': u'pressure_exponent',
'default': 0.67,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'wind coefficient',
{'name': u'Wind Coefficient',
'pyname': u'wind_coefficient',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'shelter factor',
{'name': u'Shelter Factor',
'pyname': u'shelter_factor',
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 8,
'name': u'ZoneInfiltration:FlowCoefficient',
'pyname': u'ZoneInfiltrationFlowCoefficient',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_name(self):
"""field `Zone Name`
Args:
value (str): value for IDD Field `Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_name` or None if not set
"""
return self["Zone Name"]
@zone_name.setter
def zone_name(self, value=None):
"""Corresponds to IDD field `Zone Name`"""
self["Zone Name"] = value
@property
def schedule_name(self):
"""field `Schedule Name`
Args:
value (str): value for IDD Field `Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `schedule_name` or None if not set
"""
return self["Schedule Name"]
@schedule_name.setter
def schedule_name(self, value=None):
"""Corresponds to IDD field `Schedule Name`"""
self["Schedule Name"] = value
@property
def flow_coefficient(self):
"""field `Flow Coefficient`
| "c" in Equation
Args:
value (float): value for IDD Field `Flow Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `flow_coefficient` or None if not set
"""
return self["Flow Coefficient"]
@flow_coefficient.setter
def flow_coefficient(self, value=None):
"""Corresponds to IDD field `Flow Coefficient`"""
self["Flow Coefficient"] = value
@property
def stack_coefficient(self):
"""field `Stack Coefficient`
| "Cs" in Equation
Args:
value (float): value for IDD Field `Stack Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `stack_coefficient` or None if not set
"""
return self["Stack Coefficient"]
@stack_coefficient.setter
def stack_coefficient(self, value=None):
"""Corresponds to IDD field `Stack Coefficient`"""
self["Stack Coefficient"] = value
@property
def pressure_exponent(self):
"""field `Pressure Exponent`
| "n" in Equation
| Default value: 0.67
Args:
value (float): value for IDD Field `Pressure Exponent`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `pressure_exponent` or None if not set
"""
return self["Pressure Exponent"]
@pressure_exponent.setter
def pressure_exponent(self, value=0.67):
"""Corresponds to IDD field `Pressure Exponent`"""
self["Pressure Exponent"] = value
@property
def wind_coefficient(self):
"""field `Wind Coefficient`
| "Cw" in Equation
Args:
value (float): value for IDD Field `Wind Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `wind_coefficient` or None if not set
"""
return self["Wind Coefficient"]
@wind_coefficient.setter
def wind_coefficient(self, value=None):
"""Corresponds to IDD field `Wind Coefficient`"""
self["Wind Coefficient"] = value
@property
def shelter_factor(self):
"""field `Shelter Factor`
| "s" in Equation
Args:
value (float): value for IDD Field `Shelter Factor`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `shelter_factor` or None if not set
"""
return self["Shelter Factor"]
@shelter_factor.setter
def shelter_factor(self, value=None):
"""Corresponds to IDD field `Shelter Factor`"""
self["Shelter Factor"] = value
class ZoneVentilationDesignFlowRate(DataObject):
""" Corresponds to IDD object `ZoneVentilation:DesignFlowRate`
Ventilation is specified as a design level which is modified by a schedule fraction, temperature difference and wind speed:
Ventilation=Vdesign * Fschedule * (A + B*|(Tzone-Todb)| + C*WindSpd + D * WindSpd**2)
If you use a ZoneList in the Zone or ZoneList name field then this definition applies
to all the zones in the ZoneList.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'zone or zonelist name',
{'name': u'Zone or ZoneList Name',
'pyname': u'zone_or_zonelist_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'schedule name',
{'name': u'Schedule Name',
'pyname': u'schedule_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'design flow rate calculation method',
{'name': u'Design Flow Rate Calculation Method',
'pyname': u'design_flow_rate_calculation_method',
'default': u'Flow/Zone',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Flow/Zone',
u'Flow/Area',
u'Flow/Person',
u'AirChanges/Hour'],
'autocalculatable': False,
'type': 'alpha'}),
(u'design flow rate',
{'name': u'Design Flow Rate',
'pyname': u'design_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'flow rate per zone floor area',
{'name': u'Flow Rate per Zone Floor Area',
'pyname': u'flow_rate_per_zone_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'flow rate per person',
{'name': u'Flow Rate per Person',
'pyname': u'flow_rate_per_person',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-person'}),
(u'air changes per hour',
{'name': u'Air Changes per Hour',
'pyname': u'air_changes_per_hour',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'1/hr'}),
(u'ventilation type',
{'name': u'Ventilation Type',
'pyname': u'ventilation_type',
'default': u'Natural',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Natural',
u'Intake',
u'Exhaust',
u'Balanced'],
'autocalculatable': False,
'type': 'alpha'}),
(u'fan pressure rise',
{'name': u'Fan Pressure Rise',
'pyname': u'fan_pressure_rise',
'default': 0.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'Pa'}),
(u'fan total efficiency',
{'name': u'Fan Total Efficiency',
'pyname': u'fan_total_efficiency',
'default': 1.0,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'constant term coefficient',
{'name': u'Constant Term Coefficient',
'pyname': u'constant_term_coefficient',
'default': 1.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'temperature term coefficient',
{'name': u'Temperature Term Coefficient',
'pyname': u'temperature_term_coefficient',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'velocity term coefficient',
{'name': u'Velocity Term Coefficient',
'pyname': u'velocity_term_coefficient',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'velocity squared term coefficient',
{'name': u'Velocity Squared Term Coefficient',
'pyname': u'velocity_squared_term_coefficient',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'minimum indoor temperature',
{'name': u'Minimum Indoor Temperature',
'pyname': u'minimum_indoor_temperature',
'default': -100.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'minimum indoor temperature schedule name',
{'name': u'Minimum Indoor Temperature Schedule Name',
'pyname': u'minimum_indoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum indoor temperature',
{'name': u'Maximum Indoor Temperature',
'pyname': u'maximum_indoor_temperature',
'default': 100.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum indoor temperature schedule name',
{'name': u'Maximum Indoor Temperature Schedule Name',
'pyname': u'maximum_indoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'delta temperature',
{'name': u'Delta Temperature',
'pyname': u'delta_temperature',
'default': -100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deltaC'}),
(u'delta temperature schedule name',
{'name': u'Delta Temperature Schedule Name',
'pyname': u'delta_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'minimum outdoor temperature',
{'name': u'Minimum Outdoor Temperature',
'pyname': u'minimum_outdoor_temperature',
'default': -100.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'minimum outdoor temperature schedule name',
{'name': u'Minimum Outdoor Temperature Schedule Name',
'pyname': u'minimum_outdoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum outdoor temperature',
{'name': u'Maximum Outdoor Temperature',
'pyname': u'maximum_outdoor_temperature',
'default': 100.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum outdoor temperature schedule name',
{'name': u'Maximum Outdoor Temperature Schedule Name',
'pyname': u'maximum_outdoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum wind speed',
{'name': u'Maximum Wind Speed',
'pyname': u'maximum_wind_speed',
'default': 40.0,
'maximum': 40.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm/s'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 15,
'name': u'ZoneVentilation:DesignFlowRate',
'pyname': u'ZoneVentilationDesignFlowRate',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_or_zonelist_name(self):
"""field `Zone or ZoneList Name`
Args:
value (str): value for IDD Field `Zone or ZoneList Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_or_zonelist_name` or None if not set
"""
return self["Zone or ZoneList Name"]
@zone_or_zonelist_name.setter
def zone_or_zonelist_name(self, value=None):
"""Corresponds to IDD field `Zone or ZoneList Name`"""
self["Zone or ZoneList Name"] = value
@property
def schedule_name(self):
"""field `Schedule Name`
Args:
value (str): value for IDD Field `Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `schedule_name` or None if not set
"""
return self["Schedule Name"]
@schedule_name.setter
def schedule_name(self, value=None):
"""Corresponds to IDD field `Schedule Name`"""
self["Schedule Name"] = value
@property
def design_flow_rate_calculation_method(self):
"""field `Design Flow Rate Calculation Method`
| The entered calculation method is used to create the maximum amount of ventilation
| for this set of attributes
| Choices: Flow/Zone => Design Flow Rate -- simply enter Design Flow Rate
| Flow/Area => Flow Rate per Zone Floor Area - Value * Floor Area (zone) = Design Flow Rate
| Flow/Person => Flow Rate per Person - Value * #people = Design Flow Rate
| AirChanges/Hour => Air Changes per Hour - Value * Floor Volume (zone) adjusted for m3/s = Design Volume Flow Rate
| "Vdesign" in Equation is the result.
| Default value: Flow/Zone
Args:
value (str): value for IDD Field `Design Flow Rate Calculation Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `design_flow_rate_calculation_method` or None if not set
"""
return self["Design Flow Rate Calculation Method"]
@design_flow_rate_calculation_method.setter
def design_flow_rate_calculation_method(self, value="Flow/Zone"):
"""Corresponds to IDD field `Design Flow Rate Calculation Method`"""
self["Design Flow Rate Calculation Method"] = value
@property
def design_flow_rate(self):
"""field `Design Flow Rate`
| Units: m3/s
Args:
value (float): value for IDD Field `Design Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `design_flow_rate` or None if not set
"""
return self["Design Flow Rate"]
@design_flow_rate.setter
def design_flow_rate(self, value=None):
"""Corresponds to IDD field `Design Flow Rate`"""
self["Design Flow Rate"] = value
@property
def flow_rate_per_zone_floor_area(self):
"""field `Flow Rate per Zone Floor Area`
| Units: m3/s-m2
Args:
value (float): value for IDD Field `Flow Rate per Zone Floor Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `flow_rate_per_zone_floor_area` or None if not set
"""
return self["Flow Rate per Zone Floor Area"]
@flow_rate_per_zone_floor_area.setter
def flow_rate_per_zone_floor_area(self, value=None):
"""Corresponds to IDD field `Flow Rate per Zone Floor Area`"""
self["Flow Rate per Zone Floor Area"] = value
@property
def flow_rate_per_person(self):
"""field `Flow Rate per Person`
| Units: m3/s-person
Args:
value (float): value for IDD Field `Flow Rate per Person`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `flow_rate_per_person` or None if not set
"""
return self["Flow Rate per Person"]
@flow_rate_per_person.setter
def flow_rate_per_person(self, value=None):
"""Corresponds to IDD field `Flow Rate per Person`"""
self["Flow Rate per Person"] = value
@property
def air_changes_per_hour(self):
"""field `Air Changes per Hour`
| Units: 1/hr
Args:
value (float): value for IDD Field `Air Changes per Hour`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `air_changes_per_hour` or None if not set
"""
return self["Air Changes per Hour"]
@air_changes_per_hour.setter
def air_changes_per_hour(self, value=None):
"""Corresponds to IDD field `Air Changes per Hour`"""
self["Air Changes per Hour"] = value
@property
def ventilation_type(self):
"""field `Ventilation Type`
| Default value: Natural
Args:
value (str): value for IDD Field `Ventilation Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `ventilation_type` or None if not set
"""
return self["Ventilation Type"]
@ventilation_type.setter
def ventilation_type(self, value="Natural"):
"""Corresponds to IDD field `Ventilation Type`"""
self["Ventilation Type"] = value
@property
def fan_pressure_rise(self):
"""field `Fan Pressure Rise`
| pressure rise across the fan
| Units: Pa
Args:
value (float): value for IDD Field `Fan Pressure Rise`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fan_pressure_rise` or None if not set
"""
return self["Fan Pressure Rise"]
@fan_pressure_rise.setter
def fan_pressure_rise(self, value=None):
"""Corresponds to IDD field `Fan Pressure Rise`"""
self["Fan Pressure Rise"] = value
@property
def fan_total_efficiency(self):
"""field `Fan Total Efficiency`
| Default value: 1.0
Args:
value (float): value for IDD Field `Fan Total Efficiency`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fan_total_efficiency` or None if not set
"""
return self["Fan Total Efficiency"]
@fan_total_efficiency.setter
def fan_total_efficiency(self, value=1.0):
"""Corresponds to IDD field `Fan Total Efficiency`"""
self["Fan Total Efficiency"] = value
@property
def constant_term_coefficient(self):
"""field `Constant Term Coefficient`
| "A" in Equation
| Default value: 1.0
Args:
value (float): value for IDD Field `Constant Term Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `constant_term_coefficient` or None if not set
"""
return self["Constant Term Coefficient"]
@constant_term_coefficient.setter
def constant_term_coefficient(self, value=1.0):
"""Corresponds to IDD field `Constant Term Coefficient`"""
self["Constant Term Coefficient"] = value
@property
def temperature_term_coefficient(self):
"""field `Temperature Term Coefficient`
| "B" in Equation
Args:
value (float): value for IDD Field `Temperature Term Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_term_coefficient` or None if not set
"""
return self["Temperature Term Coefficient"]
@temperature_term_coefficient.setter
def temperature_term_coefficient(self, value=None):
"""Corresponds to IDD field `Temperature Term Coefficient`"""
self["Temperature Term Coefficient"] = value
@property
def velocity_term_coefficient(self):
"""field `Velocity Term Coefficient`
| "C" in Equation
Args:
value (float): value for IDD Field `Velocity Term Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `velocity_term_coefficient` or None if not set
"""
return self["Velocity Term Coefficient"]
@velocity_term_coefficient.setter
def velocity_term_coefficient(self, value=None):
"""Corresponds to IDD field `Velocity Term Coefficient`"""
self["Velocity Term Coefficient"] = value
@property
def velocity_squared_term_coefficient(self):
"""field `Velocity Squared Term Coefficient`
| "D" in Equation
Args:
value (float): value for IDD Field `Velocity Squared Term Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `velocity_squared_term_coefficient` or None if not set
"""
return self["Velocity Squared Term Coefficient"]
@velocity_squared_term_coefficient.setter
def velocity_squared_term_coefficient(self, value=None):
"""Corresponds to IDD field `Velocity Squared Term Coefficient`"""
self["Velocity Squared Term Coefficient"] = value
@property
def minimum_indoor_temperature(self):
"""field `Minimum Indoor Temperature`
| this is the indoor temperature below which ventilation is shutoff
| Units: C
| Default value: -100.0
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Minimum Indoor Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `minimum_indoor_temperature` or None if not set
"""
return self["Minimum Indoor Temperature"]
@minimum_indoor_temperature.setter
def minimum_indoor_temperature(self, value=-100.0):
"""Corresponds to IDD field `Minimum Indoor Temperature`"""
self["Minimum Indoor Temperature"] = value
@property
def minimum_indoor_temperature_schedule_name(self):
"""field `Minimum Indoor Temperature Schedule Name`
| This schedule contains the indoor temperature versus time below which
| ventilation is shutoff.
Args:
value (str): value for IDD Field `Minimum Indoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_indoor_temperature_schedule_name` or None if not set
"""
return self["Minimum Indoor Temperature Schedule Name"]
@minimum_indoor_temperature_schedule_name.setter
def minimum_indoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Indoor Temperature Schedule
Name`"""
self["Minimum Indoor Temperature Schedule Name"] = value
@property
def maximum_indoor_temperature(self):
"""field `Maximum Indoor Temperature`
| this is the indoor temperature above which ventilation is shutoff
| Units: C
| Default value: 100.0
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Maximum Indoor Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_indoor_temperature` or None if not set
"""
return self["Maximum Indoor Temperature"]
@maximum_indoor_temperature.setter
def maximum_indoor_temperature(self, value=100.0):
"""Corresponds to IDD field `Maximum Indoor Temperature`"""
self["Maximum Indoor Temperature"] = value
@property
def maximum_indoor_temperature_schedule_name(self):
"""field `Maximum Indoor Temperature Schedule Name`
| This schedule contains the indoor temperature versus time above which
| ventilation is shutoff.
Args:
value (str): value for IDD Field `Maximum Indoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_indoor_temperature_schedule_name` or None if not set
"""
return self["Maximum Indoor Temperature Schedule Name"]
@maximum_indoor_temperature_schedule_name.setter
def maximum_indoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Indoor Temperature Schedule
Name`"""
self["Maximum Indoor Temperature Schedule Name"] = value
@property
def delta_temperature(self):
"""field `Delta Temperature`
| This is the temperature differential between indoor and outdoor below which ventilation is shutoff.
| If ((IndoorTemp - OutdoorTemp) < DeltaTemperature) then ventilation is not allowed.
| For example, if delta temperature is 2C, ventilation is assumed to be available if the outside air temperature
| is at least 2C cooler than the zone air temperature. The values for this field can include negative numbers.
| This allows ventilation to occur even if the outdoor temperature is above the indoor temperature.
| Units: deltaC
| Default value: -100.0
| value >= -100.0
Args:
value (float): value for IDD Field `Delta Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `delta_temperature` or None if not set
"""
return self["Delta Temperature"]
@delta_temperature.setter
def delta_temperature(self, value=-100.0):
"""Corresponds to IDD field `Delta Temperature`"""
self["Delta Temperature"] = value
@property
def delta_temperature_schedule_name(self):
"""field `Delta Temperature Schedule Name`
| This schedule contains the temperature differential between indoor and outdoor
| versus time below which ventilation is shutoff.
Args:
value (str): value for IDD Field `Delta Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `delta_temperature_schedule_name` or None if not set
"""
return self["Delta Temperature Schedule Name"]
@delta_temperature_schedule_name.setter
def delta_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Delta Temperature Schedule Name`"""
self["Delta Temperature Schedule Name"] = value
@property
def minimum_outdoor_temperature(self):
"""field `Minimum Outdoor Temperature`
| this is the outdoor temperature below which ventilation is shutoff
| Units: C
| Default value: -100.0
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Minimum Outdoor Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `minimum_outdoor_temperature` or None if not set
"""
return self["Minimum Outdoor Temperature"]
@minimum_outdoor_temperature.setter
def minimum_outdoor_temperature(self, value=-100.0):
"""Corresponds to IDD field `Minimum Outdoor Temperature`"""
self["Minimum Outdoor Temperature"] = value
@property
def minimum_outdoor_temperature_schedule_name(self):
"""field `Minimum Outdoor Temperature Schedule Name`
| This schedule contains the outdoor temperature versus time below which
| ventilation is shutoff.
Args:
value (str): value for IDD Field `Minimum Outdoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_outdoor_temperature_schedule_name` or None if not set
"""
return self["Minimum Outdoor Temperature Schedule Name"]
@minimum_outdoor_temperature_schedule_name.setter
def minimum_outdoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Outdoor Temperature Schedule
Name`"""
self["Minimum Outdoor Temperature Schedule Name"] = value
@property
def maximum_outdoor_temperature(self):
"""field `Maximum Outdoor Temperature`
| this is the outdoor temperature above which ventilation is shutoff
| Units: C
| Default value: 100.0
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Maximum Outdoor Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_outdoor_temperature` or None if not set
"""
return self["Maximum Outdoor Temperature"]
@maximum_outdoor_temperature.setter
def maximum_outdoor_temperature(self, value=100.0):
"""Corresponds to IDD field `Maximum Outdoor Temperature`"""
self["Maximum Outdoor Temperature"] = value
@property
def maximum_outdoor_temperature_schedule_name(self):
"""field `Maximum Outdoor Temperature Schedule Name`
| This schedule contains the outdoor temperature versus time above which
| ventilation is shutoff.
Args:
value (str): value for IDD Field `Maximum Outdoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_outdoor_temperature_schedule_name` or None if not set
"""
return self["Maximum Outdoor Temperature Schedule Name"]
@maximum_outdoor_temperature_schedule_name.setter
def maximum_outdoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Outdoor Temperature Schedule
Name`"""
self["Maximum Outdoor Temperature Schedule Name"] = value
@property
def maximum_wind_speed(self):
"""field `Maximum Wind Speed`
| This is the outdoor wind speed above which ventilation is shutoff.
| Units: m/s
| Default value: 40.0
| value <= 40.0
Args:
value (float): value for IDD Field `Maximum Wind Speed`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_wind_speed` or None if not set
"""
return self["Maximum Wind Speed"]
@maximum_wind_speed.setter
def maximum_wind_speed(self, value=40.0):
"""Corresponds to IDD field `Maximum Wind Speed`"""
self["Maximum Wind Speed"] = value
class ZoneVentilationWindandStackOpenArea(DataObject):
""" Corresponds to IDD object `ZoneVentilation:WindandStackOpenArea`
This object is specified as natural ventilation driven by wind and stack effect only:
Ventilation Wind = Cw * Opening Area * Schedule * WindSpd
Ventilation Stack = Cd * Opening Area * Schedule * SQRT(2*g*DH*(|(Tzone-Todb)|/Tzone))
Total Ventilation = SQRT((Ventilation Wind)^2 + (Ventilation Stack)^2)
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'zone name',
{'name': u'Zone Name',
'pyname': u'zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'opening area',
{'name': u'Opening Area',
'pyname': u'opening_area',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'opening area fraction schedule name',
{'name': u'Opening Area Fraction Schedule Name',
'pyname': u'opening_area_fraction_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'opening effectiveness',
{'name': u'Opening Effectiveness',
'pyname': u'opening_effectiveness',
'default': 'Autocalculate',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': True,
'type': u'real',
'unit': u'dimensionless'}),
(u'effective angle',
{'name': u'Effective Angle',
'pyname': u'effective_angle',
'default': 0.0,
'maximum<': 360.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deg'}),
(u'height difference',
{'name': u'Height Difference',
'pyname': u'height_difference',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'discharge coefficient for opening',
{'name': u'Discharge Coefficient for Opening',
'pyname': u'discharge_coefficient_for_opening',
'default': 'Autocalculate',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': True,
'type': 'real'}),
(u'minimum indoor temperature',
{'name': u'Minimum Indoor Temperature',
'pyname': u'minimum_indoor_temperature',
'default': -100.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'minimum indoor temperature schedule name',
{'name': u'Minimum Indoor Temperature Schedule Name',
'pyname': u'minimum_indoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum indoor temperature',
{'name': u'Maximum Indoor Temperature',
'pyname': u'maximum_indoor_temperature',
'default': 100.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum indoor temperature schedule name',
{'name': u'Maximum Indoor Temperature Schedule Name',
'pyname': u'maximum_indoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'delta temperature',
{'name': u'Delta Temperature',
'pyname': u'delta_temperature',
'default': -100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deltaC'}),
(u'delta temperature schedule name',
{'name': u'Delta Temperature Schedule Name',
'pyname': u'delta_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'minimum outdoor temperature',
{'name': u'Minimum Outdoor Temperature',
'pyname': u'minimum_outdoor_temperature',
'default': -100.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'minimum outdoor temperature schedule name',
{'name': u'Minimum Outdoor Temperature Schedule Name',
'pyname': u'minimum_outdoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum outdoor temperature',
{'name': u'Maximum Outdoor Temperature',
'pyname': u'maximum_outdoor_temperature',
'default': 100.0,
'maximum': 100.0,
'required-field': False,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum outdoor temperature schedule name',
{'name': u'Maximum Outdoor Temperature Schedule Name',
'pyname': u'maximum_outdoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum wind speed',
{'name': u'Maximum Wind Speed',
'pyname': u'maximum_wind_speed',
'default': 40.0,
'maximum': 40.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm/s'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 8,
'name': u'ZoneVentilation:WindandStackOpenArea',
'pyname': u'ZoneVentilationWindandStackOpenArea',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_name(self):
"""field `Zone Name`
Args:
value (str): value for IDD Field `Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_name` or None if not set
"""
return self["Zone Name"]
@zone_name.setter
def zone_name(self, value=None):
"""Corresponds to IDD field `Zone Name`"""
self["Zone Name"] = value
@property
def opening_area(self):
"""field `Opening Area`
| This is the opening area used to calculate stack effect and wind driven ventilation.
| Units: m2
Args:
value (float): value for IDD Field `Opening Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `opening_area` or None if not set
"""
return self["Opening Area"]
@opening_area.setter
def opening_area(self, value=None):
"""Corresponds to IDD field `Opening Area`"""
self["Opening Area"] = value
@property
def opening_area_fraction_schedule_name(self):
"""field `Opening Area Fraction Schedule Name`
| This schedule contains the fraction values applied to the opening area given in the previous
| input field (0.0 - 1.0).
Args:
value (str): value for IDD Field `Opening Area Fraction Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `opening_area_fraction_schedule_name` or None if not set
"""
return self["Opening Area Fraction Schedule Name"]
@opening_area_fraction_schedule_name.setter
def opening_area_fraction_schedule_name(self, value=None):
"""Corresponds to IDD field `Opening Area Fraction Schedule Name`"""
self["Opening Area Fraction Schedule Name"] = value
@property
def opening_effectiveness(self):
"""field `Opening Effectiveness`
| This field is used to calculate wind driven ventilation.
| "Cw" in the wind-driven equation and the maximum value is 1.0.
| When the input is Autocalculate, the program calculates Cw based on an angle between
| wind direction and effective angle
| Cw = 0.55 at angle = 0, and Cw = 0.3 at angle=180
| Linear interpolation is used to calculate Cw based on the above two values.
| Units: dimensionless
| Default value: "Autocalculate"
| value <= 1.0
Args:
value (float or "Autocalculate"): value for IDD Field `Opening Effectiveness`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autocalculate": the value of `opening_effectiveness` or None if not set
"""
return self["Opening Effectiveness"]
@opening_effectiveness.setter
def opening_effectiveness(self, value="Autocalculate"):
"""Corresponds to IDD field `Opening Effectiveness`"""
self["Opening Effectiveness"] = value
@property
def effective_angle(self):
"""field `Effective Angle`
| This field is defined as normal angle of the opening area and is used when input
| field Opening Effectiveness = Autocalculate.
| Units: deg
| value < 360.0
Args:
value (float): value for IDD Field `Effective Angle`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `effective_angle` or None if not set
"""
return self["Effective Angle"]
@effective_angle.setter
def effective_angle(self, value=None):
"""Corresponds to IDD field `Effective Angle`"""
self["Effective Angle"] = value
@property
def height_difference(self):
"""field `Height Difference`
| This is the height difference between the midpoint of an opening and
| the neutral pressure level.
| "DH" in the stack equation.
| Units: m
Args:
value (float): value for IDD Field `Height Difference`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `height_difference` or None if not set
"""
return self["Height Difference"]
@height_difference.setter
def height_difference(self, value=None):
"""Corresponds to IDD field `Height Difference`"""
self["Height Difference"] = value
@property
def discharge_coefficient_for_opening(self):
"""field `Discharge Coefficient for Opening`
| This is the discharge coefficient used to calculate stack effect.
| "Cd" in the stack equation and the maximum value is 1.0.
| When the input is Autocalculate, the following equation is used to calculate the
| coefficient:
| Cd = 0.4 + 0.0045*|(Tzone-Todb)|
| Default value: "Autocalculate"
| value <= 1.0
Args:
value (float or "Autocalculate"): value for IDD Field `Discharge Coefficient for Opening`
Raises:
ValueError: if `value` is not a valid value
Returns:
float or "Autocalculate": the value of `discharge_coefficient_for_opening` or None if not set
"""
return self["Discharge Coefficient for Opening"]
@discharge_coefficient_for_opening.setter
def discharge_coefficient_for_opening(self, value="Autocalculate"):
"""Corresponds to IDD field `Discharge Coefficient for Opening`"""
self["Discharge Coefficient for Opening"] = value
@property
def minimum_indoor_temperature(self):
"""field `Minimum Indoor Temperature`
| This is the indoor temperature below which ventilation is shutoff.
| Units: C
| Default value: -100.0
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Minimum Indoor Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `minimum_indoor_temperature` or None if not set
"""
return self["Minimum Indoor Temperature"]
@minimum_indoor_temperature.setter
def minimum_indoor_temperature(self, value=-100.0):
"""Corresponds to IDD field `Minimum Indoor Temperature`"""
self["Minimum Indoor Temperature"] = value
@property
def minimum_indoor_temperature_schedule_name(self):
"""field `Minimum Indoor Temperature Schedule Name`
| This schedule contains the indoor temperature versus time below which
| ventilation is shutoff.
Args:
value (str): value for IDD Field `Minimum Indoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_indoor_temperature_schedule_name` or None if not set
"""
return self["Minimum Indoor Temperature Schedule Name"]
@minimum_indoor_temperature_schedule_name.setter
def minimum_indoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Indoor Temperature Schedule
Name`"""
self["Minimum Indoor Temperature Schedule Name"] = value
@property
def maximum_indoor_temperature(self):
"""field `Maximum Indoor Temperature`
| This is the indoor temperature above which ventilation is shutoff.
| Units: C
| Default value: 100.0
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Maximum Indoor Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_indoor_temperature` or None if not set
"""
return self["Maximum Indoor Temperature"]
@maximum_indoor_temperature.setter
def maximum_indoor_temperature(self, value=100.0):
"""Corresponds to IDD field `Maximum Indoor Temperature`"""
self["Maximum Indoor Temperature"] = value
@property
def maximum_indoor_temperature_schedule_name(self):
"""field `Maximum Indoor Temperature Schedule Name`
| This schedule contains the indoor temperature versus time above which
| ventilation is shutoff.
Args:
value (str): value for IDD Field `Maximum Indoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_indoor_temperature_schedule_name` or None if not set
"""
return self["Maximum Indoor Temperature Schedule Name"]
@maximum_indoor_temperature_schedule_name.setter
def maximum_indoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Indoor Temperature Schedule
Name`"""
self["Maximum Indoor Temperature Schedule Name"] = value
@property
def delta_temperature(self):
"""field `Delta Temperature`
| This is the temperature differential between indoor and outdoor below
| which ventilation is shutoff.
| Units: deltaC
| Default value: -100.0
| value >= -100.0
Args:
value (float): value for IDD Field `Delta Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `delta_temperature` or None if not set
"""
return self["Delta Temperature"]
@delta_temperature.setter
def delta_temperature(self, value=-100.0):
"""Corresponds to IDD field `Delta Temperature`"""
self["Delta Temperature"] = value
@property
def delta_temperature_schedule_name(self):
"""field `Delta Temperature Schedule Name`
| This schedule contains the temperature differential between indoor and outdoor
| versus time below which ventilation is shutoff.
Args:
value (str): value for IDD Field `Delta Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `delta_temperature_schedule_name` or None if not set
"""
return self["Delta Temperature Schedule Name"]
@delta_temperature_schedule_name.setter
def delta_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Delta Temperature Schedule Name`"""
self["Delta Temperature Schedule Name"] = value
@property
def minimum_outdoor_temperature(self):
"""field `Minimum Outdoor Temperature`
| This is the outdoor temperature below which ventilation is shutoff.
| Units: C
| Default value: -100.0
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Minimum Outdoor Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `minimum_outdoor_temperature` or None if not set
"""
return self["Minimum Outdoor Temperature"]
@minimum_outdoor_temperature.setter
def minimum_outdoor_temperature(self, value=-100.0):
"""Corresponds to IDD field `Minimum Outdoor Temperature`"""
self["Minimum Outdoor Temperature"] = value
@property
def minimum_outdoor_temperature_schedule_name(self):
"""field `Minimum Outdoor Temperature Schedule Name`
| This schedule contains the outdoor temperature versus time below which
| ventilation is shutoff.
Args:
value (str): value for IDD Field `Minimum Outdoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_outdoor_temperature_schedule_name` or None if not set
"""
return self["Minimum Outdoor Temperature Schedule Name"]
@minimum_outdoor_temperature_schedule_name.setter
def minimum_outdoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Outdoor Temperature Schedule
Name`"""
self["Minimum Outdoor Temperature Schedule Name"] = value
@property
def maximum_outdoor_temperature(self):
"""field `Maximum Outdoor Temperature`
| This is the outdoor temperature above which ventilation is shutoff.
| Units: C
| Default value: 100.0
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Maximum Outdoor Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_outdoor_temperature` or None if not set
"""
return self["Maximum Outdoor Temperature"]
@maximum_outdoor_temperature.setter
def maximum_outdoor_temperature(self, value=100.0):
"""Corresponds to IDD field `Maximum Outdoor Temperature`"""
self["Maximum Outdoor Temperature"] = value
@property
def maximum_outdoor_temperature_schedule_name(self):
"""field `Maximum Outdoor Temperature Schedule Name`
| This schedule contains the outdoor temperature versus time above which
| ventilation is shutoff.
Args:
value (str): value for IDD Field `Maximum Outdoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_outdoor_temperature_schedule_name` or None if not set
"""
return self["Maximum Outdoor Temperature Schedule Name"]
@maximum_outdoor_temperature_schedule_name.setter
def maximum_outdoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Outdoor Temperature Schedule
Name`"""
self["Maximum Outdoor Temperature Schedule Name"] = value
@property
def maximum_wind_speed(self):
"""field `Maximum Wind Speed`
| This is the outdoor wind speed above which ventilation is shutoff.
| Units: m/s
| Default value: 40.0
| value <= 40.0
Args:
value (float): value for IDD Field `Maximum Wind Speed`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_wind_speed` or None if not set
"""
return self["Maximum Wind Speed"]
@maximum_wind_speed.setter
def maximum_wind_speed(self, value=40.0):
"""Corresponds to IDD field `Maximum Wind Speed`"""
self["Maximum Wind Speed"] = value
class ZoneAirBalanceOutdoorAir(DataObject):
""" Corresponds to IDD object `ZoneAirBalance:OutdoorAir`
Provide a combined zone outdoor air flow by including interactions between
mechanical ventilation, infiltration and duct leakage.
This object will combine outdoor flows from all ZoneInfiltration and
ZoneVentilation objects in the same zone. Balanced flows will be summed, while
unbalanced flows will be added in quadrature.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'zone name',
{'name': u'Zone Name',
'pyname': u'zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'air balance method',
{'name': u'Air Balance Method',
'pyname': u'air_balance_method',
'default': u'Quadrature',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Quadrature',
u'None'],
'autocalculatable': False,
'type': 'alpha'}),
(u'induced outdoor air due to unbalanced duct leakage',
{'name': u'Induced Outdoor Air Due to Unbalanced Duct Leakage',
'pyname': u'induced_outdoor_air_due_to_unbalanced_duct_leakage',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'induced outdoor air schedule name',
{'name': u'Induced Outdoor Air Schedule Name',
'pyname': u'induced_outdoor_air_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 0,
'name': u'ZoneAirBalance:OutdoorAir',
'pyname': u'ZoneAirBalanceOutdoorAir',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_name(self):
"""field `Zone Name`
Args:
value (str): value for IDD Field `Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_name` or None if not set
"""
return self["Zone Name"]
@zone_name.setter
def zone_name(self, value=None):
"""Corresponds to IDD field `Zone Name`"""
self["Zone Name"] = value
@property
def air_balance_method(self):
"""field `Air Balance Method`
| None: Only perform simple calculations without using a combined zone outdoor air flow.
| Quadrature: A combined outdoor air flow is used in the quadrature sum.
| Default value: Quadrature
Args:
value (str): value for IDD Field `Air Balance Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `air_balance_method` or None if not set
"""
return self["Air Balance Method"]
@air_balance_method.setter
def air_balance_method(self, value="Quadrature"):
"""Corresponds to IDD field `Air Balance Method`"""
self["Air Balance Method"] = value
@property
def induced_outdoor_air_due_to_unbalanced_duct_leakage(self):
"""field `Induced Outdoor Air Due to Unbalanced Duct Leakage`
| Units: m3/s
Args:
value (float): value for IDD Field `Induced Outdoor Air Due to Unbalanced Duct Leakage`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `induced_outdoor_air_due_to_unbalanced_duct_leakage` or None if not set
"""
return self["Induced Outdoor Air Due to Unbalanced Duct Leakage"]
@induced_outdoor_air_due_to_unbalanced_duct_leakage.setter
def induced_outdoor_air_due_to_unbalanced_duct_leakage(self, value=None):
"""Corresponds to IDD field `Induced Outdoor Air Due to Unbalanced Duct
Leakage`"""
self["Induced Outdoor Air Due to Unbalanced Duct Leakage"] = value
@property
def induced_outdoor_air_schedule_name(self):
"""field `Induced Outdoor Air Schedule Name`
| This schedule contains the fraction values applied to the Induced Outdoor Air given in the
| previous input field (0.0 - 1.0).
Args:
value (str): value for IDD Field `Induced Outdoor Air Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `induced_outdoor_air_schedule_name` or None if not set
"""
return self["Induced Outdoor Air Schedule Name"]
@induced_outdoor_air_schedule_name.setter
def induced_outdoor_air_schedule_name(self, value=None):
"""Corresponds to IDD field `Induced Outdoor Air Schedule Name`"""
self["Induced Outdoor Air Schedule Name"] = value
class ZoneMixing(DataObject):
"""Corresponds to IDD object `ZoneMixing` ZoneMixing is a simple air
exchange from one zone to another.
Note that this statement only affects the energy balance of the
"receiving" zone and will not produce any effect on the "source"
zone. Mixing statements can be complementary and include multiple
zones, but the balancing of flows between zones is left to the
user's discretion.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'zone name',
{'name': u'Zone Name',
'pyname': u'zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'schedule name',
{'name': u'Schedule Name',
'pyname': u'schedule_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'design flow rate calculation method',
{'name': u'Design Flow Rate Calculation Method',
'pyname': u'design_flow_rate_calculation_method',
'default': u'Flow/Zone',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Flow/Zone',
u'Flow/Area',
u'Flow/Person',
u'AirChanges/Hour'],
'autocalculatable': False,
'type': 'alpha'}),
(u'design flow rate',
{'name': u'Design Flow Rate',
'pyname': u'design_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'flow rate per zone floor area',
{'name': u'Flow Rate per Zone Floor Area',
'pyname': u'flow_rate_per_zone_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'flow rate per person',
{'name': u'Flow Rate per Person',
'pyname': u'flow_rate_per_person',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-person'}),
(u'air changes per hour',
{'name': u'Air Changes per Hour',
'pyname': u'air_changes_per_hour',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'1/hr'}),
(u'source zone name',
{'name': u'Source Zone Name',
'pyname': u'source_zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'delta temperature',
{'name': u'Delta Temperature',
'pyname': u'delta_temperature',
'default': 0.0,
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'deltaC'}),
(u'delta temperature schedule name',
{'name': u'Delta Temperature Schedule Name',
'pyname': u'delta_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'minimum zone temperature schedule name',
{'name': u'Minimum Zone Temperature Schedule Name',
'pyname': u'minimum_zone_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum zone temperature schedule name',
{'name': u'Maximum Zone Temperature Schedule Name',
'pyname': u'maximum_zone_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'minimum source zone temperature schedule name',
{'name': u'Minimum Source Zone Temperature Schedule Name',
'pyname': u'minimum_source_zone_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum source zone temperature schedule name',
{'name': u'Maximum Source Zone Temperature Schedule Name',
'pyname': u'maximum_source_zone_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'minimum outdoor temperature schedule name',
{'name': u'Minimum Outdoor Temperature Schedule Name',
'pyname': u'minimum_outdoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum outdoor temperature schedule name',
{'name': u'Maximum Outdoor Temperature Schedule Name',
'pyname': u'maximum_outdoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 9,
'name': u'ZoneMixing',
'pyname': u'ZoneMixing',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_name(self):
"""field `Zone Name`
Args:
value (str): value for IDD Field `Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_name` or None if not set
"""
return self["Zone Name"]
@zone_name.setter
def zone_name(self, value=None):
"""Corresponds to IDD field `Zone Name`"""
self["Zone Name"] = value
@property
def schedule_name(self):
"""field `Schedule Name`
Args:
value (str): value for IDD Field `Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `schedule_name` or None if not set
"""
return self["Schedule Name"]
@schedule_name.setter
def schedule_name(self, value=None):
"""Corresponds to IDD field `Schedule Name`"""
self["Schedule Name"] = value
@property
def design_flow_rate_calculation_method(self):
"""field `Design Flow Rate Calculation Method`
| The entered calculation method is used to create the maximum amount of ventilation
| for this set of attributes.
| Choices: Flow/Zone => Design Flow Rate -- simply enter Design Flow Rate
| Flow/Area => Flow Rate per Zone Floor Area - Value * Floor Area (zone) = Design Flow Rate
| Flow/Person => Flow Rate per Person - Value * #people = Design Flow Rate
| AirChanges/Hour => Air Changes per Hour - Value * Floor Volume (zone) adjusted for m3/s = Design Volume Flow Rate
| "Vdesign" in Equation is the result.
| Default value: Flow/Zone
Args:
value (str): value for IDD Field `Design Flow Rate Calculation Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `design_flow_rate_calculation_method` or None if not set
"""
return self["Design Flow Rate Calculation Method"]
@design_flow_rate_calculation_method.setter
def design_flow_rate_calculation_method(self, value="Flow/Zone"):
"""Corresponds to IDD field `Design Flow Rate Calculation Method`"""
self["Design Flow Rate Calculation Method"] = value
@property
def design_flow_rate(self):
"""field `Design Flow Rate`
| Units: m3/s
Args:
value (float): value for IDD Field `Design Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `design_flow_rate` or None if not set
"""
return self["Design Flow Rate"]
@design_flow_rate.setter
def design_flow_rate(self, value=None):
"""Corresponds to IDD field `Design Flow Rate`"""
self["Design Flow Rate"] = value
@property
def flow_rate_per_zone_floor_area(self):
"""field `Flow Rate per Zone Floor Area`
| Units: m3/s-m2
Args:
value (float): value for IDD Field `Flow Rate per Zone Floor Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `flow_rate_per_zone_floor_area` or None if not set
"""
return self["Flow Rate per Zone Floor Area"]
@flow_rate_per_zone_floor_area.setter
def flow_rate_per_zone_floor_area(self, value=None):
"""Corresponds to IDD field `Flow Rate per Zone Floor Area`"""
self["Flow Rate per Zone Floor Area"] = value
@property
def flow_rate_per_person(self):
"""field `Flow Rate per Person`
| Units: m3/s-person
Args:
value (float): value for IDD Field `Flow Rate per Person`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `flow_rate_per_person` or None if not set
"""
return self["Flow Rate per Person"]
@flow_rate_per_person.setter
def flow_rate_per_person(self, value=None):
"""Corresponds to IDD field `Flow Rate per Person`"""
self["Flow Rate per Person"] = value
@property
def air_changes_per_hour(self):
"""field `Air Changes per Hour`
| Units: 1/hr
Args:
value (float): value for IDD Field `Air Changes per Hour`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `air_changes_per_hour` or None if not set
"""
return self["Air Changes per Hour"]
@air_changes_per_hour.setter
def air_changes_per_hour(self, value=None):
"""Corresponds to IDD field `Air Changes per Hour`"""
self["Air Changes per Hour"] = value
@property
def source_zone_name(self):
"""field `Source Zone Name`
Args:
value (str): value for IDD Field `Source Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `source_zone_name` or None if not set
"""
return self["Source Zone Name"]
@source_zone_name.setter
def source_zone_name(self, value=None):
"""Corresponds to IDD field `Source Zone Name`"""
self["Source Zone Name"] = value
@property
def delta_temperature(self):
"""field `Delta Temperature`
| This field contains the constant temperature differential between source and
| receiving zones below which mixing is shutoff.
| Units: deltaC
Args:
value (float): value for IDD Field `Delta Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `delta_temperature` or None if not set
"""
return self["Delta Temperature"]
@delta_temperature.setter
def delta_temperature(self, value=None):
"""Corresponds to IDD field `Delta Temperature`"""
self["Delta Temperature"] = value
@property
def delta_temperature_schedule_name(self):
"""field `Delta Temperature Schedule Name`
| This schedule contains the temperature differential between source and receiving
| zones versus time below which mixing is shutoff.
Args:
value (str): value for IDD Field `Delta Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `delta_temperature_schedule_name` or None if not set
"""
return self["Delta Temperature Schedule Name"]
@delta_temperature_schedule_name.setter
def delta_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Delta Temperature Schedule Name`"""
self["Delta Temperature Schedule Name"] = value
@property
def minimum_zone_temperature_schedule_name(self):
"""field `Minimum Zone Temperature Schedule Name`
| This schedule contains the zone dry-bulb temperature versus time below which
| mixing is shutoff.
Args:
value (str): value for IDD Field `Minimum Zone Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_zone_temperature_schedule_name` or None if not set
"""
return self["Minimum Zone Temperature Schedule Name"]
@minimum_zone_temperature_schedule_name.setter
def minimum_zone_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Zone Temperature Schedule Name`"""
self["Minimum Zone Temperature Schedule Name"] = value
@property
def maximum_zone_temperature_schedule_name(self):
"""field `Maximum Zone Temperature Schedule Name`
| This schedule contains the zone dry-bulb temperature versus time above which
| mixing is shutoff.
Args:
value (str): value for IDD Field `Maximum Zone Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_zone_temperature_schedule_name` or None if not set
"""
return self["Maximum Zone Temperature Schedule Name"]
@maximum_zone_temperature_schedule_name.setter
def maximum_zone_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Zone Temperature Schedule Name`"""
self["Maximum Zone Temperature Schedule Name"] = value
@property
def minimum_source_zone_temperature_schedule_name(self):
"""field `Minimum Source Zone Temperature Schedule Name`
| This schedule contains the source zone dry-bulb temperature versus time below
| which mixing is shutoff.
Args:
value (str): value for IDD Field `Minimum Source Zone Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_source_zone_temperature_schedule_name` or None if not set
"""
return self["Minimum Source Zone Temperature Schedule Name"]
@minimum_source_zone_temperature_schedule_name.setter
def minimum_source_zone_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Source Zone Temperature Schedule
Name`"""
self["Minimum Source Zone Temperature Schedule Name"] = value
@property
def maximum_source_zone_temperature_schedule_name(self):
"""field `Maximum Source Zone Temperature Schedule Name`
| This schedule contains the source zone dry-bulb temperature versus time above
| which mixing is shutoff.
Args:
value (str): value for IDD Field `Maximum Source Zone Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_source_zone_temperature_schedule_name` or None if not set
"""
return self["Maximum Source Zone Temperature Schedule Name"]
@maximum_source_zone_temperature_schedule_name.setter
def maximum_source_zone_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Source Zone Temperature Schedule
Name`"""
self["Maximum Source Zone Temperature Schedule Name"] = value
@property
def minimum_outdoor_temperature_schedule_name(self):
"""field `Minimum Outdoor Temperature Schedule Name`
| This schedule contains the outdoor temperature versus time below which
| mixing is shutoff.
Args:
value (str): value for IDD Field `Minimum Outdoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_outdoor_temperature_schedule_name` or None if not set
"""
return self["Minimum Outdoor Temperature Schedule Name"]
@minimum_outdoor_temperature_schedule_name.setter
def minimum_outdoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Outdoor Temperature Schedule
Name`"""
self["Minimum Outdoor Temperature Schedule Name"] = value
@property
def maximum_outdoor_temperature_schedule_name(self):
"""field `Maximum Outdoor Temperature Schedule Name`
| This schedule contains the outdoor temperature versus time above which
| mixing is shutoff.
Args:
value (str): value for IDD Field `Maximum Outdoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_outdoor_temperature_schedule_name` or None if not set
"""
return self["Maximum Outdoor Temperature Schedule Name"]
@maximum_outdoor_temperature_schedule_name.setter
def maximum_outdoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Outdoor Temperature Schedule
Name`"""
self["Maximum Outdoor Temperature Schedule Name"] = value
class ZoneCrossMixing(DataObject):
"""Corresponds to IDD object `ZoneCrossMixing` ZoneCrossMixing exchanges an
equal amount of air between two zones.
Note that this statement affects the energy balance of both zones.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'zone name',
{'name': u'Zone Name',
'pyname': u'zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'schedule name',
{'name': u'Schedule Name',
'pyname': u'schedule_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'design flow rate calculation method',
{'name': u'Design Flow Rate Calculation Method',
'pyname': u'design_flow_rate_calculation_method',
'default': u'Flow/Zone',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Flow/Zone',
u'Flow/Person',
u'Flow/Area',
u'AirChanges/Hour'],
'autocalculatable': False,
'type': 'alpha'}),
(u'design flow rate',
{'name': u'Design Flow Rate',
'pyname': u'design_flow_rate',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'flow rate per zone floor area',
{'name': u'Flow Rate per Zone Floor Area',
'pyname': u'flow_rate_per_zone_floor_area',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-m2'}),
(u'flow rate per person',
{'name': u'Flow Rate per Person',
'pyname': u'flow_rate_per_person',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s-person'}),
(u'air changes per hour',
{'name': u'Air Changes per Hour',
'pyname': u'air_changes_per_hour',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'1/hr'}),
(u'source zone name',
{'name': u'Source Zone Name',
'pyname': u'source_zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'delta temperature',
{'name': u'Delta Temperature',
'pyname': u'delta_temperature',
'default': 0.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deltaC'}),
(u'delta temperature schedule name',
{'name': u'Delta Temperature Schedule Name',
'pyname': u'delta_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'minimum zone temperature schedule name',
{'name': u'Minimum Zone Temperature Schedule Name',
'pyname': u'minimum_zone_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum zone temperature schedule name',
{'name': u'Maximum Zone Temperature Schedule Name',
'pyname': u'maximum_zone_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'minimum source zone temperature schedule name',
{'name': u'Minimum Source Zone Temperature Schedule Name',
'pyname': u'minimum_source_zone_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum source zone temperature schedule name',
{'name': u'Maximum Source Zone Temperature Schedule Name',
'pyname': u'maximum_source_zone_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'minimum outdoor temperature schedule name',
{'name': u'Minimum Outdoor Temperature Schedule Name',
'pyname': u'minimum_outdoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum outdoor temperature schedule name',
{'name': u'Maximum Outdoor Temperature Schedule Name',
'pyname': u'maximum_outdoor_temperature_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 9,
'name': u'ZoneCrossMixing',
'pyname': u'ZoneCrossMixing',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_name(self):
"""field `Zone Name`
Args:
value (str): value for IDD Field `Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_name` or None if not set
"""
return self["Zone Name"]
@zone_name.setter
def zone_name(self, value=None):
"""Corresponds to IDD field `Zone Name`"""
self["Zone Name"] = value
@property
def schedule_name(self):
"""field `Schedule Name`
Args:
value (str): value for IDD Field `Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `schedule_name` or None if not set
"""
return self["Schedule Name"]
@schedule_name.setter
def schedule_name(self, value=None):
"""Corresponds to IDD field `Schedule Name`"""
self["Schedule Name"] = value
@property
def design_flow_rate_calculation_method(self):
"""field `Design Flow Rate Calculation Method`
| The entered calculation method is used to create the maximum amount of ventilation
| for this set of attributes.
| Choices: Flow/Zone => Design Flow Rate -- simply enter Design Flow Rate
| Flow/Area => Flow Rate per Zone Floor Area - Value * Floor Area (zone) = Design Flow Rate
| Flow/Person => Flow Rate per Person - Value * #people = Design Flow Rate
| AirChanges/Hour => Air Changes per Hour - Value * Floor Volume (zone) adjusted for m3/s = Design Volume Flow Rate
| "Vdesign" in Equation is the result.
| Default value: Flow/Zone
Args:
value (str): value for IDD Field `Design Flow Rate Calculation Method`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `design_flow_rate_calculation_method` or None if not set
"""
return self["Design Flow Rate Calculation Method"]
@design_flow_rate_calculation_method.setter
def design_flow_rate_calculation_method(self, value="Flow/Zone"):
"""Corresponds to IDD field `Design Flow Rate Calculation Method`"""
self["Design Flow Rate Calculation Method"] = value
@property
def design_flow_rate(self):
"""field `Design Flow Rate`
| Units: m3/s
Args:
value (float): value for IDD Field `Design Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `design_flow_rate` or None if not set
"""
return self["Design Flow Rate"]
@design_flow_rate.setter
def design_flow_rate(self, value=None):
"""Corresponds to IDD field `Design Flow Rate`"""
self["Design Flow Rate"] = value
@property
def flow_rate_per_zone_floor_area(self):
"""field `Flow Rate per Zone Floor Area`
| Units: m3/s-m2
Args:
value (float): value for IDD Field `Flow Rate per Zone Floor Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `flow_rate_per_zone_floor_area` or None if not set
"""
return self["Flow Rate per Zone Floor Area"]
@flow_rate_per_zone_floor_area.setter
def flow_rate_per_zone_floor_area(self, value=None):
"""Corresponds to IDD field `Flow Rate per Zone Floor Area`"""
self["Flow Rate per Zone Floor Area"] = value
@property
def flow_rate_per_person(self):
"""field `Flow Rate per Person`
| Units: m3/s-person
Args:
value (float): value for IDD Field `Flow Rate per Person`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `flow_rate_per_person` or None if not set
"""
return self["Flow Rate per Person"]
@flow_rate_per_person.setter
def flow_rate_per_person(self, value=None):
"""Corresponds to IDD field `Flow Rate per Person`"""
self["Flow Rate per Person"] = value
@property
def air_changes_per_hour(self):
"""field `Air Changes per Hour`
| Units: 1/hr
Args:
value (float): value for IDD Field `Air Changes per Hour`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `air_changes_per_hour` or None if not set
"""
return self["Air Changes per Hour"]
@air_changes_per_hour.setter
def air_changes_per_hour(self, value=None):
"""Corresponds to IDD field `Air Changes per Hour`"""
self["Air Changes per Hour"] = value
@property
def source_zone_name(self):
"""field `Source Zone Name`
Args:
value (str): value for IDD Field `Source Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `source_zone_name` or None if not set
"""
return self["Source Zone Name"]
@source_zone_name.setter
def source_zone_name(self, value=None):
"""Corresponds to IDD field `Source Zone Name`"""
self["Source Zone Name"] = value
@property
def delta_temperature(self):
"""field `Delta Temperature`
| This field contains the constant temperature differential between source and
| receiving zones below which cross mixing is shutoff. This value must be greater
| than or equal to zero.
| Units: deltaC
Args:
value (float): value for IDD Field `Delta Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `delta_temperature` or None if not set
"""
return self["Delta Temperature"]
@delta_temperature.setter
def delta_temperature(self, value=None):
"""Corresponds to IDD field `Delta Temperature`"""
self["Delta Temperature"] = value
@property
def delta_temperature_schedule_name(self):
"""field `Delta Temperature Schedule Name`
| This schedule contains the temperature differential between source and receiving
| zones versus time below which cross mixing is shutoff.
Args:
value (str): value for IDD Field `Delta Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `delta_temperature_schedule_name` or None if not set
"""
return self["Delta Temperature Schedule Name"]
@delta_temperature_schedule_name.setter
def delta_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Delta Temperature Schedule Name`"""
self["Delta Temperature Schedule Name"] = value
@property
def minimum_zone_temperature_schedule_name(self):
"""field `Minimum Zone Temperature Schedule Name`
| This schedule contains the indoor temperature versus time below which
| cross mixing is shutoff.
Args:
value (str): value for IDD Field `Minimum Zone Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_zone_temperature_schedule_name` or None if not set
"""
return self["Minimum Zone Temperature Schedule Name"]
@minimum_zone_temperature_schedule_name.setter
def minimum_zone_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Zone Temperature Schedule Name`"""
self["Minimum Zone Temperature Schedule Name"] = value
@property
def maximum_zone_temperature_schedule_name(self):
"""field `Maximum Zone Temperature Schedule Name`
| This schedule contains the indoor temperature versus time above which
| cross mixing is shutoff.
Args:
value (str): value for IDD Field `Maximum Zone Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_zone_temperature_schedule_name` or None if not set
"""
return self["Maximum Zone Temperature Schedule Name"]
@maximum_zone_temperature_schedule_name.setter
def maximum_zone_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Zone Temperature Schedule Name`"""
self["Maximum Zone Temperature Schedule Name"] = value
@property
def minimum_source_zone_temperature_schedule_name(self):
"""field `Minimum Source Zone Temperature Schedule Name`
| This schedule contains the source zone dry-bulb temperature versus time below
| which cross mixing is shutoff.
Args:
value (str): value for IDD Field `Minimum Source Zone Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_source_zone_temperature_schedule_name` or None if not set
"""
return self["Minimum Source Zone Temperature Schedule Name"]
@minimum_source_zone_temperature_schedule_name.setter
def minimum_source_zone_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Source Zone Temperature Schedule
Name`"""
self["Minimum Source Zone Temperature Schedule Name"] = value
@property
def maximum_source_zone_temperature_schedule_name(self):
"""field `Maximum Source Zone Temperature Schedule Name`
| This schedule contains the source zone dry-bulb temperature versus time above
| which cross mixing is shutoff.
Args:
value (str): value for IDD Field `Maximum Source Zone Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_source_zone_temperature_schedule_name` or None if not set
"""
return self["Maximum Source Zone Temperature Schedule Name"]
@maximum_source_zone_temperature_schedule_name.setter
def maximum_source_zone_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Source Zone Temperature Schedule
Name`"""
self["Maximum Source Zone Temperature Schedule Name"] = value
@property
def minimum_outdoor_temperature_schedule_name(self):
"""field `Minimum Outdoor Temperature Schedule Name`
| This schedule contains the outdoor temperature versus time below which
| cross mixing is shutoff.
Args:
value (str): value for IDD Field `Minimum Outdoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `minimum_outdoor_temperature_schedule_name` or None if not set
"""
return self["Minimum Outdoor Temperature Schedule Name"]
@minimum_outdoor_temperature_schedule_name.setter
def minimum_outdoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Minimum Outdoor Temperature Schedule
Name`"""
self["Minimum Outdoor Temperature Schedule Name"] = value
@property
def maximum_outdoor_temperature_schedule_name(self):
"""field `Maximum Outdoor Temperature Schedule Name`
| This schedule contains the outdoor temperature versus time above which
| cross mixing is shutoff.
Args:
value (str): value for IDD Field `Maximum Outdoor Temperature Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `maximum_outdoor_temperature_schedule_name` or None if not set
"""
return self["Maximum Outdoor Temperature Schedule Name"]
@maximum_outdoor_temperature_schedule_name.setter
def maximum_outdoor_temperature_schedule_name(self, value=None):
"""Corresponds to IDD field `Maximum Outdoor Temperature Schedule
Name`"""
self["Maximum Outdoor Temperature Schedule Name"] = value
class ZoneRefrigerationDoorMixing(DataObject):
"""Corresponds to IDD object `ZoneRefrigerationDoorMixing` Refrigeration
Door Mixing is used for an opening between two zones that are at the same
elevation but have different air temperatures.
In this case, the mixing air flow
between the two zones is determined by the density difference between them.
This would typically be used between two zones in a refrigerated warehouse that are
controlled at different temperatures. It could also be used to model a door to a walk-in
refrigerated space if that space were modeled as a zone instead of using the object Refrigeration:WalkIn.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'zone 1 name',
{'name': u'Zone 1 Name',
'pyname': u'zone_1_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'zone 2 name',
{'name': u'Zone 2 Name',
'pyname': u'zone_2_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'schedule name',
{'name': u'Schedule Name',
'pyname': u'schedule_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'door height',
{'name': u'Door Height',
'pyname': u'door_height',
'default': 3.0,
'maximum': 50.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'door area',
{'name': u'Door Area',
'pyname': u'door_area',
'default': 9.0,
'maximum': 400.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'door protection type',
{'name': u'Door Protection Type',
'pyname': u'door_protection_type',
'default': u'None',
'required-field': False,
'autosizable': False,
'accepted-values': [u'None',
u'AirCurtain',
u'StripCurtain'],
'autocalculatable': False,
'type': 'alpha'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 4,
'name': u'ZoneRefrigerationDoorMixing',
'pyname': u'ZoneRefrigerationDoorMixing',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_1_name(self):
"""field `Zone 1 Name`
Args:
value (str): value for IDD Field `Zone 1 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_1_name` or None if not set
"""
return self["Zone 1 Name"]
@zone_1_name.setter
def zone_1_name(self, value=None):
"""Corresponds to IDD field `Zone 1 Name`"""
self["Zone 1 Name"] = value
@property
def zone_2_name(self):
"""field `Zone 2 Name`
Args:
value (str): value for IDD Field `Zone 2 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_2_name` or None if not set
"""
return self["Zone 2 Name"]
@zone_2_name.setter
def zone_2_name(self, value=None):
"""Corresponds to IDD field `Zone 2 Name`"""
self["Zone 2 Name"] = value
@property
def schedule_name(self):
"""field `Schedule Name`
        | This schedule defines the fraction of the time the refrigeration door is open.
        | For example, if the warehouse is closed at night and there are no door openings
        | between the two zones, the value for that time period would be 0.
        | If the doors were propped open, the value over that period would be 1.0.
        | If the doors were open about 20% of the time, the value over that period would be 0.2.
        | Schedule values must lie between 0 and 1.0.
Args:
value (str): value for IDD Field `Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `schedule_name` or None if not set
"""
return self["Schedule Name"]
@schedule_name.setter
def schedule_name(self, value=None):
"""Corresponds to IDD field `Schedule Name`"""
self["Schedule Name"] = value
@property
def door_height(self):
"""field `Door Height`
| Units: m
| Default value: 3.0
| value <= 50.0
Args:
value (float): value for IDD Field `Door Height`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `door_height` or None if not set
"""
return self["Door Height"]
@door_height.setter
def door_height(self, value=3.0):
"""Corresponds to IDD field `Door Height`"""
self["Door Height"] = value
@property
def door_area(self):
"""field `Door Area`
| Units: m2
| Default value: 9.0
| value <= 400.0
Args:
value (float): value for IDD Field `Door Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `door_area` or None if not set
"""
return self["Door Area"]
@door_area.setter
def door_area(self, value=9.0):
"""Corresponds to IDD field `Door Area`"""
self["Door Area"] = value
@property
def door_protection_type(self):
"""field `Door Protection Type`
        | Door protection can reduce the air flow through a refrigeration door.
        | Choices: "None", "AirCurtain", and "StripCurtain"
        | A strip curtain reduces the air flow more than an air curtain.
        | Default value: None
Args:
value (str): value for IDD Field `Door Protection Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `door_protection_type` or None if not set
"""
return self["Door Protection Type"]
@door_protection_type.setter
def door_protection_type(self, value="None"):
"""Corresponds to IDD field `Door Protection Type`"""
self["Door Protection Type"] = value
class ZoneEarthtube(DataObject):
    """ Corresponds to IDD object `ZoneEarthtube`.
    An earth tube is specified as a design flow level that is modified by a
    schedule fraction, the indoor-outdoor temperature difference, and the wind speed:
    Earthtube = Edesign * Fschedule * (A + B*|(Tzone-Todb)| + C*WindSpd + D*WindSpd**2)
    """
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'zone name',
{'name': u'Zone Name',
'pyname': u'zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'schedule name',
{'name': u'Schedule Name',
'pyname': u'schedule_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'design flow rate',
{'name': u'Design Flow Rate',
'pyname': u'design_flow_rate',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'minimum zone temperature when cooling',
{'name': u'Minimum Zone Temperature when Cooling',
'pyname': u'minimum_zone_temperature_when_cooling',
'maximum': 100.0,
'required-field': True,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'maximum zone temperature when heating',
{'name': u'Maximum Zone Temperature when Heating',
'pyname': u'maximum_zone_temperature_when_heating',
'maximum': 100.0,
'required-field': True,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'delta temperature',
{'name': u'Delta Temperature',
'pyname': u'delta_temperature',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deltaC'}),
(u'earthtube type',
{'name': u'Earthtube Type',
'pyname': u'earthtube_type',
'default': u'Natural',
'required-field': True,
'autosizable': False,
'accepted-values': [u'Natural',
u'Intake',
u'Exhaust'],
'autocalculatable': False,
'type': 'alpha'}),
(u'fan pressure rise',
{'name': u'Fan Pressure Rise',
'pyname': u'fan_pressure_rise',
'default': 0.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'Pa'}),
(u'fan total efficiency',
{'name': u'Fan Total Efficiency',
'pyname': u'fan_total_efficiency',
'default': 1.0,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'pipe radius',
{'name': u'Pipe Radius',
'pyname': u'pipe_radius',
'default': 1.0,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'pipe thickness',
{'name': u'Pipe Thickness',
'pyname': u'pipe_thickness',
'default': 0.2,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'pipe length',
{'name': u'Pipe Length',
'pyname': u'pipe_length',
'default': 15.0,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'pipe thermal conductivity',
{'name': u'Pipe Thermal Conductivity',
'pyname': u'pipe_thermal_conductivity',
'default': 200.0,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W/m-K'}),
(u'pipe depth under ground surface',
{'name': u'Pipe Depth Under Ground Surface',
'pyname': u'pipe_depth_under_ground_surface',
'default': 3.0,
'minimum>': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'soil condition',
{'name': u'Soil Condition',
'pyname': u'soil_condition',
'default': u'HeavyAndDamp',
'required-field': True,
'autosizable': False,
'accepted-values': [u'HeavyAndSaturated',
u'HeavyAndDamp',
u'HeavyAndDry',
u'LightAndDry'],
'autocalculatable': False,
'type': 'alpha'}),
(u'average soil surface temperature',
{'name': u'Average Soil Surface Temperature',
'pyname': u'average_soil_surface_temperature',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'amplitude of soil surface temperature',
{'name': u'Amplitude of Soil Surface Temperature',
'pyname': u'amplitude_of_soil_surface_temperature',
'default': 0.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deltaC'}),
(u'phase constant of soil surface temperature',
{'name': u'Phase Constant of Soil Surface Temperature',
'pyname': u'phase_constant_of_soil_surface_temperature',
'default': 0.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'days'}),
(u'constant term flow coefficient',
{'name': u'Constant Term Flow Coefficient',
'pyname': u'constant_term_flow_coefficient',
'default': 1.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'temperature term flow coefficient',
{'name': u'Temperature Term Flow Coefficient',
'pyname': u'temperature_term_flow_coefficient',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'velocity term flow coefficient',
{'name': u'Velocity Term Flow Coefficient',
'pyname': u'velocity_term_flow_coefficient',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'}),
(u'velocity squared term flow coefficient',
{'name': u'Velocity Squared Term Flow Coefficient',
'pyname': u'velocity_squared_term_flow_coefficient',
'default': 0.0,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 22,
'name': u'ZoneEarthtube',
'pyname': u'ZoneEarthtube',
'required-object': False,
'unique-object': False}
@property
def zone_name(self):
"""field `Zone Name`
Args:
value (str): value for IDD Field `Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_name` or None if not set
"""
return self["Zone Name"]
@zone_name.setter
def zone_name(self, value=None):
"""Corresponds to IDD field `Zone Name`"""
self["Zone Name"] = value
@property
def schedule_name(self):
"""field `Schedule Name`
Args:
value (str): value for IDD Field `Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `schedule_name` or None if not set
"""
return self["Schedule Name"]
@schedule_name.setter
def schedule_name(self, value=None):
"""Corresponds to IDD field `Schedule Name`"""
self["Schedule Name"] = value
@property
def design_flow_rate(self):
"""field `Design Flow Rate`
| "Edesign" in Equation
| Units: m3/s
Args:
value (float): value for IDD Field `Design Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `design_flow_rate` or None if not set
"""
return self["Design Flow Rate"]
@design_flow_rate.setter
def design_flow_rate(self, value=None):
"""Corresponds to IDD field `Design Flow Rate`"""
self["Design Flow Rate"] = value
@property
def minimum_zone_temperature_when_cooling(self):
"""field `Minimum Zone Temperature when Cooling`
        | This is the indoor temperature below which the earth tube is shut off.
| Units: C
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Minimum Zone Temperature when Cooling`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `minimum_zone_temperature_when_cooling` or None if not set
"""
return self["Minimum Zone Temperature when Cooling"]
@minimum_zone_temperature_when_cooling.setter
def minimum_zone_temperature_when_cooling(self, value=None):
"""Corresponds to IDD field `Minimum Zone Temperature when Cooling`"""
self["Minimum Zone Temperature when Cooling"] = value
@property
def maximum_zone_temperature_when_heating(self):
"""field `Maximum Zone Temperature when Heating`
        | This is the indoor temperature above which the earth tube is shut off.
| Units: C
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Maximum Zone Temperature when Heating`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_zone_temperature_when_heating` or None if not set
"""
return self["Maximum Zone Temperature when Heating"]
@maximum_zone_temperature_when_heating.setter
def maximum_zone_temperature_when_heating(self, value=None):
"""Corresponds to IDD field `Maximum Zone Temperature when Heating`"""
self["Maximum Zone Temperature when Heating"] = value
@property
def delta_temperature(self):
"""field `Delta Temperature`
| This is the temperature difference between indoor and outdoor below which the earth tube is shut off
| Units: deltaC
Args:
value (float): value for IDD Field `Delta Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `delta_temperature` or None if not set
"""
return self["Delta Temperature"]
@delta_temperature.setter
def delta_temperature(self, value=None):
"""Corresponds to IDD field `Delta Temperature`"""
self["Delta Temperature"] = value
@property
def earthtube_type(self):
"""field `Earthtube Type`
| Default value: Natural
Args:
value (str): value for IDD Field `Earthtube Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `earthtube_type` or None if not set
"""
return self["Earthtube Type"]
@earthtube_type.setter
def earthtube_type(self, value="Natural"):
"""Corresponds to IDD field `Earthtube Type`"""
self["Earthtube Type"] = value
@property
def fan_pressure_rise(self):
"""field `Fan Pressure Rise`
| pressure rise across the fan
| Units: Pa
Args:
value (float): value for IDD Field `Fan Pressure Rise`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fan_pressure_rise` or None if not set
"""
return self["Fan Pressure Rise"]
@fan_pressure_rise.setter
def fan_pressure_rise(self, value=None):
"""Corresponds to IDD field `Fan Pressure Rise`"""
self["Fan Pressure Rise"] = value
@property
def fan_total_efficiency(self):
"""field `Fan Total Efficiency`
| Default value: 1.0
Args:
value (float): value for IDD Field `Fan Total Efficiency`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fan_total_efficiency` or None if not set
"""
return self["Fan Total Efficiency"]
@fan_total_efficiency.setter
def fan_total_efficiency(self, value=1.0):
"""Corresponds to IDD field `Fan Total Efficiency`"""
self["Fan Total Efficiency"] = value
@property
def pipe_radius(self):
"""field `Pipe Radius`
| Units: m
| Default value: 1.0
Args:
value (float): value for IDD Field `Pipe Radius`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `pipe_radius` or None if not set
"""
return self["Pipe Radius"]
@pipe_radius.setter
def pipe_radius(self, value=1.0):
"""Corresponds to IDD field `Pipe Radius`"""
self["Pipe Radius"] = value
@property
def pipe_thickness(self):
"""field `Pipe Thickness`
| Units: m
| Default value: 0.2
Args:
value (float): value for IDD Field `Pipe Thickness`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `pipe_thickness` or None if not set
"""
return self["Pipe Thickness"]
@pipe_thickness.setter
def pipe_thickness(self, value=0.2):
"""Corresponds to IDD field `Pipe Thickness`"""
self["Pipe Thickness"] = value
@property
def pipe_length(self):
"""field `Pipe Length`
| Units: m
| Default value: 15.0
Args:
value (float): value for IDD Field `Pipe Length`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `pipe_length` or None if not set
"""
return self["Pipe Length"]
@pipe_length.setter
def pipe_length(self, value=15.0):
"""Corresponds to IDD field `Pipe Length`"""
self["Pipe Length"] = value
@property
def pipe_thermal_conductivity(self):
"""field `Pipe Thermal Conductivity`
| Units: W/m-K
| Default value: 200.0
Args:
value (float): value for IDD Field `Pipe Thermal Conductivity`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `pipe_thermal_conductivity` or None if not set
"""
return self["Pipe Thermal Conductivity"]
@pipe_thermal_conductivity.setter
def pipe_thermal_conductivity(self, value=200.0):
"""Corresponds to IDD field `Pipe Thermal Conductivity`"""
self["Pipe Thermal Conductivity"] = value
@property
def pipe_depth_under_ground_surface(self):
"""field `Pipe Depth Under Ground Surface`
| Units: m
| Default value: 3.0
Args:
value (float): value for IDD Field `Pipe Depth Under Ground Surface`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `pipe_depth_under_ground_surface` or None if not set
"""
return self["Pipe Depth Under Ground Surface"]
@pipe_depth_under_ground_surface.setter
def pipe_depth_under_ground_surface(self, value=3.0):
"""Corresponds to IDD field `Pipe Depth Under Ground Surface`"""
self["Pipe Depth Under Ground Surface"] = value
@property
def soil_condition(self):
"""field `Soil Condition`
| Default value: HeavyAndDamp
Args:
value (str): value for IDD Field `Soil Condition`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `soil_condition` or None if not set
"""
return self["Soil Condition"]
@soil_condition.setter
def soil_condition(self, value="HeavyAndDamp"):
"""Corresponds to IDD field `Soil Condition`"""
self["Soil Condition"] = value
@property
def average_soil_surface_temperature(self):
"""field `Average Soil Surface Temperature`
| Units: C
Args:
value (float): value for IDD Field `Average Soil Surface Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `average_soil_surface_temperature` or None if not set
"""
return self["Average Soil Surface Temperature"]
@average_soil_surface_temperature.setter
def average_soil_surface_temperature(self, value=None):
"""Corresponds to IDD field `Average Soil Surface Temperature`"""
self["Average Soil Surface Temperature"] = value
@property
def amplitude_of_soil_surface_temperature(self):
"""field `Amplitude of Soil Surface Temperature`
| Units: deltaC
Args:
value (float): value for IDD Field `Amplitude of Soil Surface Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `amplitude_of_soil_surface_temperature` or None if not set
"""
return self["Amplitude of Soil Surface Temperature"]
@amplitude_of_soil_surface_temperature.setter
def amplitude_of_soil_surface_temperature(self, value=None):
"""Corresponds to IDD field `Amplitude of Soil Surface Temperature`"""
self["Amplitude of Soil Surface Temperature"] = value
@property
def phase_constant_of_soil_surface_temperature(self):
"""field `Phase Constant of Soil Surface Temperature`
| Units: days
Args:
value (float): value for IDD Field `Phase Constant of Soil Surface Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `phase_constant_of_soil_surface_temperature` or None if not set
"""
return self["Phase Constant of Soil Surface Temperature"]
@phase_constant_of_soil_surface_temperature.setter
def phase_constant_of_soil_surface_temperature(self, value=None):
"""Corresponds to IDD field `Phase Constant of Soil Surface
Temperature`"""
self["Phase Constant of Soil Surface Temperature"] = value
@property
def constant_term_flow_coefficient(self):
"""field `Constant Term Flow Coefficient`
| "A" in Equation
| Default value: 1.0
Args:
value (float): value for IDD Field `Constant Term Flow Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `constant_term_flow_coefficient` or None if not set
"""
return self["Constant Term Flow Coefficient"]
@constant_term_flow_coefficient.setter
def constant_term_flow_coefficient(self, value=1.0):
"""Corresponds to IDD field `Constant Term Flow Coefficient`"""
self["Constant Term Flow Coefficient"] = value
@property
def temperature_term_flow_coefficient(self):
"""field `Temperature Term Flow Coefficient`
| "B" in Equation
Args:
value (float): value for IDD Field `Temperature Term Flow Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `temperature_term_flow_coefficient` or None if not set
"""
return self["Temperature Term Flow Coefficient"]
@temperature_term_flow_coefficient.setter
def temperature_term_flow_coefficient(self, value=None):
"""Corresponds to IDD field `Temperature Term Flow Coefficient`"""
self["Temperature Term Flow Coefficient"] = value
@property
def velocity_term_flow_coefficient(self):
"""field `Velocity Term Flow Coefficient`
| "C" in Equation
Args:
value (float): value for IDD Field `Velocity Term Flow Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `velocity_term_flow_coefficient` or None if not set
"""
return self["Velocity Term Flow Coefficient"]
@velocity_term_flow_coefficient.setter
def velocity_term_flow_coefficient(self, value=None):
"""Corresponds to IDD field `Velocity Term Flow Coefficient`"""
self["Velocity Term Flow Coefficient"] = value
@property
def velocity_squared_term_flow_coefficient(self):
"""field `Velocity Squared Term Flow Coefficient`
| "D" in Equation
Args:
value (float): value for IDD Field `Velocity Squared Term Flow Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `velocity_squared_term_flow_coefficient` or None if not set
"""
return self["Velocity Squared Term Flow Coefficient"]
@velocity_squared_term_flow_coefficient.setter
def velocity_squared_term_flow_coefficient(self, value=None):
"""Corresponds to IDD field `Velocity Squared Term Flow Coefficient`"""
self["Velocity Squared Term Flow Coefficient"] = value
class ZoneCoolTowerShower(DataObject):
    """ Corresponds to IDD object `ZoneCoolTower:Shower`.
    A cooltower (sometimes referred to as a wind tower or a shower cooling
    tower) models passive downdraught evaporative cooling (PDEC), in which the
    tower captures wind at its top and cools the outdoor air by water
    evaporation before delivering it to a space.
    """
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'zone name',
{'name': u'Zone Name',
'pyname': u'zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'water supply storage tank name',
{'name': u'Water Supply Storage Tank Name',
'pyname': u'water_supply_storage_tank_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'flow control type',
{'name': u'Flow Control Type',
'pyname': u'flow_control_type',
'default': u'WindDrivenFlow',
'required-field': False,
'autosizable': False,
'accepted-values': [u'WaterFlowSchedule',
u'WindDrivenFlow'],
'autocalculatable': False,
'type': 'alpha'}),
(u'pump flow rate schedule name',
{'name': u'Pump Flow Rate Schedule Name',
'pyname': u'pump_flow_rate_schedule_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'maximum water flow rate',
{'name': u'Maximum Water Flow Rate',
'pyname': u'maximum_water_flow_rate',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'effective tower height',
{'name': u'Effective Tower Height',
'pyname': u'effective_tower_height',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'airflow outlet area',
{'name': u'Airflow Outlet Area',
'pyname': u'airflow_outlet_area',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'maximum air flow rate',
{'name': u'Maximum Air Flow Rate',
'pyname': u'maximum_air_flow_rate',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm3/s'}),
(u'minimum indoor temperature',
{'name': u'Minimum Indoor Temperature',
'pyname': u'minimum_indoor_temperature',
'maximum': 100.0,
'required-field': True,
'autosizable': False,
'minimum': -100.0,
'autocalculatable': False,
'type': u'real',
'unit': u'C'}),
(u'fraction of water loss',
{'name': u'Fraction of Water Loss',
'pyname': u'fraction_of_water_loss',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real'}),
(u'fraction of flow schedule',
{'name': u'Fraction of Flow Schedule',
'pyname': u'fraction_of_flow_schedule',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': 'real'}),
(u'rated power consumption',
{'name': u'Rated Power Consumption',
'pyname': u'rated_power_consumption',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'W'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 0,
'name': u'ZoneCoolTower:Shower',
'pyname': u'ZoneCoolTowerShower',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def availability_schedule_name(self):
"""field `Availability Schedule Name`
| Availability schedule name for this system. Schedule value > 0 means the system is available.
| If this field is blank, the system is always available.
Args:
value (str): value for IDD Field `Availability Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_schedule_name` or None if not set
"""
return self["Availability Schedule Name"]
@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
"""Corresponds to IDD field `Availability Schedule Name`"""
self["Availability Schedule Name"] = value
@property
def zone_name(self):
"""field `Zone Name`
Args:
value (str): value for IDD Field `Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_name` or None if not set
"""
return self["Zone Name"]
@zone_name.setter
def zone_name(self, value=None):
"""Corresponds to IDD field `Zone Name`"""
self["Zone Name"] = value
@property
def water_supply_storage_tank_name(self):
"""field `Water Supply Storage Tank Name`
        | For a stand-alone tank or an underground water supply, leave this input blank.
Args:
value (str): value for IDD Field `Water Supply Storage Tank Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `water_supply_storage_tank_name` or None if not set
"""
return self["Water Supply Storage Tank Name"]
@water_supply_storage_tank_name.setter
def water_supply_storage_tank_name(self, value=None):
"""Corresponds to IDD field `Water Supply Storage Tank Name`"""
self["Water Supply Storage Tank Name"] = value
@property
def flow_control_type(self):
"""field `Flow Control Type`
| Water flow schedule should be selected when the water flow rate is known.
| Wind-driven flow should be selected when the water flow rate is unknown.
| Default value: WindDrivenFlow
Args:
value (str): value for IDD Field `Flow Control Type`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `flow_control_type` or None if not set
"""
return self["Flow Control Type"]
@flow_control_type.setter
def flow_control_type(self, value="WindDrivenFlow"):
"""Corresponds to IDD field `Flow Control Type`"""
self["Flow Control Type"] = value
@property
def pump_flow_rate_schedule_name(self):
"""field `Pump Flow Rate Schedule Name`
Args:
value (str): value for IDD Field `Pump Flow Rate Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `pump_flow_rate_schedule_name` or None if not set
"""
return self["Pump Flow Rate Schedule Name"]
@pump_flow_rate_schedule_name.setter
def pump_flow_rate_schedule_name(self, value=None):
"""Corresponds to IDD field `Pump Flow Rate Schedule Name`"""
self["Pump Flow Rate Schedule Name"] = value
@property
def maximum_water_flow_rate(self):
"""field `Maximum Water Flow Rate`
| Units: m3/s
Args:
value (float): value for IDD Field `Maximum Water Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_water_flow_rate` or None if not set
"""
return self["Maximum Water Flow Rate"]
@maximum_water_flow_rate.setter
def maximum_water_flow_rate(self, value=None):
"""Corresponds to IDD field `Maximum Water Flow Rate`"""
self["Maximum Water Flow Rate"] = value
@property
def effective_tower_height(self):
"""field `Effective Tower Height`
        | This height is measured from either the spray or the wet pad to the top of the outlet.
| Units: m
Args:
value (float): value for IDD Field `Effective Tower Height`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `effective_tower_height` or None if not set
"""
return self["Effective Tower Height"]
@effective_tower_height.setter
def effective_tower_height(self, value=None):
"""Corresponds to IDD field `Effective Tower Height`"""
self["Effective Tower Height"] = value
@property
def airflow_outlet_area(self):
"""field `Airflow Outlet Area`
        | The user must specify an effective area when the outlet area is significantly larger
        | than the cross-sectional area of the cooltower. If there is more than one outlet,
        | assume the air passes through only one.
| Units: m2
Args:
value (float): value for IDD Field `Airflow Outlet Area`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `airflow_outlet_area` or None if not set
"""
return self["Airflow Outlet Area"]
@airflow_outlet_area.setter
def airflow_outlet_area(self, value=None):
"""Corresponds to IDD field `Airflow Outlet Area`"""
self["Airflow Outlet Area"] = value
@property
def maximum_air_flow_rate(self):
"""field `Maximum Air Flow Rate`
| Units: m3/s
Args:
value (float): value for IDD Field `Maximum Air Flow Rate`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `maximum_air_flow_rate` or None if not set
"""
return self["Maximum Air Flow Rate"]
@maximum_air_flow_rate.setter
def maximum_air_flow_rate(self, value=None):
"""Corresponds to IDD field `Maximum Air Flow Rate`"""
self["Maximum Air Flow Rate"] = value
@property
def minimum_indoor_temperature(self):
"""field `Minimum Indoor Temperature`
        | This field specifies the indoor temperature below which the cooltower is shut off.
| Units: C
| value >= -100.0
| value <= 100.0
Args:
value (float): value for IDD Field `Minimum Indoor Temperature`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `minimum_indoor_temperature` or None if not set
"""
return self["Minimum Indoor Temperature"]
@minimum_indoor_temperature.setter
def minimum_indoor_temperature(self, value=None):
"""Corresponds to IDD field `Minimum Indoor Temperature`"""
self["Minimum Indoor Temperature"] = value
@property
def fraction_of_water_loss(self):
"""field `Fraction of Water Loss`
| value <= 1.0
Args:
value (float): value for IDD Field `Fraction of Water Loss`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fraction_of_water_loss` or None if not set
"""
return self["Fraction of Water Loss"]
@fraction_of_water_loss.setter
def fraction_of_water_loss(self, value=None):
"""Corresponds to IDD field `Fraction of Water Loss`"""
self["Fraction of Water Loss"] = value
@property
def fraction_of_flow_schedule(self):
"""field `Fraction of Flow Schedule`
| value <= 1.0
Args:
value (float): value for IDD Field `Fraction of Flow Schedule`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `fraction_of_flow_schedule` or None if not set
"""
return self["Fraction of Flow Schedule"]
@fraction_of_flow_schedule.setter
def fraction_of_flow_schedule(self, value=None):
"""Corresponds to IDD field `Fraction of Flow Schedule`"""
self["Fraction of Flow Schedule"] = value
@property
def rated_power_consumption(self):
"""field `Rated Power Consumption`
| Units: W
Args:
value (float): value for IDD Field `Rated Power Consumption`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `rated_power_consumption` or None if not set
"""
return self["Rated Power Consumption"]
@rated_power_consumption.setter
def rated_power_consumption(self, value=None):
"""Corresponds to IDD field `Rated Power Consumption`"""
self["Rated Power Consumption"] = value
class ZoneThermalChimney(DataObject):
    """Corresponds to IDD object `ZoneThermalChimney`.
    A thermal chimney is a vertical shaft that uses solar radiation to enhance
    natural ventilation. It consists of an absorber wall, an air gap, and a
    glass cover with high solar transmissivity.
    """
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': 'alpha'}),
(u'zone name',
{'name': u'Zone Name',
'pyname': u'zone_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'availability schedule name',
{'name': u'Availability Schedule Name',
'pyname': u'availability_schedule_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'width of the absorber wall',
{'name': u'Width of the Absorber Wall',
'pyname': u'width_of_the_absorber_wall',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'cross sectional area of air channel outlet',
{'name': u'Cross Sectional Area of Air Channel Outlet',
'pyname': u'cross_sectional_area_of_air_channel_outlet',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'discharge coefficient',
{'name': u'Discharge Coefficient',
'pyname': u'discharge_coefficient',
'default': 0.8,
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'zone 1 name',
{'name': u'Zone 1 Name',
'pyname': u'zone_1_name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 1',
{'name': u'Distance from Top of Thermal Chimney to Inlet 1',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_1',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 1',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 1',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_1',
'default': 1.0,
'maximum': 1.0,
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 1',
{'name': u'Cross Sectional Areas of Air Channel Inlet 1',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_1',
'required-field': True,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 2 name',
{'name': u'Zone 2 Name',
'pyname': u'zone_2_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 2',
{'name': u'Distance from Top of Thermal Chimney to Inlet 2',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_2',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 2',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 2',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_2',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 2',
{'name': u'Cross Sectional Areas of Air Channel Inlet 2',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_2',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 3 name',
{'name': u'Zone 3 Name',
'pyname': u'zone_3_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 3',
{'name': u'Distance from Top of Thermal Chimney to Inlet 3',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_3',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 3',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 3',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_3',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 3',
{'name': u'Cross Sectional Areas of Air Channel Inlet 3',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_3',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 4 name',
{'name': u'Zone 4 Name',
'pyname': u'zone_4_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 4',
{'name': u'Distance from Top of Thermal Chimney to Inlet 4',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_4',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 4',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 4',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_4',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 4',
{'name': u'Cross Sectional Areas of Air Channel Inlet 4',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_4',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 5 name',
{'name': u'Zone 5 Name',
'pyname': u'zone_5_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 5',
{'name': u'Distance from Top of Thermal Chimney to Inlet 5',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_5',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 5',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 5',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_5',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 5',
{'name': u'Cross Sectional Areas of Air Channel Inlet 5',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_5',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 6 name',
{'name': u'Zone 6 Name',
'pyname': u'zone_6_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 6',
{'name': u'Distance from Top of Thermal Chimney to Inlet 6',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_6',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 6',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 6',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_6',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 6',
{'name': u'Cross Sectional Areas of Air Channel Inlet 6',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_6',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 7 name',
{'name': u'Zone 7 Name',
'pyname': u'zone_7_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 7',
{'name': u'Distance from Top of Thermal Chimney to Inlet 7',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_7',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 7',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 7',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_7',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 7',
{'name': u'Cross Sectional Areas of Air Channel Inlet 7',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_7',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 8 name',
{'name': u'Zone 8 Name',
'pyname': u'zone_8_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 8',
{'name': u'Distance from Top of Thermal Chimney to Inlet 8',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_8',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 8',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 8',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_8',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 8',
{'name': u'Cross Sectional Areas of Air Channel Inlet 8',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_8',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 9 name',
{'name': u'Zone 9 Name',
'pyname': u'zone_9_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 9',
{'name': u'Distance from Top of Thermal Chimney to Inlet 9',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_9',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 9',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 9',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_9',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 9',
{'name': u'Cross Sectional Areas of Air Channel Inlet 9',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_9',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 10 name',
{'name': u'Zone 10 Name',
'pyname': u'zone_10_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 10',
{'name': u'Distance from Top of Thermal Chimney to Inlet 10',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_10',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 10',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 10',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_10',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 10',
{'name': u'Cross Sectional Areas of Air Channel Inlet 10',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_10',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 11 name',
{'name': u'Zone 11 Name',
'pyname': u'zone_11_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 11',
{'name': u'Distance from Top of Thermal Chimney to Inlet 11',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_11',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 11',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 11',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_11',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 11',
{'name': u'Cross Sectional Areas of Air Channel Inlet 11',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_11',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 12 name',
{'name': u'Zone 12 Name',
'pyname': u'zone_12_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 12',
{'name': u'Distance from Top of Thermal Chimney to Inlet 12',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_12',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 12',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 12',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_12',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 12',
{'name': u'Cross Sectional Areas of Air Channel Inlet 12',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_12',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 13 name',
{'name': u'Zone 13 Name',
'pyname': u'zone_13_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 13',
{'name': u'Distance from Top of Thermal Chimney to Inlet 13',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_13',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 13',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 13',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_13',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 13',
{'name': u'Cross Sectional Areas of Air Channel Inlet 13',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_13',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 14 name',
{'name': u'Zone 14 Name',
'pyname': u'zone_14_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 14',
{'name': u'Distance from Top of Thermal Chimney to Inlet 14',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_14',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 14',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 14',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_14',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 14',
{'name': u'Cross Sectional Areas of Air Channel Inlet 14',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_14',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 15 name',
{'name': u'Zone 15 Name',
'pyname': u'zone_15_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 15',
{'name': u'Distance from Top of Thermal Chimney to Inlet 15',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_15',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 15',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 15',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_15',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 15',
{'name': u'Cross Sectional Areas of Air Channel Inlet 15',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_15',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 16 name',
{'name': u'Zone 16 Name',
'pyname': u'zone_16_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 16',
{'name': u'Distance from Top of Thermal Chimney to Inlet 16',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_16',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 16',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 16',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_16',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 16',
{'name': u'Cross Sectional Areas of Air Channel Inlet 16',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_16',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 17 name',
{'name': u'Zone 17 Name',
'pyname': u'zone_17_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 17',
{'name': u'Distance from Top of Thermal Chimney to Inlet 17',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_17',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 17',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 17',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_17',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 17',
{'name': u'Cross Sectional Areas of Air Channel Inlet 17',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_17',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 18 name',
{'name': u'Zone 18 Name',
'pyname': u'zone_18_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 18',
{'name': u'Distance from Top of Thermal Chimney to Inlet 18',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_18',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 18',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 18',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_18',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 18',
{'name': u'Cross Sectional Areas of Air Channel Inlet 18',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_18',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 19 name',
{'name': u'Zone 19 Name',
'pyname': u'zone_19_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 19',
{'name': u'Distance from Top of Thermal Chimney to Inlet 19',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_19',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 19',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 19',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_19',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 19',
{'name': u'Cross Sectional Areas of Air Channel Inlet 19',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_19',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'}),
(u'zone 20 name',
{'name': u'Zone 20 Name',
'pyname': u'zone_20_name',
'required-field': False,
'autosizable': False,
'autocalculatable': False,
'type': u'object-list'}),
(u'distance from top of thermal chimney to inlet 20',
{'name': u'Distance from Top of Thermal Chimney to Inlet 20',
'pyname': u'distance_from_top_of_thermal_chimney_to_inlet_20',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'relative ratios of air flow rates passing through zone 20',
{'name': u'Relative Ratios of Air Flow Rates Passing through Zone 20',
'pyname': u'relative_ratios_of_air_flow_rates_passing_through_zone_20',
'maximum': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'cross sectional areas of air channel inlet 20',
{'name': u'Cross Sectional Areas of Air Channel Inlet 20',
'pyname': u'cross_sectional_areas_of_air_channel_inlet_20',
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm2'})]),
'format': None,
'group': u'Zone Airflow',
'min-fields': 10,
'name': u'ZoneThermalChimney',
'pyname': u'ZoneThermalChimney',
'required-object': False,
'unique-object': False}
@property
def name(self):
"""field `Name`
Args:
value (str): value for IDD Field `Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `name` or None if not set
"""
return self["Name"]
@name.setter
def name(self, value=None):
"""Corresponds to IDD field `Name`"""
self["Name"] = value
@property
def zone_name(self):
"""field `Zone Name`
| Name of zone that is the thermal chimney
Args:
value (str): value for IDD Field `Zone Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_name` or None if not set
"""
return self["Zone Name"]
@zone_name.setter
def zone_name(self, value=None):
"""Corresponds to IDD field `Zone Name`"""
self["Zone Name"] = value
@property
def availability_schedule_name(self):
"""field `Availability Schedule Name`
| Availability schedule name for this system. Schedule value > 0 means the system is available.
| If this field is blank, the system is always available.
Args:
value (str): value for IDD Field `Availability Schedule Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `availability_schedule_name` or None if not set
"""
return self["Availability Schedule Name"]
@availability_schedule_name.setter
def availability_schedule_name(self, value=None):
"""Corresponds to IDD field `Availability Schedule Name`"""
self["Availability Schedule Name"] = value
@property
def width_of_the_absorber_wall(self):
"""field `Width of the Absorber Wall`
| Units: m
Args:
value (float): value for IDD Field `Width of the Absorber Wall`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `width_of_the_absorber_wall` or None if not set
"""
return self["Width of the Absorber Wall"]
@width_of_the_absorber_wall.setter
def width_of_the_absorber_wall(self, value=None):
"""Corresponds to IDD field `Width of the Absorber Wall`"""
self["Width of the Absorber Wall"] = value
@property
def cross_sectional_area_of_air_channel_outlet(self):
"""field `Cross Sectional Area of Air Channel Outlet`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Area of Air Channel Outlet`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_area_of_air_channel_outlet` or None if not set
"""
return self["Cross Sectional Area of Air Channel Outlet"]
@cross_sectional_area_of_air_channel_outlet.setter
def cross_sectional_area_of_air_channel_outlet(self, value=None):
"""Corresponds to IDD field `Cross Sectional Area of Air Channel
Outlet`"""
self["Cross Sectional Area of Air Channel Outlet"] = value
@property
def discharge_coefficient(self):
"""field `Discharge Coefficient`
| Default value: 0.8
| value <= 1.0
Args:
value (float): value for IDD Field `Discharge Coefficient`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `discharge_coefficient` or None if not set
"""
return self["Discharge Coefficient"]
@discharge_coefficient.setter
def discharge_coefficient(self, value=0.8):
"""Corresponds to IDD field `Discharge Coefficient`"""
self["Discharge Coefficient"] = value
@property
def zone_1_name(self):
"""field `Zone 1 Name`
Args:
value (str): value for IDD Field `Zone 1 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_1_name` or None if not set
"""
return self["Zone 1 Name"]
@zone_1_name.setter
def zone_1_name(self, value=None):
"""Corresponds to IDD field `Zone 1 Name`"""
self["Zone 1 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_1(self):
"""field `Distance from Top of Thermal Chimney to Inlet 1`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_1` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 1"]
@distance_from_top_of_thermal_chimney_to_inlet_1.setter
def distance_from_top_of_thermal_chimney_to_inlet_1(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 1`"""
self["Distance from Top of Thermal Chimney to Inlet 1"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_1(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 1`
| Default value: 1.0
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_1` or None if not set
"""
return self["Relative Ratios of Air Flow Rates Passing through Zone 1"]
@relative_ratios_of_air_flow_rates_passing_through_zone_1.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_1(
self,
value=1.0):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 1`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 1"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_1(self):
"""field `Cross Sectional Areas of Air Channel Inlet 1`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 1`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_1` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 1"]
@cross_sectional_areas_of_air_channel_inlet_1.setter
def cross_sectional_areas_of_air_channel_inlet_1(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
1`"""
self["Cross Sectional Areas of Air Channel Inlet 1"] = value
@property
def zone_2_name(self):
"""field `Zone 2 Name`
Args:
value (str): value for IDD Field `Zone 2 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_2_name` or None if not set
"""
return self["Zone 2 Name"]
@zone_2_name.setter
def zone_2_name(self, value=None):
"""Corresponds to IDD field `Zone 2 Name`"""
self["Zone 2 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_2(self):
"""field `Distance from Top of Thermal Chimney to Inlet 2`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 2`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_2` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 2"]
@distance_from_top_of_thermal_chimney_to_inlet_2.setter
def distance_from_top_of_thermal_chimney_to_inlet_2(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 2`"""
self["Distance from Top of Thermal Chimney to Inlet 2"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_2(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 2`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 2`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_2` or None if not set
"""
return self["Relative Ratios of Air Flow Rates Passing through Zone 2"]
@relative_ratios_of_air_flow_rates_passing_through_zone_2.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_2(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 2`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 2"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_2(self):
"""field `Cross Sectional Areas of Air Channel Inlet 2`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 2`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_2` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 2"]
@cross_sectional_areas_of_air_channel_inlet_2.setter
def cross_sectional_areas_of_air_channel_inlet_2(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
2`"""
self["Cross Sectional Areas of Air Channel Inlet 2"] = value
@property
def zone_3_name(self):
"""field `Zone 3 Name`
Args:
value (str): value for IDD Field `Zone 3 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_3_name` or None if not set
"""
return self["Zone 3 Name"]
@zone_3_name.setter
def zone_3_name(self, value=None):
"""Corresponds to IDD field `Zone 3 Name`"""
self["Zone 3 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_3(self):
"""field `Distance from Top of Thermal Chimney to Inlet 3`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 3`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_3` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 3"]
@distance_from_top_of_thermal_chimney_to_inlet_3.setter
def distance_from_top_of_thermal_chimney_to_inlet_3(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 3`"""
self["Distance from Top of Thermal Chimney to Inlet 3"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_3(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 3`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 3`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_3` or None if not set
"""
return self["Relative Ratios of Air Flow Rates Passing through Zone 3"]
@relative_ratios_of_air_flow_rates_passing_through_zone_3.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_3(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 3`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 3"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_3(self):
"""field `Cross Sectional Areas of Air Channel Inlet 3`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 3`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_3` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 3"]
@cross_sectional_areas_of_air_channel_inlet_3.setter
def cross_sectional_areas_of_air_channel_inlet_3(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
3`"""
self["Cross Sectional Areas of Air Channel Inlet 3"] = value
@property
def zone_4_name(self):
"""field `Zone 4 Name`
Args:
value (str): value for IDD Field `Zone 4 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_4_name` or None if not set
"""
return self["Zone 4 Name"]
@zone_4_name.setter
def zone_4_name(self, value=None):
"""Corresponds to IDD field `Zone 4 Name`"""
self["Zone 4 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_4(self):
"""field `Distance from Top of Thermal Chimney to Inlet 4`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 4`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_4` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 4"]
@distance_from_top_of_thermal_chimney_to_inlet_4.setter
def distance_from_top_of_thermal_chimney_to_inlet_4(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 4`"""
self["Distance from Top of Thermal Chimney to Inlet 4"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_4(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 4`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 4`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_4` or None if not set
"""
return self["Relative Ratios of Air Flow Rates Passing through Zone 4"]
@relative_ratios_of_air_flow_rates_passing_through_zone_4.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_4(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 4`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 4"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_4(self):
"""field `Cross Sectional Areas of Air Channel Inlet 4`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 4`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_4` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 4"]
@cross_sectional_areas_of_air_channel_inlet_4.setter
def cross_sectional_areas_of_air_channel_inlet_4(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
4`"""
self["Cross Sectional Areas of Air Channel Inlet 4"] = value
@property
def zone_5_name(self):
"""field `Zone 5 Name`
Args:
value (str): value for IDD Field `Zone 5 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_5_name` or None if not set
"""
return self["Zone 5 Name"]
@zone_5_name.setter
def zone_5_name(self, value=None):
"""Corresponds to IDD field `Zone 5 Name`"""
self["Zone 5 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_5(self):
"""field `Distance from Top of Thermal Chimney to Inlet 5`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 5`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_5` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 5"]
@distance_from_top_of_thermal_chimney_to_inlet_5.setter
def distance_from_top_of_thermal_chimney_to_inlet_5(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 5`"""
self["Distance from Top of Thermal Chimney to Inlet 5"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_5(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 5`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 5`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_5` or None if not set
"""
return self["Relative Ratios of Air Flow Rates Passing through Zone 5"]
@relative_ratios_of_air_flow_rates_passing_through_zone_5.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_5(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 5`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 5"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_5(self):
"""field `Cross Sectional Areas of Air Channel Inlet 5`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 5`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_5` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 5"]
@cross_sectional_areas_of_air_channel_inlet_5.setter
def cross_sectional_areas_of_air_channel_inlet_5(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
5`"""
self["Cross Sectional Areas of Air Channel Inlet 5"] = value
@property
def zone_6_name(self):
"""field `Zone 6 Name`
Args:
value (str): value for IDD Field `Zone 6 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_6_name` or None if not set
"""
return self["Zone 6 Name"]
@zone_6_name.setter
def zone_6_name(self, value=None):
"""Corresponds to IDD field `Zone 6 Name`"""
self["Zone 6 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_6(self):
"""field `Distance from Top of Thermal Chimney to Inlet 6`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 6`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_6` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 6"]
@distance_from_top_of_thermal_chimney_to_inlet_6.setter
def distance_from_top_of_thermal_chimney_to_inlet_6(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 6`"""
self["Distance from Top of Thermal Chimney to Inlet 6"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_6(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 6`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 6`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_6` or None if not set
"""
return self["Relative Ratios of Air Flow Rates Passing through Zone 6"]
@relative_ratios_of_air_flow_rates_passing_through_zone_6.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_6(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 6`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 6"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_6(self):
"""field `Cross Sectional Areas of Air Channel Inlet 6`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 6`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_6` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 6"]
@cross_sectional_areas_of_air_channel_inlet_6.setter
def cross_sectional_areas_of_air_channel_inlet_6(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
6`"""
self["Cross Sectional Areas of Air Channel Inlet 6"] = value
@property
def zone_7_name(self):
"""field `Zone 7 Name`
Args:
value (str): value for IDD Field `Zone 7 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_7_name` or None if not set
"""
return self["Zone 7 Name"]
@zone_7_name.setter
def zone_7_name(self, value=None):
"""Corresponds to IDD field `Zone 7 Name`"""
self["Zone 7 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_7(self):
"""field `Distance from Top of Thermal Chimney to Inlet 7`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 7`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_7` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 7"]
@distance_from_top_of_thermal_chimney_to_inlet_7.setter
def distance_from_top_of_thermal_chimney_to_inlet_7(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 7`"""
self["Distance from Top of Thermal Chimney to Inlet 7"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_7(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 7`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 7`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_7` or None if not set
"""
return self["Relative Ratios of Air Flow Rates Passing through Zone 7"]
@relative_ratios_of_air_flow_rates_passing_through_zone_7.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_7(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 7`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 7"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_7(self):
"""field `Cross Sectional Areas of Air Channel Inlet 7`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 7`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_7` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 7"]
@cross_sectional_areas_of_air_channel_inlet_7.setter
def cross_sectional_areas_of_air_channel_inlet_7(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
7`"""
self["Cross Sectional Areas of Air Channel Inlet 7"] = value
@property
def zone_8_name(self):
"""field `Zone 8 Name`
Args:
value (str): value for IDD Field `Zone 8 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_8_name` or None if not set
"""
return self["Zone 8 Name"]
@zone_8_name.setter
def zone_8_name(self, value=None):
"""Corresponds to IDD field `Zone 8 Name`"""
self["Zone 8 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_8(self):
"""field `Distance from Top of Thermal Chimney to Inlet 8`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 8`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_8` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 8"]
@distance_from_top_of_thermal_chimney_to_inlet_8.setter
def distance_from_top_of_thermal_chimney_to_inlet_8(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 8`"""
self["Distance from Top of Thermal Chimney to Inlet 8"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_8(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 8`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 8`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_8` or None if not set
"""
return self["Relative Ratios of Air Flow Rates Passing through Zone 8"]
@relative_ratios_of_air_flow_rates_passing_through_zone_8.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_8(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 8`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 8"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_8(self):
"""field `Cross Sectional Areas of Air Channel Inlet 8`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 8`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_8` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 8"]
@cross_sectional_areas_of_air_channel_inlet_8.setter
def cross_sectional_areas_of_air_channel_inlet_8(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
8`"""
self["Cross Sectional Areas of Air Channel Inlet 8"] = value
@property
def zone_9_name(self):
"""field `Zone 9 Name`
Args:
value (str): value for IDD Field `Zone 9 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_9_name` or None if not set
"""
return self["Zone 9 Name"]
@zone_9_name.setter
def zone_9_name(self, value=None):
"""Corresponds to IDD field `Zone 9 Name`"""
self["Zone 9 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_9(self):
"""field `Distance from Top of Thermal Chimney to Inlet 9`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_9` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 9"]
@distance_from_top_of_thermal_chimney_to_inlet_9.setter
def distance_from_top_of_thermal_chimney_to_inlet_9(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 9`"""
self["Distance from Top of Thermal Chimney to Inlet 9"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_9(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 9`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_9` or None if not set
"""
return self["Relative Ratios of Air Flow Rates Passing through Zone 9"]
@relative_ratios_of_air_flow_rates_passing_through_zone_9.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_9(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 9`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 9"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_9(self):
"""field `Cross Sectional Areas of Air Channel Inlet 9`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 9`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_9` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 9"]
@cross_sectional_areas_of_air_channel_inlet_9.setter
def cross_sectional_areas_of_air_channel_inlet_9(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
9`"""
self["Cross Sectional Areas of Air Channel Inlet 9"] = value
@property
def zone_10_name(self):
"""field `Zone 10 Name`
Args:
value (str): value for IDD Field `Zone 10 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_10_name` or None if not set
"""
return self["Zone 10 Name"]
@zone_10_name.setter
def zone_10_name(self, value=None):
"""Corresponds to IDD field `Zone 10 Name`"""
self["Zone 10 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_10(self):
"""field `Distance from Top of Thermal Chimney to Inlet 10`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_10` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 10"]
@distance_from_top_of_thermal_chimney_to_inlet_10.setter
def distance_from_top_of_thermal_chimney_to_inlet_10(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 10`"""
self["Distance from Top of Thermal Chimney to Inlet 10"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_10(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 10`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_10` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 10"]
@relative_ratios_of_air_flow_rates_passing_through_zone_10.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_10(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 10`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 10"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_10(self):
"""field `Cross Sectional Areas of Air Channel Inlet 10`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 10`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_10` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 10"]
@cross_sectional_areas_of_air_channel_inlet_10.setter
def cross_sectional_areas_of_air_channel_inlet_10(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
10`"""
self["Cross Sectional Areas of Air Channel Inlet 10"] = value
@property
def zone_11_name(self):
"""field `Zone 11 Name`
Args:
value (str): value for IDD Field `Zone 11 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_11_name` or None if not set
"""
return self["Zone 11 Name"]
@zone_11_name.setter
def zone_11_name(self, value=None):
"""Corresponds to IDD field `Zone 11 Name`"""
self["Zone 11 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_11(self):
"""field `Distance from Top of Thermal Chimney to Inlet 11`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 11`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_11` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 11"]
@distance_from_top_of_thermal_chimney_to_inlet_11.setter
def distance_from_top_of_thermal_chimney_to_inlet_11(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 11`"""
self["Distance from Top of Thermal Chimney to Inlet 11"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_11(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 11`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 11`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_11` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 11"]
@relative_ratios_of_air_flow_rates_passing_through_zone_11.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_11(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 11`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 11"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_11(self):
"""field `Cross Sectional Areas of Air Channel Inlet 11`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 11`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_11` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 11"]
@cross_sectional_areas_of_air_channel_inlet_11.setter
def cross_sectional_areas_of_air_channel_inlet_11(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
11`"""
self["Cross Sectional Areas of Air Channel Inlet 11"] = value
@property
def zone_12_name(self):
"""field `Zone 12 Name`
Args:
value (str): value for IDD Field `Zone 12 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_12_name` or None if not set
"""
return self["Zone 12 Name"]
@zone_12_name.setter
def zone_12_name(self, value=None):
"""Corresponds to IDD field `Zone 12 Name`"""
self["Zone 12 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_12(self):
"""field `Distance from Top of Thermal Chimney to Inlet 12`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 12`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_12` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 12"]
@distance_from_top_of_thermal_chimney_to_inlet_12.setter
def distance_from_top_of_thermal_chimney_to_inlet_12(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 12`"""
self["Distance from Top of Thermal Chimney to Inlet 12"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_12(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 12`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 12`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_12` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 12"]
@relative_ratios_of_air_flow_rates_passing_through_zone_12.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_12(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 12`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 12"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_12(self):
"""field `Cross Sectional Areas of Air Channel Inlet 12`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 12`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_12` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 12"]
@cross_sectional_areas_of_air_channel_inlet_12.setter
def cross_sectional_areas_of_air_channel_inlet_12(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
12`"""
self["Cross Sectional Areas of Air Channel Inlet 12"] = value
@property
def zone_13_name(self):
"""field `Zone 13 Name`
Args:
value (str): value for IDD Field `Zone 13 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_13_name` or None if not set
"""
return self["Zone 13 Name"]
@zone_13_name.setter
def zone_13_name(self, value=None):
"""Corresponds to IDD field `Zone 13 Name`"""
self["Zone 13 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_13(self):
"""field `Distance from Top of Thermal Chimney to Inlet 13`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 13`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_13` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 13"]
@distance_from_top_of_thermal_chimney_to_inlet_13.setter
def distance_from_top_of_thermal_chimney_to_inlet_13(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 13`"""
self["Distance from Top of Thermal Chimney to Inlet 13"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_13(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 13`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 13`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_13` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 13"]
@relative_ratios_of_air_flow_rates_passing_through_zone_13.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_13(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 13`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 13"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_13(self):
"""field `Cross Sectional Areas of Air Channel Inlet 13`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 13`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_13` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 13"]
@cross_sectional_areas_of_air_channel_inlet_13.setter
def cross_sectional_areas_of_air_channel_inlet_13(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
13`"""
self["Cross Sectional Areas of Air Channel Inlet 13"] = value
@property
def zone_14_name(self):
"""field `Zone 14 Name`
Args:
value (str): value for IDD Field `Zone 14 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_14_name` or None if not set
"""
return self["Zone 14 Name"]
@zone_14_name.setter
def zone_14_name(self, value=None):
"""Corresponds to IDD field `Zone 14 Name`"""
self["Zone 14 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_14(self):
"""field `Distance from Top of Thermal Chimney to Inlet 14`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 14`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_14` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 14"]
@distance_from_top_of_thermal_chimney_to_inlet_14.setter
def distance_from_top_of_thermal_chimney_to_inlet_14(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 14`"""
self["Distance from Top of Thermal Chimney to Inlet 14"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_14(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 14`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 14`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_14` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 14"]
@relative_ratios_of_air_flow_rates_passing_through_zone_14.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_14(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 14`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 14"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_14(self):
"""field `Cross Sectional Areas of Air Channel Inlet 14`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 14`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_14` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 14"]
@cross_sectional_areas_of_air_channel_inlet_14.setter
def cross_sectional_areas_of_air_channel_inlet_14(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
14`"""
self["Cross Sectional Areas of Air Channel Inlet 14"] = value
@property
def zone_15_name(self):
"""field `Zone 15 Name`
Args:
value (str): value for IDD Field `Zone 15 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_15_name` or None if not set
"""
return self["Zone 15 Name"]
@zone_15_name.setter
def zone_15_name(self, value=None):
"""Corresponds to IDD field `Zone 15 Name`"""
self["Zone 15 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_15(self):
"""field `Distance from Top of Thermal Chimney to Inlet 15`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 15`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_15` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 15"]
@distance_from_top_of_thermal_chimney_to_inlet_15.setter
def distance_from_top_of_thermal_chimney_to_inlet_15(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 15`"""
self["Distance from Top of Thermal Chimney to Inlet 15"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_15(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 15`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 15`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_15` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 15"]
@relative_ratios_of_air_flow_rates_passing_through_zone_15.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_15(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 15`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 15"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_15(self):
"""field `Cross Sectional Areas of Air Channel Inlet 15`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 15`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_15` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 15"]
@cross_sectional_areas_of_air_channel_inlet_15.setter
def cross_sectional_areas_of_air_channel_inlet_15(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
15`"""
self["Cross Sectional Areas of Air Channel Inlet 15"] = value
@property
def zone_16_name(self):
"""field `Zone 16 Name`
Args:
value (str): value for IDD Field `Zone 16 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_16_name` or None if not set
"""
return self["Zone 16 Name"]
@zone_16_name.setter
def zone_16_name(self, value=None):
"""Corresponds to IDD field `Zone 16 Name`"""
self["Zone 16 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_16(self):
"""field `Distance from Top of Thermal Chimney to Inlet 16`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 16`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_16` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 16"]
@distance_from_top_of_thermal_chimney_to_inlet_16.setter
def distance_from_top_of_thermal_chimney_to_inlet_16(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 16`"""
self["Distance from Top of Thermal Chimney to Inlet 16"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_16(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 16`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 16`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_16` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 16"]
@relative_ratios_of_air_flow_rates_passing_through_zone_16.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_16(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 16`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 16"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_16(self):
"""field `Cross Sectional Areas of Air Channel Inlet 16`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 16`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_16` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 16"]
@cross_sectional_areas_of_air_channel_inlet_16.setter
def cross_sectional_areas_of_air_channel_inlet_16(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
16`"""
self["Cross Sectional Areas of Air Channel Inlet 16"] = value
@property
def zone_17_name(self):
"""field `Zone 17 Name`
Args:
value (str): value for IDD Field `Zone 17 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_17_name` or None if not set
"""
return self["Zone 17 Name"]
@zone_17_name.setter
def zone_17_name(self, value=None):
"""Corresponds to IDD field `Zone 17 Name`"""
self["Zone 17 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_17(self):
"""field `Distance from Top of Thermal Chimney to Inlet 17`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 17`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_17` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 17"]
@distance_from_top_of_thermal_chimney_to_inlet_17.setter
def distance_from_top_of_thermal_chimney_to_inlet_17(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 17`"""
self["Distance from Top of Thermal Chimney to Inlet 17"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_17(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 17`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 17`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_17` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 17"]
@relative_ratios_of_air_flow_rates_passing_through_zone_17.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_17(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 17`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 17"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_17(self):
"""field `Cross Sectional Areas of Air Channel Inlet 17`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 17`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_17` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 17"]
@cross_sectional_areas_of_air_channel_inlet_17.setter
def cross_sectional_areas_of_air_channel_inlet_17(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
17`"""
self["Cross Sectional Areas of Air Channel Inlet 17"] = value
@property
def zone_18_name(self):
"""field `Zone 18 Name`
Args:
value (str): value for IDD Field `Zone 18 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_18_name` or None if not set
"""
return self["Zone 18 Name"]
@zone_18_name.setter
def zone_18_name(self, value=None):
"""Corresponds to IDD field `Zone 18 Name`"""
self["Zone 18 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_18(self):
"""field `Distance from Top of Thermal Chimney to Inlet 18`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 18`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_18` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 18"]
@distance_from_top_of_thermal_chimney_to_inlet_18.setter
def distance_from_top_of_thermal_chimney_to_inlet_18(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 18`"""
self["Distance from Top of Thermal Chimney to Inlet 18"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_18(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 18`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 18`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_18` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 18"]
@relative_ratios_of_air_flow_rates_passing_through_zone_18.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_18(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 18`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 18"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_18(self):
"""field `Cross Sectional Areas of Air Channel Inlet 18`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 18`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_18` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 18"]
@cross_sectional_areas_of_air_channel_inlet_18.setter
def cross_sectional_areas_of_air_channel_inlet_18(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
18`"""
self["Cross Sectional Areas of Air Channel Inlet 18"] = value
@property
def zone_19_name(self):
"""field `Zone 19 Name`
Args:
value (str): value for IDD Field `Zone 19 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_19_name` or None if not set
"""
return self["Zone 19 Name"]
@zone_19_name.setter
def zone_19_name(self, value=None):
"""Corresponds to IDD field `Zone 19 Name`"""
self["Zone 19 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_19(self):
"""field `Distance from Top of Thermal Chimney to Inlet 19`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 19`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_19` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 19"]
@distance_from_top_of_thermal_chimney_to_inlet_19.setter
def distance_from_top_of_thermal_chimney_to_inlet_19(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 19`"""
self["Distance from Top of Thermal Chimney to Inlet 19"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_19(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 19`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 19`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_19` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 19"]
@relative_ratios_of_air_flow_rates_passing_through_zone_19.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_19(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 19`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 19"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_19(self):
"""field `Cross Sectional Areas of Air Channel Inlet 19`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 19`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_19` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 19"]
@cross_sectional_areas_of_air_channel_inlet_19.setter
def cross_sectional_areas_of_air_channel_inlet_19(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
19`"""
self["Cross Sectional Areas of Air Channel Inlet 19"] = value
@property
def zone_20_name(self):
"""field `Zone 20 Name`
Args:
value (str): value for IDD Field `Zone 20 Name`
Raises:
ValueError: if `value` is not a valid value
Returns:
str: the value of `zone_20_name` or None if not set
"""
return self["Zone 20 Name"]
@zone_20_name.setter
def zone_20_name(self, value=None):
"""Corresponds to IDD field `Zone 20 Name`"""
self["Zone 20 Name"] = value
@property
def distance_from_top_of_thermal_chimney_to_inlet_20(self):
"""field `Distance from Top of Thermal Chimney to Inlet 20`
| Units: m
Args:
value (float): value for IDD Field `Distance from Top of Thermal Chimney to Inlet 20`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `distance_from_top_of_thermal_chimney_to_inlet_20` or None if not set
"""
return self["Distance from Top of Thermal Chimney to Inlet 20"]
@distance_from_top_of_thermal_chimney_to_inlet_20.setter
def distance_from_top_of_thermal_chimney_to_inlet_20(self, value=None):
"""Corresponds to IDD field `Distance from Top of Thermal Chimney to
Inlet 20`"""
self["Distance from Top of Thermal Chimney to Inlet 20"] = value
@property
def relative_ratios_of_air_flow_rates_passing_through_zone_20(self):
"""field `Relative Ratios of Air Flow Rates Passing through Zone 20`
| value <= 1.0
Args:
value (float): value for IDD Field `Relative Ratios of Air Flow Rates Passing through Zone 20`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `relative_ratios_of_air_flow_rates_passing_through_zone_20` or None if not set
"""
return self[
"Relative Ratios of Air Flow Rates Passing through Zone 20"]
@relative_ratios_of_air_flow_rates_passing_through_zone_20.setter
def relative_ratios_of_air_flow_rates_passing_through_zone_20(
self,
value=None):
"""Corresponds to IDD field `Relative Ratios of Air Flow Rates Passing
through Zone 20`"""
self[
"Relative Ratios of Air Flow Rates Passing through Zone 20"] = value
@property
def cross_sectional_areas_of_air_channel_inlet_20(self):
"""field `Cross Sectional Areas of Air Channel Inlet 20`
| Units: m2
Args:
value (float): value for IDD Field `Cross Sectional Areas of Air Channel Inlet 20`
Raises:
ValueError: if `value` is not a valid value
Returns:
float: the value of `cross_sectional_areas_of_air_channel_inlet_20` or None if not set
"""
return self["Cross Sectional Areas of Air Channel Inlet 20"]
@cross_sectional_areas_of_air_channel_inlet_20.setter
def cross_sectional_areas_of_air_channel_inlet_20(self, value=None):
"""Corresponds to IDD field `Cross Sectional Areas of Air Channel Inlet
20`"""
self["Cross Sectional Areas of Air Channel Inlet 20"] = value

# ---------------------------------------------------------------------------
# File: code/visualize_zind_cli.py
# ---------------------------------------------------------------------------
# """CLI script to visualize & validate data for the public-facing Zillow Indoor Dataset (ZInD).
#
# Validation includes:
# (1) required JSON fields are presented
# (2) verify non self-intersection of room floor_plan_layouts
# (3) verify that windows/doors/openings lie on the room layout geometry
# (4) verify that windows/doors/openings are defined by two points (left/right boundaries)
# (5) verify that panos_layouts are RGB images with valid FoV ratio (2:1)
#
# Visualization includes:
# (1) render the top-down floor map projection: merged room floor_plan_layouts,WDO and camera centers
# (2) render the room floor_plan_layouts and windows/doors/openings on the pano
#
# Example usage (1): Render all layouts on primary and secondary panos.
# python visualize_zind_cli.py -i <input_folder> -o <output_folder> --visualize-layout --visualize-floor-plan \
# --raw --complete --visible --primary --secondary
#
# Example usage (2): Render all vector layouts using merger (based on raw or complete) and the final redraw layouts.
# python visualize_zind_cli.py -i <input_folder> -o <output_folder> --visualize-floor-plan --redraw --complete --raw
#
# Example usage (3): Render the raster to vector alignments using merger (based on raw or complete) and final redraw.
# python visualize_zind_cli.py -i <input_folder> -o <output_folder> --visualize-raster --redraw --complete --raw
#
import argparse
import logging
import os
import sys
import traceback
from pathlib import Path
from typing import Dict, Any
from floor_plan import FloorPlan
from render import (
render_room_vertices_on_panos,
render_jpg_image,
render_raster_to_vector_alignment,
)
from tqdm import tqdm
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
LOG = logging.getLogger(__name__)
RENDER_FOLDER = "render_data"
def validate_and_render(
zillow_floor_plan: "FloorPlan",
*,
input_folder: str,
output_folder: str,
args: Dict[str, Any]
):
"""Validate and render various ZInD elements, e.g.
1. Primary/secondary layout and WDO
2. Raw/complete/visible layouts
3. Top-down merger results (draft floor-plan)
4. Top-down redraw results (final floor-plan)
5. Raster to vector alignment results.
:param zillow_floor_plan: ZInD floor plan object.
:param input_folder: Input folder of the current tour.
:param output_folder: Folder where the renderings will be saved.
:param args: Input arguments to the script.
:return: None
"""
# Get the types of floor_plan_layouts that we should render.
geometry_to_visualize = []
if args.raw:
geometry_to_visualize.append("raw")
if args.complete:
geometry_to_visualize.append("complete")
if args.visible:
geometry_to_visualize.append("visible")
if args.redraw:
geometry_to_visualize.append("redraw")
# Get the types of panos_layouts that we should render.
panos_to_visualize = []
if args.primary:
panos_to_visualize.append("primary")
if args.secondary:
panos_to_visualize.append("secondary")
# Render the room shape layouts + WDO on top of the pano textures.
if args.visualize_layout:
for geometry_type in geometry_to_visualize:
if geometry_type == "redraw":
continue
for pano_type in panos_to_visualize:
output_folder_layout = os.path.join(
output_folder, "layout", geometry_type, pano_type
)
os.makedirs(output_folder_layout, exist_ok=True)
panos_list = zillow_floor_plan.panos_layouts[geometry_type][pano_type]
render_room_vertices_on_panos(
input_folder=zillow_floor_plan.input_folder,
panos_list=panos_list,
output_folder=output_folder_layout,
)
# Render the top-down draft floor plan, result of the merger stage.
if args.visualize_floor_plan:
output_folder_floor_plan = os.path.join(output_folder, "floor_plan")
os.makedirs(output_folder_floor_plan, exist_ok=True)
for geometry_type in geometry_to_visualize:
if geometry_type == "visible":
continue
zind_dict = zillow_floor_plan.floor_plan_layouts[geometry_type]
for floor_id, zind_poly_list in zind_dict.items():
output_file_name = os.path.join(
output_folder_floor_plan,
"vector_{}_layout_{}.jpg".format(geometry_type, floor_id),
)
render_jpg_image(
polygon_list=zind_poly_list, jpg_file_name=output_file_name
)
# Render vector geometry on top of the raster floor plan image.
if args.visualize_raster:
output_folder_floor_plan_alignment = os.path.join(
output_folder, "floor_plan_raster_to_vector_alignment"
)
os.makedirs(output_folder_floor_plan_alignment, exist_ok=True)
for geometry_type in geometry_to_visualize:
if geometry_type == "visible":
continue
for (
floor_id,
raster_to_vector_transformation,
) in zillow_floor_plan.floor_plan_to_redraw_transformation.items():
floor_plan_image_path = os.path.join(
input_folder, zillow_floor_plan.floor_plan_image_path[floor_id]
)
zind_poly_list = zillow_floor_plan.floor_plan_layouts[geometry_type][
floor_id
]
output_file_name = os.path.join(
output_folder_floor_plan_alignment,
"raster_to_vector_{}_layout_{}.jpg".format(geometry_type, floor_id),
)
render_raster_to_vector_alignment(
zind_poly_list,
raster_to_vector_transformation,
floor_plan_image_path,
output_file_name,
)
def main():
parser = argparse.ArgumentParser(
description="Visualize & validate Zillow Indoor Dataset (ZInD)"
)
parser.add_argument(
"--input",
"-i",
help="Input JSON file (or folder with ZInD data)",
required=True,
)
parser.add_argument(
"--output",
"-o",
help="Output folder where rendered data will be saved to",
required=True,
)
parser.add_argument(
"--visualize-layout",
action="store_true",
help="Render room vertices and WDO on panoramas.",
)
parser.add_argument(
"--visualize-floor-plan",
action="store_true",
help="Render the floor plans as top-down projections with floor plan layouts and WDO elements.",
)
parser.add_argument(
"--visualize-raster",
action="store_true",
help="Render the vector floor plan (draft or final) on the raster floor plan image.",
)
    parser.add_argument(
        "--max-tours", type=float, default=float("inf"), help="Max tours to process."
    )
parser.add_argument(
"--primary", action="store_true", help="Visualize primary panoramas."
)
parser.add_argument(
"--secondary", action="store_true", help="Visualize secondary panoramas."
)
parser.add_argument("--raw", action="store_true", help="Visualize raw layout.")
parser.add_argument(
"--complete", action="store_true", help="Visualize complete layout."
)
parser.add_argument(
"--visible", action="store_true", help="Visualize visible layout."
)
parser.add_argument(
"--redraw", action="store_true", help="Visualize 2D redraw geometry."
)
parser.add_argument(
"--debug", "-d", action="store_true", help="Set log level to DEBUG"
)
args = parser.parse_args()
if args.debug:
LOG.setLevel(logging.DEBUG)
    input_path = args.input
    # Useful to debug, by restricting the number of tours to process.
    max_tours_to_process = args.max_tours
    # Collect all the feasible input JSON files.
    input_files_list = [input_path]
    if Path(input_path).is_dir():
        input_files_list = sorted(Path(input_path).glob("**/zind_data.json"))
num_failed = 0
num_success = 0
failed_tours = []
for input_file in tqdm(input_files_list, desc="Validating ZInD data"):
# Try loading and validating the file.
try:
zillow_floor_plan = FloorPlan(input_file)
            current_input_folder = str(Path(input_file).parent)
current_output_folder = os.path.join(
args.output, RENDER_FOLDER, str(Path(input_file).parent.stem)
)
os.makedirs(current_output_folder, exist_ok=True)
validate_and_render(
zillow_floor_plan,
input_folder=current_input_folder,
output_folder=current_output_folder,
args=args,
)
num_success += 1
if num_success >= max_tours_to_process:
LOG.info("Max tours to process reached {}".format(num_success))
break
except Exception as ex:
failed_tours.append(str(Path(input_file).parent.stem))
num_failed += 1
track = traceback.format_exc()
LOG.warning("Error validating {}: {}".format(input_file, str(ex)))
LOG.debug(track)
continue
if num_failed > 0:
LOG.warning("Failed to validate: {}".format(num_failed))
LOG.debug("Failed_tours: {}".format(failed_tours))
else:
LOG.info("All ZInD validated successfully")
if __name__ == "__main__":
main()
| StarcoderdataPython |
1629968 | <filename>csgogsi/constants.py
"""
Use these variable to easily compare the values of the payload
"""
NULL: int = -1
NOT_IMPLEMENTED_YET = NotImplemented  # NotImplemented is a sentinel, not an Exception, so no type annotation here
ROUND_WIN_T_BOMB: str = "t_win_bomb"
ROUND_WIN_T_ELIMINATIONS: str = "t_win_elimination"
ROUND_WIN_CT_DEFUSE: str = "ct_win_defuse"
ROUND_WIN_CT_ELIMINATIONS: str = "ct_win_elimination"
ROUND_PHASE_FREEZETIME: str = "freezetime"
ROUND_PHASE_LIVE: str = "live"
ROUND_PHASE_OVER: str = "over"
BOMB_CARRIED: str = "carried"
BOMB_PLANTING: str = "planting"
BOMB_PLANTED: str = "planted"
BOMB_DEFUSED: str = "defused"
BOMB_EXPLODED: str = "exploded"
WIN_TEAM_T: str = "T"
WIN_TEAM_CT: str = "CT"
TEAM_T: str = WIN_TEAM_T
TEAM_CT: str = WIN_TEAM_CT
NO_TEAM: str = ""
PLAYER_NO_CLAN: str = " No clan"  # leading space is deliberate: no clan tag can start with a space, so this cannot collide
NAME_UNCONNECTED: int = -2
PLAYER_ACTIVITY_PLAYING: str = "playing"
PLAYER_ACTIVITY_MENU: str = "menu"
PLAYER_ACTIVITY_TEXTINPUT: str = "textinput"
PLAYER_ACTIVITY_UNKNOWN: str = "unknown"
WEAPON_HOLSTERED: str = "holstered"
WEAPON_INACTIVE: str = WEAPON_HOLSTERED
WEAPON_ACTIVE: str = "active"
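# Minimal usage sketch (hedged): `payload` below is a hypothetical dict shaped like a
# parsed CS:GO game-state-integration POST body; the key layout is an assumption and
# only the constant values themselves come from this module.
if __name__ == "__main__":
    payload = {"round": {"phase": "live", "bomb": "planted"}}
    if payload["round"]["phase"] == ROUND_PHASE_LIVE:
        print("Round is live; bomb planted:", payload["round"].get("bomb") == BOMB_PLANTED)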
| StarcoderdataPython |
42642 | <reponame>ictcubeMENA/Training_one
import main
import unittest
class testsheep(unittest.TestCase):
def testing(self):
array1 = [True, True, True, False,
True, True, True, True ,
True, False, True, False,
True, False, False, True ,
True, True, True, True ,
                  False, False, True, True]
        self.assertEqual(main.count_sheeps(array1), 17,
                         "There are 17 sheep in total, not %s" % main.count_sheeps(array1))
if __name__ == '__main__':
unittest.main() | StarcoderdataPython |
1761857 | import math
import numpy as np
import os
import datetime
import torch
import torch.optim as optim
import torch.nn.functional as F
# from tqdm import tqdm
from torchvision import transforms, datasets
from net import Net
from itertools import takewhile
import matplotlib.pyplot as plt
# MAX_SAVEPOINTS = 10
CLASSES = ('plane', 'car', 'bird', 'cat',
'deer', 'dog', 'frog', 'horse', 'ship', 'truck')
PRINT_AFTER_X_BATCHES = 50
class Training():
def __init__(self, lr=0.0001, momentum=0.0, savepoint_dir="savepoints", sp_serial=-1, no_cuda=False, batch_size=10, num_workers=2, weight_decay=0.0):
self.batch_size = batch_size
self.num_workers = num_workers
self.sp_serial = sp_serial
# self.savepoint_dir = savepoint_dir
self.net = Net(classes_number=len(CLASSES))
if (not no_cuda) and torch.cuda.is_available():
self.net.cuda()
self.device = "cuda"
print(f"Device :: CUDA {torch.cuda.get_device_name()}")
else:
self.device = "cpu"
print(f"Device :: CPU")
# TODO: dynamic learning rate
# Define optimizer AFTER device is set
self.optimizer = optim.RMSprop(self.net.parameters(), lr=lr, momentum=momentum, weight_decay=weight_decay)
# self.optimizer = optim.Adam(self.net.parameters(), lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False)
self.criterion = torch.nn.CrossEntropyLoss()
self.transforms = transforms.Compose([
transforms.Grayscale(1),
transforms.RandomAffine(0, translate=(.1, .1)),
transforms.ToTensor(),
transforms.Normalize((0,), (1,)),
# TODO: ZCA whitening with: transforms.LinearTransformation()
])
'''
# load savepoints if available
savepoints = os.listdir(self.savepoint_dir) if os.path.isdir(
self.savepoint_dir) else []
if not savepoints == []:
self._loadSavepoint(savepoints)
else:
print("No savepoints found!")
'''
# TODO: Use actual dataset
self.trainset = datasets.CIFAR10(root='./data', train=True, download=True, transform=self.transforms)
self.trainloader = torch.utils.data.DataLoader(self.trainset, batch_size=self.batch_size, shuffle=True, num_workers=self.num_workers)
self.testset = datasets.CIFAR10(root='./data', train=False, download=True, transform=self.transforms)
self.testloader = torch.utils.data.DataLoader(self.testset, batch_size=self.batch_size, shuffle=False, num_workers=self.num_workers)
def run(self, epochs=1):
# TODO: Save and load epochs from savepoint
while True:
print("Starting training!")
self.net.train() # switch to train mode
# for each epoch
for epoch in range(epochs):
print(f"Epoch {epoch+1} of {epochs}:")
running_loss = 0.0
# for each batch
for i, data in enumerate(self.trainloader):
inputs, targets = data
# Show first image for testing transforms
# for index, i in enumerate(inputs):
# img = i.numpy()[0]
# plt.imshow(img, cmap="gray")
# plt.title(CLASSES[targets[index]])
# plt.show()
# exit()
if self.device == "cuda":
inputs = inputs.cuda()
targets = targets.cuda()
# Forward pass
outputs = self.net(inputs)
loss = self.criterion(outputs, targets)
if math.isnan(loss.item()):
print(" ############# Loss is NaN #############")
print("Outputs: ")
print(outputs)
print("Loss: ")
print(loss)
exit(-1)
# Backpropagation pass
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
# if math.isnan(loss.item()):
# print(loss)
# print(outputs)
# exit()
running_loss += loss.item()
if i % PRINT_AFTER_X_BATCHES == PRINT_AFTER_X_BATCHES-1:
print('[%d, %5d] loss: %.3f' %
(epoch + 1, i + 1, running_loss / PRINT_AFTER_X_BATCHES))
# print(outputs)
running_loss = 0.0
# self._makeSavepoint()
print("Finished training!")
self.evaluate()
def evaluate(self):
self.net.eval()
correct = 0
total = 0
        with torch.no_grad():
            for data in self.testloader:
                images, labels = data
                # move the batch to the same device as the model
                if self.device == "cuda":
                    images = images.cuda()
                    labels = labels.cuda()
                outputs = self.net(images)
                _, predicted = torch.max(outputs.data, 1)
                total += labels.size(0)
                correct += (predicted == labels).sum().item()
        print("Accuracy of the network on %d test images: %d %%" %
              (total, 100 * correct / total))
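    # Usage sketch (assumed hyperparameters, not taken from the source):
    # trainer = Training(lr=1e-4, batch_size=32, no_cuda=True)
    # trainer.run(epochs=2)  # run() trains, then calls evaluate() to report test accuracy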
'''
def _loadSavepoint(self, savepoints):
if not os.path.isdir(self.savepoint_dir):
return
target_file = None
ser_files = self._getSavepointList()
if len(ser_files) == 0:
print("No existing savepoints!")
return
if self.sp_serial > -1:
for n, f in ser_files:
if n == self.sp_serial:
target_file = f
else:
self.sp_serial, target_file = ser_files[-1]
print(f"Loading progress from {target_file}!")
self.net.load_state_dict(torch.load(
os.path.join(self.savepoint_dir, target_file)))
self.net.eval()
def _makeSavepoint(self):
if not os.path.isdir(self.savepoint_dir):
os.mkdir(self.savepoint_dir)
target_path = os.path.join(
self.savepoint_dir, self._getNextSavepointPath())
print(f"Saving progress in {target_path}!")
torch.save(self.net.state_dict(), target_path)
self._removeOldSavepoints()
def _getSavepointList(self):
# only look @ .pt and .pth files
path_files = [f for f in os.listdir(self.savepoint_dir) if f[-4:]
== ".pth" or f[-3:] == ".pt"]
# parse serial number
ser_files = [(int(''.join([t for t in takewhile(lambda x: x != '_', f)])), f)
for f in path_files]
# sort in place
ser_files.sort()
return ser_files
def _getNextSavepointPath(self):
sn = self.sp_serial + 1
fn = "%03d_savepoint.pth" % sn
current_files = os.listdir(self.savepoint_dir)
while fn in current_files:
sn = sn + 1
fn = "%03d_savepoint.pth" % sn
self.sp_serial = sn
return fn
def _removeOldSavepoints(self):
files = self._getSavepointList()
# files :: [(sn :: Int, path :: String)] sorted
while len(files) > MAX_SAVEPOINTS:
t = files[0][1]
os.remove(os.path.join(self.savepoint_dir, t))
print(
f"Removing old savepoint: {t}")
files = files[1:]
''' | StarcoderdataPython |
3355393 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from econml._ortho_learner import _OrthoLearner, _crossfit
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression, LassoCV, Lasso
from sklearn.model_selection import KFold
import numpy as np
import unittest
import joblib
import pytest
class TestOrthoLearner(unittest.TestCase):
def test_crossfit(self):
class Wrapper:
def __init__(self, model):
self._model = model
def fit(self, X, y, Q, W=None):
self._model.fit(X, y)
return self
def predict(self, X, y, Q, W=None):
return self._model.predict(X), y - self._model.predict(X), X
def score(self, X, y, Q, W=None):
return self._model.score(X, y)
np.random.seed(123)
X = np.random.normal(size=(5000, 3))
y = X[:, 0] + np.random.normal(size=(5000,))
folds = list(KFold(2).split(X, y))
model = Lasso(alpha=0.01)
nuisance, model_list, fitted_inds, scores = _crossfit(Wrapper(model),
folds,
X, y, y, W=y, Z=None)
np.testing.assert_allclose(nuisance[0][folds[0][1]],
model.fit(X[folds[0][0]], y[folds[0][0]]).predict(X[folds[0][1]]))
np.testing.assert_allclose(nuisance[0][folds[0][0]],
model.fit(X[folds[0][1]], y[folds[0][1]]).predict(X[folds[0][0]]))
np.testing.assert_allclose(scores[0][0], model.fit(X[folds[0][0]], y[folds[0][0]]).score(X[folds[0][1]],
y[folds[0][1]]))
np.testing.assert_allclose(scores[0][1], model.fit(X[folds[0][1]], y[folds[0][1]]).score(X[folds[0][0]],
y[folds[0][0]]))
coef_ = np.zeros(X.shape[1])
coef_[0] = 1
[np.testing.assert_allclose(coef_, mdl._model.coef_, rtol=0, atol=0.08) for mdl in model_list]
np.testing.assert_array_equal(fitted_inds, np.arange(X.shape[0]))
np.random.seed(123)
X = np.random.normal(size=(5000, 3))
y = X[:, 0] + np.random.normal(size=(5000,))
folds = list(KFold(2).split(X, y))
model = Lasso(alpha=0.01)
nuisance, model_list, fitted_inds, scores = _crossfit(Wrapper(model),
folds,
X, y, None, W=y, Z=None)
np.testing.assert_allclose(nuisance[0][folds[0][1]],
model.fit(X[folds[0][0]], y[folds[0][0]]).predict(X[folds[0][1]]))
np.testing.assert_allclose(nuisance[0][folds[0][0]],
model.fit(X[folds[0][1]], y[folds[0][1]]).predict(X[folds[0][0]]))
np.testing.assert_allclose(scores[0][0], model.fit(X[folds[0][0]], y[folds[0][0]]).score(X[folds[0][1]],
y[folds[0][1]]))
np.testing.assert_allclose(scores[0][1], model.fit(X[folds[0][1]], y[folds[0][1]]).score(X[folds[0][0]],
y[folds[0][0]]))
coef_ = np.zeros(X.shape[1])
coef_[0] = 1
[np.testing.assert_allclose(coef_, mdl._model.coef_, rtol=0, atol=0.08) for mdl in model_list]
np.testing.assert_array_equal(fitted_inds, np.arange(X.shape[0]))
np.random.seed(123)
X = np.random.normal(size=(5000, 3))
y = X[:, 0] + np.random.normal(size=(5000,))
folds = list(KFold(2).split(X, y))
model = Lasso(alpha=0.01)
nuisance, model_list, fitted_inds, scores = _crossfit(Wrapper(model),
folds,
X, y, None, W=None, Z=None)
np.testing.assert_allclose(nuisance[0][folds[0][1]],
model.fit(X[folds[0][0]], y[folds[0][0]]).predict(X[folds[0][1]]))
np.testing.assert_allclose(nuisance[0][folds[0][0]],
model.fit(X[folds[0][1]], y[folds[0][1]]).predict(X[folds[0][0]]))
np.testing.assert_allclose(scores[0][0], model.fit(X[folds[0][0]], y[folds[0][0]]).score(X[folds[0][1]],
y[folds[0][1]]))
np.testing.assert_allclose(scores[0][1], model.fit(X[folds[0][1]], y[folds[0][1]]).score(X[folds[0][0]],
y[folds[0][0]]))
coef_ = np.zeros(X.shape[1])
coef_[0] = 1
[np.testing.assert_allclose(coef_, mdl._model.coef_, rtol=0, atol=0.08) for mdl in model_list]
np.testing.assert_array_equal(fitted_inds, np.arange(X.shape[0]))
class Wrapper:
def __init__(self, model):
self._model = model
def fit(self, X, y, W=None):
self._model.fit(X, y)
return self
def predict(self, X, y, W=None):
return self._model.predict(X), y - self._model.predict(X), X
np.random.seed(123)
X = np.random.normal(size=(5000, 3))
y = X[:, 0] + np.random.normal(size=(5000,))
folds = [(np.arange(X.shape[0] // 2), np.arange(X.shape[0] // 2, X.shape[0])),
(np.arange(X.shape[0] // 2), np.arange(X.shape[0] // 2, X.shape[0]))]
model = Lasso(alpha=0.01)
with pytest.raises(AttributeError) as e_info:
nuisance, model_list, fitted_inds, scores = _crossfit(Wrapper(model),
folds,
X, y, W=y, Z=None)
np.random.seed(123)
X = np.random.normal(size=(5000, 3))
y = X[:, 0] + np.random.normal(size=(5000,))
folds = [(np.arange(X.shape[0]), np.arange(X.shape[0]))]
model = Lasso(alpha=0.01)
with pytest.raises(AttributeError) as e_info:
nuisance, model_list, fitted_inds, scores = _crossfit(Wrapper(model),
folds,
X, y, W=y, Z=None)
def test_ol(self):
class ModelNuisance:
def __init__(self, model_t, model_y):
self._model_t = model_t
self._model_y = model_y
def fit(self, Y, T, W=None):
self._model_t.fit(W, T)
self._model_y.fit(W, Y)
return self
def predict(self, Y, T, W=None):
return Y - self._model_y.predict(W), T - self._model_t.predict(W)
class ModelFinal:
def __init__(self):
return
def fit(self, Y, T, W=None, nuisances=None):
Y_res, T_res = nuisances
self.model = LinearRegression(fit_intercept=False).fit(T_res.reshape(-1, 1), Y_res)
return self
def predict(self, X=None):
return self.model.coef_[0]
def score(self, Y, T, W=None, nuisances=None):
Y_res, T_res = nuisances
return np.mean((Y_res - self.model.predict(T_res.reshape(-1, 1)))**2)
np.random.seed(123)
X = np.random.normal(size=(10000, 3))
sigma = 0.1
y = X[:, 0] + X[:, 1] + np.random.normal(0, sigma, size=(10000,))
est = _OrthoLearner(ModelNuisance(LinearRegression(), LinearRegression()), ModelFinal(),
n_splits=2, discrete_treatment=False, discrete_instrument=False, categories='auto',
random_state=None)
est.fit(y, X[:, 0], W=X[:, 1:])
np.testing.assert_almost_equal(est.const_marginal_effect(), 1, decimal=3)
np.testing.assert_array_almost_equal(est.effect(), np.ones(1), decimal=3)
np.testing.assert_array_almost_equal(est.effect(T0=0, T1=10), np.ones(1) * 10, decimal=2)
np.testing.assert_almost_equal(est.score(y, X[:, 0], W=X[:, 1:]), sigma**2, decimal=3)
np.testing.assert_almost_equal(est.score_, sigma**2, decimal=3)
np.testing.assert_almost_equal(est.model_final.model.coef_[0], 1, decimal=3)
# Nuisance model has no score method, so nuisance_scores_ should be none
assert est.nuisance_scores_ is None
# Test non keyword based calls to fit
np.random.seed(123)
X = np.random.normal(size=(10000, 3))
sigma = 0.1
y = X[:, 0] + X[:, 1] + np.random.normal(0, sigma, size=(10000,))
est = _OrthoLearner(ModelNuisance(LinearRegression(), LinearRegression()), ModelFinal(),
n_splits=2, discrete_treatment=False, discrete_instrument=False,
categories='auto', random_state=None)
# test non-array inputs
est.fit(list(y), list(X[:, 0]), X=None, W=X[:, 1:])
np.testing.assert_almost_equal(est.const_marginal_effect(), 1, decimal=3)
np.testing.assert_array_almost_equal(est.effect(), np.ones(1), decimal=3)
np.testing.assert_array_almost_equal(est.effect(T0=0, T1=10), np.ones(1) * 10, decimal=2)
np.testing.assert_almost_equal(est.score(y, X[:, 0], None, X[:, 1:]), sigma**2, decimal=3)
np.testing.assert_almost_equal(est.score_, sigma**2, decimal=3)
np.testing.assert_almost_equal(est.model_final.model.coef_[0], 1, decimal=3)
# Test custom splitter
np.random.seed(123)
X = np.random.normal(size=(10000, 3))
sigma = 0.1
y = X[:, 0] + X[:, 1] + np.random.normal(0, sigma, size=(10000,))
est = _OrthoLearner(ModelNuisance(LinearRegression(), LinearRegression()), ModelFinal(),
n_splits=KFold(n_splits=3),
discrete_treatment=False, discrete_instrument=False,
categories='auto', random_state=None)
est.fit(y, X[:, 0], X=None, W=X[:, 1:])
np.testing.assert_almost_equal(est.const_marginal_effect(), 1, decimal=3)
np.testing.assert_array_almost_equal(est.effect(), np.ones(1), decimal=3)
np.testing.assert_array_almost_equal(est.effect(T0=0, T1=10), np.ones(1) * 10, decimal=2)
np.testing.assert_almost_equal(est.score(y, X[:, 0], W=X[:, 1:]), sigma**2, decimal=3)
np.testing.assert_almost_equal(est.score_, sigma**2, decimal=3)
np.testing.assert_almost_equal(est.model_final.model.coef_[0], 1, decimal=3)
# Test incomplete set of test folds
np.random.seed(123)
X = np.random.normal(size=(10000, 3))
sigma = 0.1
y = X[:, 0] + X[:, 1] + np.random.normal(0, sigma, size=(10000,))
folds = [(np.arange(X.shape[0] // 2), np.arange(X.shape[0] // 2, X.shape[0]))]
est = _OrthoLearner(ModelNuisance(LinearRegression(), LinearRegression()), ModelFinal(),
n_splits=KFold(n_splits=3), discrete_treatment=False,
discrete_instrument=False, categories='auto', random_state=None)
est.fit(y, X[:, 0], X=None, W=X[:, 1:])
np.testing.assert_almost_equal(est.const_marginal_effect(), 1, decimal=3)
np.testing.assert_array_almost_equal(est.effect(), np.ones(1), decimal=3)
np.testing.assert_array_almost_equal(est.effect(T0=0, T1=10), np.ones(1) * 10, decimal=2)
np.testing.assert_almost_equal(est.score(y, X[:, 0], W=X[:, 1:]), sigma**2, decimal=3)
np.testing.assert_almost_equal(est.score_, sigma**2, decimal=3)
np.testing.assert_almost_equal(est.model_final.model.coef_[0], 1, decimal=3)
def test_ol_no_score_final(self):
class ModelNuisance:
def __init__(self, model_t, model_y):
self._model_t = model_t
self._model_y = model_y
def fit(self, Y, T, W=None):
self._model_t.fit(W, T)
self._model_y.fit(W, Y)
return self
def predict(self, Y, T, W=None):
return Y - self._model_y.predict(W), T - self._model_t.predict(W)
class ModelFinal:
def __init__(self):
return
def fit(self, Y, T, W=None, nuisances=None):
Y_res, T_res = nuisances
self.model = LinearRegression(fit_intercept=False).fit(T_res.reshape(-1, 1), Y_res)
return self
def predict(self, X=None):
return self.model.coef_[0]
np.random.seed(123)
X = np.random.normal(size=(10000, 3))
sigma = 0.1
y = X[:, 0] + X[:, 1] + np.random.normal(0, sigma, size=(10000,))
est = _OrthoLearner(ModelNuisance(LinearRegression(), LinearRegression()), ModelFinal(),
n_splits=2, discrete_treatment=False, discrete_instrument=False,
categories='auto', random_state=None)
est.fit(y, X[:, 0], W=X[:, 1:])
np.testing.assert_almost_equal(est.const_marginal_effect(), 1, decimal=3)
np.testing.assert_array_almost_equal(est.effect(), np.ones(1), decimal=3)
np.testing.assert_array_almost_equal(est.effect(T0=0, T1=10), np.ones(1) * 10, decimal=2)
assert est.score_ is None
np.testing.assert_almost_equal(est.model_final.model.coef_[0], 1, decimal=3)
def test_ol_nuisance_scores(self):
class ModelNuisance:
def __init__(self, model_t, model_y):
self._model_t = model_t
self._model_y = model_y
def fit(self, Y, T, W=None):
self._model_t.fit(W, T)
self._model_y.fit(W, Y)
return self
def predict(self, Y, T, W=None):
return Y - self._model_y.predict(W), T - self._model_t.predict(W)
def score(self, Y, T, W=None):
return (self._model_t.score(W, Y), self._model_y.score(W, T))
class ModelFinal:
def __init__(self):
return
def fit(self, Y, T, W=None, nuisances=None):
Y_res, T_res = nuisances
self.model = LinearRegression(fit_intercept=False).fit(T_res.reshape(-1, 1), Y_res)
return self
def predict(self, X=None):
return self.model.coef_[0]
np.random.seed(123)
X = np.random.normal(size=(10000, 3))
sigma = 0.1
y = X[:, 0] + X[:, 1] + np.random.normal(0, sigma, size=(10000,))
est = _OrthoLearner(ModelNuisance(LinearRegression(), LinearRegression()), ModelFinal(),
n_splits=2, discrete_treatment=False, discrete_instrument=False,
categories='auto', random_state=None)
est.fit(y, X[:, 0], W=X[:, 1:])
np.testing.assert_almost_equal(est.const_marginal_effect(), 1, decimal=3)
np.testing.assert_array_almost_equal(est.effect(), np.ones(1), decimal=3)
np.testing.assert_array_almost_equal(est.effect(T0=0, T1=10), np.ones(1) * 10, decimal=2)
np.testing.assert_almost_equal(est.model_final.model.coef_[0], 1, decimal=3)
nuisance_scores_y = est.nuisance_scores_[0]
nuisance_scores_t = est.nuisance_scores_[1]
assert len(nuisance_scores_y) == len(nuisance_scores_t) == 2 # as many scores as splits
# y scores should be positive, since W predicts Y somewhat
# t scores might not be, since W and T are uncorrelated
np.testing.assert_array_less(0, nuisance_scores_y)
def test_ol_discrete_treatment(self):
class ModelNuisance:
def __init__(self, model_t, model_y):
self._model_t = model_t
self._model_y = model_y
def fit(self, Y, T, W=None):
self._model_t.fit(W, np.matmul(T, np.arange(1, T.shape[1] + 1)))
self._model_y.fit(W, Y)
return self
def predict(self, Y, T, W=None):
return Y - self._model_y.predict(W), T - self._model_t.predict_proba(W)[:, 1:]
class ModelFinal:
def __init__(self):
return
def fit(self, Y, T, W=None, nuisances=None):
Y_res, T_res = nuisances
self.model = LinearRegression(fit_intercept=False).fit(T_res.reshape(-1, 1), Y_res)
return self
def predict(self):
# theta needs to be of dimension (1, d_t) if T is (n, d_t)
return np.array([[self.model.coef_[0]]])
def score(self, Y, T, W=None, nuisances=None):
Y_res, T_res = nuisances
return np.mean((Y_res - self.model.predict(T_res.reshape(-1, 1)))**2)
np.random.seed(123)
X = np.random.normal(size=(10000, 3))
import scipy.special
from sklearn.linear_model import LogisticRegression
T = np.random.binomial(1, scipy.special.expit(X[:, 0]))
sigma = 0.01
y = T + X[:, 0] + np.random.normal(0, sigma, size=(10000,))
est = _OrthoLearner(ModelNuisance(LogisticRegression(solver='lbfgs'), LinearRegression()), ModelFinal(),
n_splits=2, discrete_treatment=True, discrete_instrument=False,
categories='auto', random_state=None)
est.fit(y, T, W=X)
np.testing.assert_almost_equal(est.const_marginal_effect(), 1, decimal=3)
np.testing.assert_array_almost_equal(est.effect(), np.ones(1), decimal=3)
np.testing.assert_almost_equal(est.score(y, T, W=X), sigma**2, decimal=3)
np.testing.assert_almost_equal(est.model_final.model.coef_[0], 1, decimal=3)
| StarcoderdataPython |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This script defines some useful functions to use in data analysis and visualization
@ <NAME> (<EMAIL>)
"""
def dl_ia_utils_change_directory(path):
    """ Change the working directory and make it importable
    :param path: e.g. 'path/to/app/'
    """
    import os
    import sys
    os.chdir(path)
    sys.path.insert(1, path)
def dl_ia_utils_set_up_logger(path):
""" Set up logger
:arg path: path where to store logs example: 'logs\\dl-ia-cla-predictive'
"""
import logging
logger = logging.getLogger(path)
logger.setLevel(logging.DEBUG)
    fh = logging.FileHandler('{}.log'.format(path))
    fh.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh.setFormatter(formatter)
    logger.addHandler(fh)
    logging.getLogger().addHandler(logging.StreamHandler())  # also display messages in console
    # logger.debug('debug message')
    # logger.info('info message')
    # logger.warning('warning message')
    # logger.error('error message')
    # logger.critical('critical message')
def dl_ia_utils_systems_info():
""" Function that shows the system properties
"""
import sys
from platform import python_version
print('Python version:{}'.format(python_version()))
print('Python system version:{}'.format(sys.version))
print('Path:{}'.format(sys.executable))
print('Python version info:{}'.format(sys.version_info))
def dl_ia_utils_config_plotly():
""" this function configures the plotly visualization
:return:
"""
import plotly.io as pio
import plotly.graph_objects as go
import plotly.express as px
pio.renderers.default = "browser"
def dl_ia_utils_config_matplotlib():
""" this function configures the matplotlib style
:return:
"""
from matplotlib import rc
rc('font', **{'family': 'sans-serif', 'sans-serif': ['Helvetica']})
rc('font', **{'family': 'serif', 'serif': ['Times']})
rc('text', usetex=True)
def dl_ia_utils_config_pandas():
"""
Allows to show all the columns of a dataframe in the console
Limit pandas warnings
"""
import pandas as pd
import numpy as np
pd.options.mode.chained_assignment = None # default='warn'
desired_width = 350
np.set_printoptions(linewidth=desired_width) # show dataframes in console
pd.set_option('display.max_columns', 10)
def dl_ia_utils_check_folder(path_folder):
""" check that exists a folder, and if not, create it
:param path_folder: string with the path
:return error: error code (0:good, 1:bad)
"""
import os
error = 0
try:
if not os.path.isdir(path_folder):
print('Creating folder: {} '.format(path_folder))
os.mkdir(path_folder)
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_check_folder: ' + str(exception_msg))
error = 1
return error
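# Usage sketch for dl_ia_utils_check_folder (folder name is illustrative):
# error = dl_ia_utils_check_folder('figures')  # creates ./figures if missing; returns 0 on success, 1 on error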
#############################################################
# ---------- DATA ANALYSIS ---------------------------------#
#############################################################
def dl_ia_utils_memory_usage(df):
""" Calculate and print the memory usage and shape by the dataframe
:param df:
:return:
"""
error = 0
try:
print('{} Data Frame Memory usage: {:2.2f} GB'.format('-' * 20, df.memory_usage(deep=True).sum() / 1000000000))
print('{} Data Frame Shape: {} '.format('-' * 20, df.shape))
except Exception as exception_msg:
error = 1
print('(!) Error in dl_ia_utils_memory_usage: ' + str(exception_msg))
return error
def dl_ia_utils_filter_by_std(df, variable, option):
    """ Filter the rows of a dataframe, keeping values within 1 or 2 standard deviations of the mean
    :param df: dataframe with the variable
    :param variable: column name to filter by
    :param option: 1 (keep within one std) or 2 (keep within two std)
    :return df_aux: filtered dataframe
    """
    if option == 2:
        df_aux = df[(df[variable] < (df[variable].mean() + 2 * df[variable].std()))
                    & (df[variable] > (df[variable].mean() - 2 * df[variable].std()))]
    elif option == 1:
        df_aux = df[(df[variable] < (df[variable].mean() + df[variable].std()))
                    & (df[variable] > (df[variable].mean() - df[variable].std()))]
    else:
        raise ValueError('option must be 1 or 2')
    print('Rows dropped:{} %'.format(round(100 * (1 - (len(df_aux) / len(df))), 3)))
    return df_aux
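# Usage sketch (assumes a dataframe `df` with a numeric 'FLOW' column; the name is illustrative):
# df_filtered = dl_ia_utils_filter_by_std(df, 'FLOW', option=2)  # keep values within two std of the mean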
def dl_ia_utils_subs_zeros_values(y):
""" subs zero values from from an array by values close to zeros 1e-10
e.g.: y = np.array([1,4,2,3,7,8,0,0,8,7,0,0,9,8])
:param y:
:return:
"""
import pandas as pd
df = pd.DataFrame({'y': y})
df.loc[df['y'] == 0, ['y']] = 1e-9
return df['y'].values
def dl_ia_utils_create_datestring(row):
"""
df['date'] = df.apply(lambda row: create_date(row), axis=1)
"""
try:
return row['TIMESTAMP'].strftime('%Y-%m-%d')
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_create_datestring: ' + str(exception_msg))
def dl_ia_utils_create_time_array(start, end, freq):
    """ function that creates an array of times
    :param start: string with the initial time (e.g.: 00:00:00)
    :param end: string with the end time (e.g.: 23:59:59)
    :param freq: string indicating the frequency (e.g.: 15min)
    :return: array of time
    """
    import pandas as pd
    t = pd.DataFrame({'t': pd.date_range(start=start, end=end, freq=freq)}).t.dt.date
    return t
def dl_ia_utils_create_date(row):
""" create date with year, month and day
:param row: lambda variable regarding columns of the dataframe
:return datetime:
"""
import pandas as pd
try:
return pd.Timestamp(int(row['YEAR']), int(row['MONTH']), int(row['DAY']))
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_create_date: ' + str(exception_msg))
def dl_ia_utils_create_time(row):
""" convert values of HOUR and MINUTE to datetime
:param row: lambda variable regarding columns of the dataframe
:return datetime:
"""
import datetime
try:
return datetime.time(int(row['HOUR']), int(row['MINUTE']), int(row['SECOND']))
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_create_time: ' + str(exception_msg))
def dl_ia_utils_create_timestamp(row):
""" create date with year, month and day
:param row: lambda variable regarding columns of the dataframe
:return datetime:
"""
import pandas as pd
try:
return pd.Timestamp(int(row['YEAR']), int(row['MONTH']), int(row['DAY']), int(row['HOUR']), int(row['MINUTE']))
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_create_timestamp: ' + str(exception_msg))
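# Usage sketch: build a TIMESTAMP column row-wise (assumes YEAR/MONTH/DAY/HOUR/MINUTE columns exist):
# df['TIMESTAMP'] = df.apply(dl_ia_utils_create_timestamp, axis=1)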
def dl_ia_utils_create_datetime(row):
""" create datetime with hour and minute
:param row: lambda variable regarding columns of the dataframe
:return datetime:
"""
import datetime
try:
return datetime.time(int(row['HOUR']), int(row['MINUTE']))
except Exception as exception_msg:
        print('(!) Error in dl_ia_utils_create_datetime: ' + str(exception_msg))
def dl_ia_utils_read_csv_per_chunks(path):
""" This function read a large csv file into a dataframe per chunks
:param path:
:return df: dataframe
"""
import pandas as pd
chunksize_ = 1000
error = 0
try:
TextFileReader = pd.read_csv(path, sep=";", chunksize=chunksize_)
dfList = []
for df in TextFileReader:
dfList.append(df)
df = pd.concat(dfList, sort=False)
return error, df
except Exception as exception_msg:
print("Error in read_csv_per_chunks {}".format(exception_msg))
# raise
error = 1
df = []
return error, df
def dl_ia_utils_vertical_translation(y):
""" detects in exist a zero value and translate the time series with the minimum value
:param y:
:return:
"""
import numpy as np
if np.isin(0, y):
# exists a zero value, find the minimum distinct from zero
delta = np.min(y[y > 0])
# vertical translation
# ym = y + delta
ym = y + 1
return ym
return y
def dl_ia_utils_get_unique_values(df_in):
""" this function calculate the unique values of the column of a data frame
:param df_in: dataframe with the columns of interest
:return dict_out: dictionary with unique values of the columns
"""
import numpy as np
dict_out = dict()
for column in df_in.columns:
dict_out[column] = np.sort(df_in[column].unique())
return dict_out
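# Usage sketch (column names are illustrative):
# uniques = dl_ia_utils_get_unique_values(df[['SEGMENT', 'HOUR']])
# uniques['HOUR']  # sorted array of the distinct hours found in the data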
def dl_ia_utils_quarter_classify(x):
""" classify a variabel x into four cuadrants
:param x: value with values in (0,60)
:return y: values with values in (1,2,3,4)
"""
if x <= 15:
y = 0
if 30 >= x > 15:
y = 15
if 45 >= x > 30:
y = 30
if x > 45:
y = 45
return y
def dl_ia_utils_quarter_groups(x):
""" classify a variabel x into four cuadrants
:param x: value with values in (0,60)
:return y: values with values in (1,2,3,4)
"""
if x <= 15:
y = 1
if 30 >= x > 15:
y = 2
if 45 >= x > 30:
y = 3
if x > 45:
y = 4
return y
def dl_ia_utils_check_null_values(df):
"""
:param df:
:return df:
"""
# check nans
if df.isna().sum().sum() > 0:
print('(!) NAN Values detected')
print(df.isna().sum())
df.dropna(inplace=True)
return df
elif df.isnull().sum().sum() > 0:
print('(!) NULLs Values detected')
print(df.isnull().sum())
df.dropna(inplace=True)
return df
else:
print('Everything ok')
return df
def dl_ia_utils_comm(msg):
""" Funtion to show mesages in terminal
:parm msg: meassge (str)
:return:
"""
print('{} {}'.format('-' * 20, msg))
#############################################################
# ------------------- EDA ---------------------------------#
#############################################################
def dl_ia_utils_check_descriptive_statistics(df):
""" calculate descriptive statiscs of a dataframe columns
:param df: dataframe with columns of interest
:return error: error code (0:ok, 1: something wrong)
"""
error = 0
try:
for variable in df.columns:
print('variable:{}{}'.format(' ' * 2, variable))
print('---------------')
print('Mean Value:{}{}'.format(' ' * 2, round(df[variable].mean(), 2)))
print('std Value:{}{}'.format(' ' * 3, round(df[variable].std(), 2)))
print('Q3.:{}{}'.format(' ' * 9, round(df[variable].quantile(0.75), 2)))
print('Max.:{}{}'.format(' ' * 8, round(df[variable].max(), 2)))
print('Q2 :{}{}'.format(' ' * 2, round(df[variable].median(), 2)))
print('Min.:{}{}'.format(' ' * 8, round(df[variable].min(), 2)))
print('Q1.:{}{}'.format(' ' * 9, round(df[variable].quantile(0.25), 2)))
            print('IQR.:{}{}'.format(' ' * 8, round(df[variable].quantile(0.75) - df[variable].quantile(0.25), 2)))
return error
except Exception as exception_msg:
print('{} (!) Error in dl_ia_utils_check_descriptive_statistics: '.format('-' * 20) + str(exception_msg))
error = 1
return error
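# Usage sketch: print summary statistics for selected numeric columns (names are illustrative):
# dl_ia_utils_check_descriptive_statistics(df[['FLOW', 'SPEED']])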
#############################################################
# ------------------ PLOTS ---------------------------------#
#############################################################
def dl_ia_utils_plot_timeseries(df, var_x, var_y):
"""
:param df:
:param var_x:
:param var_y:
:return:
"""
import plotly.graph_objects as go
show = True
print_ = True
fig = go.Figure()
fig.add_trace(go.Scatter(x=df[var_x],
y=df[var_y],
marker=dict(color='red'),
mode='markers+lines',
name=var_y))
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title='Time series',
xaxis_title=var_x,
yaxis_title=var_y,
showlegend=True
)
if show:
fig.show()
if print_:
fig.write_html("figures\\timeseries_{}.html".format(var_y))
def dl_ia_utils_plot_line(df, var_x, var_y, var_group):
"""
:param df:
:param var_x:
:param var_y:
:param var_group:
:return:
"""
import plotly.express as px
show = True
print_ = True
fig = px.line(df,
x=var_x,
y=var_y,
color=var_group,
)
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
if show:
fig.show()
if print_:
fig.write_html("figures\\line_plot_simple_{}_{}.html".format(var_x, var_y))
def dl_ia_utils_plot_marginal_dist_plot(df, var_x, var_y):
"""
:param df:
:param var_x:
:param var_y:
:return:
"""
import plotly.express as px
show = True
print_ = True
fig = px.density_heatmap(df,
x=var_x, # title="Click on the legend items!"
y=var_y, # add color="species",
marginal_x="box", # histogram, rug
marginal_y="violin")
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
if show:
fig.show()
if print_:
fig.write_html("figures\\plot_marginal_dist_plot_{}_{}.html".format(var_x, var_y))
def dl_ia_utils_plot_scatter_with_facets(df, var_x, var_y, var_color, var_group):
"""
:param df:
:param var_x:
:param var_y:
:param var_color:
:param var_group:
:return:
"""
import plotly.express as px
show = True
print_ = True
fig = px.scatter(df,
x=var_x,
y=var_y,
color=var_color,
facet_col=var_group,
marginal_x="box") # violin, histogram, rug
fig.update_layout(font=dict(family="Courier New, monospace",
size=18,
color="#7f7f7f"))
if show:
fig.show()
if print_:
fig.write_html("figures\\plot_scatter_with_facets_{}_{}_{}_{}.html".format(var_x, var_y, var_color, var_group))
def dl_ia_utils_plot_multi_timeseries(df, segments, seg, r2_desc, MAE_desc, r2_rf, MAE_rf, r2_nn, MAE_nn):
    """ Plot real data against descriptive, random forest and neural network predictions for one segment
    (df must contain the DATE, TOTAL_VEHICULOS and prediction columns used below)
    """
    import plotly.graph_objects as go
    start = '2018-05-01 00:00:00'
    end = '2018-05-15 00:00:00'
    df_aux = df[(df['DATE'] > start) & (df['DATE'] < end)]
fig = go.Figure()
fig.add_trace(go.Scatter(x=df_aux['DATE'],
y=df_aux['TOTAL_VEHICULOS'] * 4,
name='Real'))
fig.add_trace(go.Scatter(x=df_aux['DATE'],
y=df_aux['TOTAL_VEHICULOS_DESCRIPTIVE'] * 4,
name='Descriptive Model (R2:{:2.2f} / MAE:{:2.2f}%)'.format(r2_desc, MAE_desc)))
fig.add_trace(go.Scatter(x=df_aux['DATE'],
y=df_aux['TOTAL_VEHICLES_RF_PREDICTION'] * 4,
name='Random Forest Model (R2:{:2.2f} / MAE:{:2.2f}%)'.format(r2_rf,
MAE_rf)))
fig.add_trace(go.Scatter(x=df_aux['DATE'],
y=df_aux['TOTAL_VEHICLES_NN_PREDICTION'] * 4,
mode='lines',
marker_color='rgba(152, 0, 0, .8)',
name='Neural Network Model (R2:{:2.2f} / MAE:{:2.2f}%)'.format(r2_nn,
MAE_nn)))
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title='Predictive results - Segment: {} - Dates: {} to {}'.format(segments[seg],
start[0:10],
end[0:10]),
xaxis_title='Time (15 min. resolution)',
yaxis_title='Flow (veh./h)',
showlegend=True
)
fig.update_layout(legend=dict(x=0, y=-0.5, bgcolor="white"))
fig.show()
def dl_ia_utils_plot_multi_timeseries_with_slider(df, segments, seg, r2_desc, MAE_desc, r2_rf, MAE_rf):
    """ Plot real data against descriptive and random forest predictions with a range slider
    (df must contain the DATE, TOTAL_VEHICULOS and prediction columns used below)
    """
    import plotly.graph_objects as go
    fig = go.Figure()
fig.add_trace(go.Scatter(x=df['DATE'],
y=df['TOTAL_VEHICULOS'],
name='Real Data'))
fig.add_trace(go.Scatter(x=df['DATE'],
y=df['TOTAL_VEHICULOS_DESCRIPTIVE'],
name='Descriptive Model (R2:{:2.2f} / MAE:{:2.2f}%)'.format(r2_desc, MAE_desc)))
fig.add_trace(go.Scatter(x=df['DATE'],
y=df['TOTAL_VEHICLES_RF_PREDICTION'],
name='Random Forest Predictive Model (R2:{:2.2f} / MAE:{:2.2f}%)'.format(r2_rf,
MAE_rf)))
# Set x-axis title
fig.update_xaxes(title_text="Time")
# Set y-axes titles
fig.update_yaxes(title_text="Total Vehicles")
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title='AUSOL - Road Traffic Flow - Segment: {}'.format(segments[seg]),
showlegend=True
)
fig.update_layout(legend=dict(x=0, y=-1.0, bgcolor="white"))
# Add range slider
fig.update_layout(
xaxis=go.layout.XAxis(
rangeslider=dict(
visible=True
),
type="date"
)
)
fig.show()
def dl_ia_utils_plot_histogram(df, variable, n_bins_, label):
""" plot a histogram using plotly from a vairable in a dataframe
:param df: Data frame with the variable
:param variable: name of the variable (column name)
:param n_bins_: number of bins of the histogram
:param label: string with a name for the title
:return error: error code 0: everything ok, 1: something happened
"""
import plotly.express as px
import numpy as np
print_ = True
show = True
fontsize_ = 18
error = 0
try:
max_value = int(df[variable].max())
x_axis = np.arange(0, max_value, int(max_value / 20))
fig = px.histogram(df, x=variable, nbins=n_bins_, marginal="box")
fig.update_xaxes(title_text=variable)
fig.update_layout(font=dict(family="Courier New, monospace", size=fontsize_, color="#7f7f7f"))
fig.update_layout(title='Histogram - {} - {}'.format(label, variable))
fig.update_layout(showlegend=True)
fig.update_traces(opacity=0.9)
fig.update_layout(bargap=0.2) # gap between bars of adjacent location coordinates
fig.update_xaxes(ticktext=x_axis, tickvals=x_axis)
if print_:
fig.write_html("figures\\plot_histogram_{}.html".format(variable))
if show:
fig.show()
return error
except Exception as exception_msg:
print('(!) Error in plot_histogram: ' + str(exception_msg))
error = 1
return error
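# Usage sketch (assumes a dataframe `df` with a numeric 'FLOW' column; names are illustrative):
# dl_ia_utils_plot_histogram(df, 'FLOW', n_bins_=50, label='Segment 1')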
def dl_ia_utils_time_series_plot(time_index, y1, label1, title):
""" Plot one single time series
    :param time_index: time index of the series
    :param y1: values of the series
    :param label1: label for the y axis and legend
    :param title: title of the plot
    :return:
"""
import plotly.graph_objects as go
print_ = True
show = True
# title = 'time_series_comparison'
fig = go.Figure()
fig.add_trace(go.Scatter(x=time_index,
y=y1,
mode='markers+lines',
marker=dict(color='red'),
name='{}'.format(label1)))
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(showlegend=True)
fig.update_yaxes(title_text=label1)
fig.update_xaxes(title_text='Time')
fig.update_layout(title=title)
# Add range slider
fig.update_layout(
xaxis=go.layout.XAxis(
rangeselector=dict(
buttons=list([
dict(count=1,
label="1 day",
step="day",
stepmode="backward"),
dict(count=3,
label="3 day",
step="day",
stepmode="backward"),
dict(count=7,
label="1 week",
step="day",
stepmode="backward"),
])
),
rangeslider=dict(
visible=True
),
type="date"
)
)
if print_:
fig.write_html("figures\\time_series_{}.html".format(label1))
if show:
fig.show()
def dl_ia_utils_time_series_comparison(time_index, y1, y2, label1, label2, title):
""" Plot two time series with the same time index
:param time_index:
:param y1:
:param y2:
:param label1:
:param label2:
    :param title: title of the plot
    :return:
"""
import plotly.graph_objects as go
print_ = True
show = True
# title = 'time_series_comparison'
error = 0
try:
fig = go.Figure()
fig.add_trace(go.Scatter(x=time_index,
y=y1,
mode='markers+lines',
marker=dict(color='red'),
name='{}'.format(label1)))
fig.add_trace(go.Scatter(x=time_index,
y=y2,
mode='markers+lines',
marker=dict(color='blue'),
name='{}'.format(label2)))
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(showlegend=True)
fig.update_yaxes(title_text=label1)
fig.update_xaxes(title_text='Time')
fig.update_layout(title=title)
# Add range slider
fig.update_layout(
xaxis=go.layout.XAxis(
rangeselector=dict(
buttons=list([
dict(count=1,
label="1 day",
step="day",
stepmode="backward"),
dict(count=3,
label="3 day",
step="day",
stepmode="backward"),
dict(count=7,
label="1 week",
step="day",
stepmode="backward"),
])
),
rangeslider=dict(
visible=True
),
type="date"
)
)
if print_:
fig.write_html("figures\\time_series_comparison_{}_{}.html".format(label1, label2))
if show:
fig.show()
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_time_series_comparison: ' + str(exception_msg))
error = 1
return error
def dl_ia_utils_plot_scatterplot_simple(df, var_x, var_y, label, title_):
""" Produce a simple scatter plot with plotly
:param df: dataframe that contains the variables
:param variable_x: variable to plot in x axis
:param variable_y: variable to plot in y axis
:param variable_to_color: variable to use as color
:param variable_to_color: variable to use as size
:return:
"""
import plotly.express as px
print_ = True
show = True
error = 0
try:
fig = px.scatter(df,
x=var_x,
y=var_y, # marker = dict(color='blue')
)
fig.update_xaxes(title_text=var_x)
fig.update_yaxes(title_text=var_y)
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title='{} - {}'.format(title_, label))
fig.update_layout(showlegend=True)
if print_:
fig.write_html("figures\\{}_{}_{}_{}.html".format(title_, label, var_x, var_y))
if show:
fig.show()
return error
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_plot_scatterplot_simple: ' + str(exception_msg))
error = 1
return error
def dl_ia_utils_plot_scatterplot(df, var_x, var_y, var_color, var_size, label):
""" Produce a simple scatter plot with plotly
:param df: dataframe that contains the variables
:param variable_x: variable to plot in x axis
:param variable_y: variable to plot in y axis
:param variable_to_color: variable to use as color
:param variable_to_color: variable to use as size
:return:
"""
import plotly.express as px
print_ = True
show = True
error = 0
try:
fig = px.scatter(df,
x=var_x,
y=var_y, # marker = dict(color='blue')
size=var_size,
color=var_color)
fig.update_xaxes(title_text=var_x)
fig.update_yaxes(title_text=var_y)
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title='Scatterplot - {}'.format(label))
fig.update_layout(showlegend=True)
if print_:
fig.write_html("figures\\scatterplot_{}_{}_{}.html".format(label, var_x, var_y))
if show:
fig.show()
return error
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_plot_scatterplot: ' + str(exception_msg))
error = 1
return error
def dl_ia_utils_plot_three_timeseries(df, var_x, var_y1, var_y2, var_y3,
title_, y_label):
""" Plot two time series
:df param: data frame with data
:var_x param: string with x-axis variable. The name is used as label
:var_y1 param: string with first time series to plot. The name is used as axis label
:var_y2 param: string with second time series to plot
:var_y3 param: string with third time series to plot
:title_ param: string with the desired title. It is used to save the html file
    :y_label param: string for the label of the y axis
    :return: error
"""
    import plotly.graph_objects as go
    import plotly.io as pio
    # by default the figure is shown in the browser; change to 'notebook' if preferred
    pio.renderers.default = "browser"
show = True
print_ = True
error = 0
custom_x_axis = False
try:
fig = go.Figure()
fig.add_trace(go.Scatter(x=df[var_x],
y=df[var_y1],
line=dict(width=2, dash='dot'),
marker=dict(color='black'),
mode='markers+lines',
name=var_y1))
fig.add_trace(go.Scatter(x=df[var_x],
y=df[var_y2],
line=dict(width=2, dash='dot'),
mode='markers+lines',
marker=dict(color='blue'),
name=var_y2))
fig.add_trace(go.Scatter(x=df[var_x],
y=df[var_y3],
line=dict(width=2, dash='dot'),
mode='markers+lines',
marker=dict(color='red'),
name=var_y3))
fig.update_layout(legend=dict(
orientation="h",
yanchor="bottom",
y=1.0,
xanchor="right",
x=0.4
))
fig.update_layout(font=dict(family="Courier New, monospace", size=16, color="#7f7f7f"))
fig.update_layout(showlegend=True)
fig.update_yaxes(title_text=y_label)
fig.update_xaxes(title_text=var_x)
fig.update_layout(title=title_)
# fig.update_layout(legend_orientation="h")
### update x ticks label
if custom_x_axis:
fig.update_layout(
xaxis=dict(
tickmode='array',
tickvals=df[var_x],
ticktext=df[var_x]
)
)
if show:
fig.show()
if print_:
fig.write_html("figures\\{}.html".format(title_))
return error
except Exception as exception_msg:
error = 1
print('(!) Error in dl_ia_utils_plot_three_timeseries: ' + str(exception_msg))
return error
def dl_ia_utils_plot_two_timeseries(df, var_x, var_y1, var_y2, title_, y_label):
""" Plot two time series
:df param: data frame with data
:var_x param: string with x-axis variable. The name is used as label
:var_y1 param: string with first time series to plot. The name is used as axis label
:var_y2 param: string with second time series to plot
:title_ param: string with the desired title. It is used to save the html file
    :y_label param: string for the label of the y axis
    :return: error
"""
    import plotly.graph_objects as go
    import plotly.io as pio
    from IA_resources.dl_ia_utils import dl_ia_utils_config_plotly
    dl_ia_utils_config_plotly()
    # by default the figure is shown in the browser; change to 'notebook' if preferred
    pio.renderers.default = "browser"
show = True
print_ = True
error = 0
custom_x_axis = False
try:
fig = go.Figure()
fig.add_trace(go.Scatter(x=df[var_x],
y=df[var_y1],
line=dict(width=2, dash='dot'),
marker=dict(color='black'),
mode='markers+lines',
name=var_y1))
fig.add_trace(go.Scatter(x=df[var_x],
y=df[var_y2],
line=dict(width=2, dash='dot'),
mode='markers+lines',
marker=dict(color='blue'),
name=var_y2))
fig.update_layout(legend=dict(
orientation="h",
yanchor="bottom",
y=1.0,
xanchor="right",
x=0.4
))
fig.update_layout(font=dict(family="Courier New, monospace", size=16, color="#7f7f7f"))
fig.update_layout(showlegend=True)
fig.update_yaxes(title_text=y_label)
fig.update_xaxes(title_text=var_x)
fig.update_layout(title=title_)
# fig.update_layout(legend_orientation="h")
### update x ticks label
if custom_x_axis:
fig.update_layout(
xaxis=dict(
tickmode='array',
tickvals=df[var_x],
ticktext=df[var_x]
)
)
if show:
fig.show()
if print_:
fig.write_html("figures\\{}.html".format(title_))
return error
except Exception as exception_msg:
error = 1
print('(!) Error in dl_ia_utils_plot_two_timeseries: ' + str(exception_msg))
return error
def dl_ia_utils_plot_contour(df, title, x_label, y_label):
"""
:return:
"""
    import numpy as np
    import plotly.graph_objects as go
    import plotly.io as pio
    from IA_resources.dl_ia_utils import dl_ia_utils_config_plotly
    dl_ia_utils_config_plotly()
    # by default the figure is shown in the browser; change to 'notebook' if preferred
    pio.renderers.default = "browser"
try:
fig = go.Figure(data=
go.Contour(
z=df.values,
x=list(range(df.shape[1])), # horizontal axis
y=list(range(df.shape[0])), # vertical axis
line_smoothing=0.85,
contours=dict(
showlabels=True, # show labels on contours
start=0,
end=18,
size=1)
))
fig.update_layout(title=title,
xaxis_title=x_label,
yaxis_title=y_label)
fig.update_layout(
yaxis=dict(
tickvals=np.arange(0, len(df.index)),
ticktext=df.index
),
xaxis=dict(
tickvals=np.arange(0, len(df.index)),
ticktext=df.columns,
tickangle=90,
tickfont=dict(size=9)
)
)
fig.update_layout(
font=dict(
family="Courier New, monospace",
color="RebeccaPurple",
size=10))
fig.show()
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_plot_contour: ' + str(exception_msg))
def dl_ia_utils_plot_heatmap(df, title, x_label, y_label, x_ticks, y_ticks):
""" Plot heatmap
:df param: dataframe to plot
:title param: string with the title
:x_label param: string with the label of the x axis
:y_label param: string with the label of the y axis
:x_ticks param: list with the ticks of the x axis
:y_ticks param: list with the ticks of the y axis
:return:
"""
import plotly.express as px
import numpy as np
from IA_resources.dl_ia_utils import dl_ia_utils_config_plotly
dl_ia_utils_config_plotly()
try:
fig = px.imshow(df.values)
fig.update_layout(title=title,
yaxis_title=y_label,
xaxis_title=x_label)
fig.update_layout(
yaxis=dict(
tickvals=np.arange(0, len(y_ticks)),
ticktext=y_ticks
),
xaxis=dict(
tickvals=np.arange(0, len(x_ticks)),
ticktext=x_ticks
)
)
fig.update_layout(
font=dict(
family="Courier New, monospace",
color="RebeccaPurple",
size=11))
fig.show()
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_plot_heatmap: ' + str(exception_msg))
def dl_ia_utils__plot_line(df, var_x, var_y, var_color, x_label, y_label, title_):
""" Plot line plot from dataframe
:param df:
:param var_x:
:param var_y:
:param x_label:
:param y_label:
:param title_:
"""
    import plotly.express as px
    if var_color is None:
fig = px.line(df,
x=var_x,
y=var_y)
else:
fig = px.line(df,
x=var_x,
y=var_y,
color=var_color)
fig.update_layout(title=title_,
xaxis_title=x_label,
yaxis_title=y_label)
fig.update_layout(
font=dict(
family="Courier New, monospace",
color="RebeccaPurple",
size=14))
fig.show()
def dl_ia_utils_plot_scatter(df, var_x, var_y, var_color, x_label, y_label, title_):
""" Plot scatter plot from dataframe
:param df:
:param var_x:
:param var_y:
:param x_label:
:param y_label:
:param title_:
"""
    import plotly.express as px
    if var_color is None:
fig = px.scatter(df,
x=var_x,
y=var_y)
else:
fig = px.scatter(df,
x=var_x,
y=var_y,
color=var_color)
fig.update_layout(title=title_,
xaxis_title=x_label,
yaxis_title=y_label)
fig.update_layout(
font=dict(
family="Courier New, monospace",
color="RebeccaPurple",
size=14))
fig.show()
def dl_ia_utils_plot_contour(df, title, x_label, y_label):
"""
:return:
"""
    import numpy as np
    import plotly.graph_objects as go
fig = go.Figure(data=
go.Contour(
z=df.values,
x=list(range(df.shape[1])), # horizontal axis
y=list(range(df.shape[0])), # vertical axis
line_smoothing=0.85,
contours=dict(
showlabels=True, # show labels on contours
start=0,
end=18,
size=1)
))
fig.update_layout(title=title,
xaxis_title=x_label,
yaxis_title=y_label)
fig.update_layout(
yaxis=dict(
tickvals=np.arange(0, len(df.index)),
ticktext=df.index
),
xaxis=dict(
tickvals=np.arange(0, len(df.index)),
ticktext=df.columns,
tickangle=90,
tickfont=dict(size=9)
)
)
fig.update_layout(
font=dict(
family="Courier New, monospace",
color="RebeccaPurple",
size=10))
fig.show()
def dl_ia_utils_plot_heatmap(df, title, x_label, y_label, x_ticks, y_ticks):
""" Plot heatmap
:df param: dataframe to plot
:title param: string with the title
:x_label param: string with the label of the x axis
:y_label param: string with the label of the y axis
:x_ticks param: list with the ticks of the x axis
:y_ticks param: list with the ticks of the y axis
:return:
"""
import plotly.express as px
import numpy as np
fig = px.imshow(df.values)
fig.update_layout(title=title,
yaxis_title=y_label,
xaxis_title=x_label)
fig.update_layout(
yaxis=dict(
tickvals=np.arange(0, len(y_ticks)),
ticktext=y_ticks
),
xaxis=dict(
tickvals=np.arange(0, len(x_ticks)),
ticktext=x_ticks
)
)
fig.update_layout(
font=dict(
family="Courier New, monospace",
color="RebeccaPurple",
size=11))
fig.show()
def dl_ia_utils_plot_timeseries_with_slider(df, var_x, var_y, title):
"""
:param df:
:param var_x:
:param var_y:
:return:
"""
import plotly.graph_objects as go
show = True
print_ = True
fig = go.Figure()
fig.add_trace(go.Scatter(x=df[var_x],
y=df[var_y],
mode='markers+lines',
marker=dict(color='red'),
name=var_y))
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title=title,
xaxis_title=var_x,
yaxis_title=var_y,
showlegend=True
)
# Add range slider
fig.update_layout(
xaxis=go.layout.XAxis(
rangeselector=dict(
buttons=list([
dict(count=1,
label="1d",
step="day",
stepmode="backward"),
dict(count=3,
label="3d",
step="day",
stepmode="backward"),
dict(count=7,
label="1w",
step="day",
stepmode="backward"),
])
),
rangeslider=dict(
visible=True
),
type="date"
)
)
if show:
fig.show()
if print_:
fig.write_html("figures\\timeseries_with_slider_{}.html".format(var_y))
def dl_ia_utils_plot_timeseries(df, var_x, var_y, title_):
"""
:param df:
:param var_x:
:param var_y:
:return:
"""
import plotly.graph_objects as go
    show = True
    print_ = True
    error = 0
    try:
fig = go.Figure()
fig.add_trace(go.Scatter(x=df[var_x],
y=df[var_y],
marker=dict(color='red'),
mode='markers+lines',
name=var_y))
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title='{}'.format(title_),
xaxis_title=var_x,
yaxis_title=var_y,
showlegend=True
)
if show:
fig.show()
if print_:
fig.write_html("figures\\{}_{}.html".format(title_, var_y))
return error
except Exception as exception_msg:
        print('(!) Error in dl_ia_utils_plot_timeseries : {} '.format(exception_msg))
error = 1
return error
def dl_ia_utils_plot_two_timeseries(df, var_x, var_y1, var_y2, title_, x_label, y_label):
"""
:param df:
:param var_x:
:param var_y1:
:param var_y2:
:return:
"""
import plotly.graph_objects as go
show = True
print_ = True
try:
fig = go.Figure()
fig.add_trace(go.Scatter(x=df[var_x],
y=df[var_y1],
marker=dict(color='red'),
mode='markers',
name=var_y1))
fig.add_trace(go.Scatter(x=df[var_x],
marker=dict(color='blue'),
y=df[var_y2],
mode='markers+lines',
name=var_y2))
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title=title_,
xaxis_title=x_label,
yaxis_title=y_label,
showlegend=True
)
# Add range slider
fig.update_layout(
xaxis=go.layout.XAxis(
rangeselector=dict(
buttons=list([
dict(count=1,
label="1d",
step="day",
stepmode="backward"),
dict(count=3,
label="3d",
step="day",
stepmode="backward"),
dict(count=7,
label="1w",
step="day",
stepmode="backward"),
])
),
rangeslider=dict(
visible=True
),
type="date"
)
)
if show:
fig.show()
if print_:
fig.write_html("figures\\{}_{}_{}.html".format(title_, var_y1, var_y2))
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_plot_two_timeseries : {} '.format(exception_msg))
def dl_ia_utils_plot_line(df, var_x, var_y, var_group, title_):
"""
:param df:
:param var_x:
:param var_y:
:param var_group:
:return:
"""
import plotly.express as px
show = True
print_ = True
try:
fig = px.line(df,
x=var_x,
y=var_y,
color=var_group,
)
        fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
        fig.update_layout(title='{} - {}'.format(title_, var_group))
        fig.update_layout(showlegend=True)
if show:
fig.show()
if print_:
fig.write_html("figures\\{}_{}_{}.html".format(title_, var_x, var_y))
except Exception as exception_msg:
        print('(!) Error in dl_ia_utils_plot_line : {} '.format(exception_msg))
def dl_ia_utils_plot_scatterplot_simple(df, var_x, var_y, label):
""" Produce a simple scatter plot with plotly
:param df: dataframe that contains the variables
:param variable_x: variable to plot in x axis
:param variable_y: variable to plot in y axis
:param variable_to_color: variable to use as color
:param variable_to_color: variable to use as size
:return:
"""
import plotly.express as px
print_ = True
show = True
error = 0
try:
fig = px.scatter(df,
x=var_x,
y=var_y, # marker = dict(color='blue')
)
fig.update_xaxes(title_text=var_x)
fig.update_yaxes(title_text=var_y)
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title='Scatterplot - {}'.format(label))
fig.update_layout(showlegend=True)
if print_:
fig.write_html("figures\\scatterplot_{}_{}_{}.html".format(label, var_x, var_y))
if show:
fig.show()
return error
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_plot_scatterplot_simple: ' + str(exception_msg))
error = 1
return error
def dl_ia_utils_plot_scatterplot(df, var_x, var_y, var_color, var_size, label):
""" Produce a simple scatter plot with plotly
:param df: dataframe that contains the variables
:param variable_x: variable to plot in x axis
:param variable_y: variable to plot in y axis
:param variable_to_color: variable to use as color
:param variable_to_color: variable to use as size
:return:
"""
import plotly.express as px
print_ = True
show = True
error = 0
try:
fig = px.scatter(df,
x=var_x,
y=var_y, # marker = dict(color='blue')
size=var_size,
color=var_color)
fig.update_xaxes(title_text=var_x)
fig.update_yaxes(title_text=var_y)
fig.update_layout(font=dict(family="Courier New, monospace", size=18, color="#7f7f7f"))
fig.update_layout(title='Scatterplot - {}'.format(label))
fig.update_layout(showlegend=True)
if print_:
fig.write_html("figures\\scatterplot_{}_{}_{}.html".format(label, var_x, var_y))
if show:
fig.show()
return error
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_plot_scatterplot: ' + str(exception_msg))
error = 1
return error
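# Illustrative usage sketch (not part of the original module): calls both
# scatter plot helpers on a small hypothetical dataframe; column names and
# values are made up.
def _demo_dl_ia_utils_plot_scatterplot():
    """ Minimal usage sketch for the scatter plot helpers """
    import numpy as np
    import pandas as pd
    df = pd.DataFrame({'speed': np.random.uniform(60, 120, 50),
                       'flow': np.random.uniform(100, 2000, 50),
                       'occupancy': np.random.uniform(0, 1, 50)})
    dl_ia_utils_plot_scatterplot_simple(df, 'speed', 'flow', 'demo')
    dl_ia_utils_plot_scatterplot(df, 'speed', 'flow', 'occupancy', 'occupancy', 'demo')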
#############################################################
# --------------- DATA BASE ---------------------------------#
#############################################################
def dl_ia_query_get_data(query, ddbb_settings):
"""
this function perform the connection to the HORUS SQL serverdata base and executes the query provided
:param query: string with the query
:param ddbb_settings: List with DB connection settings (driver, server, database, schema,
user and pass)
:return error:
"""
import pyodbc
import pandas as pd
error = 0
try:
# print('define parameters')
# for a in db_settings_PRO.keys():
# print(a,':', db_settings_PRO[a])
        ### define connection to DDBB
driver = ddbb_settings['driver']
server = ddbb_settings['server']
database = ddbb_settings['database']
schema = ddbb_settings['schema']
user = ddbb_settings['user']
password = ddbb_settings['pass']
port = ddbb_settings['port']
except Exception as exception_msg:
        print('(!) Error in dl_ia_query_get_data: ' + str(exception_msg))
error = 1
df_input = []
return error, df_input
if error == 0:
try:
print(
'Loading data from server:[{}] database:[{}] schema:[{}] port : [{}] '.format(server, database, schema,
port))
            ### connect to DDBB and get the last 6 hours of data
# sql_conn = pyodbc.connect('DRIVER={ODBC Driver 13 for SQL Server};SERVER=' + server + ';DATABASE=' + database + ';UID=' + user + ';PWD=' + password + 'Trusted_Connection=yes')
# sql_conn = pyodbc.connect('DRIVER={SQL Server Native Client RDA 11.0};SERVER=' + server + ';DATABASE=' + database + ';UID=' + user + ';PWD=' + password + 'Trusted_Connection=yes')
sql_conn_str = 'DRIVER={};SERVER={},{};DATABASE={};UID={};PWD={}'.format(driver, server, port, database,
user, password)
# print(sql_conn_str)
# sql_conn = pyodbc.connect('DRIVER=' + driver +';SERVER=' + server + ',' + port + ';DATABASE=' + database + ';UID=' + user + ';PWD=' + password)
sql_conn = pyodbc.connect(sql_conn_str)
df_input = pd.read_sql(query, sql_conn)
sql_conn.close()
return error, df_input
except Exception as exception_msg:
            print('(!) Error in dl_ia_query_get_data: ' + str(exception_msg))
error = 2
df_input = []
return error, df_input
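# Illustrative usage sketch (not part of the original module): the connection
# settings below are placeholders (driver name, host, credentials and table
# are hypothetical) and must be replaced with real values.
def _demo_dl_ia_query_get_data():
    """ Minimal usage sketch for dl_ia_query_get_data with placeholder settings """
    ddbb_settings = {'driver': '{ODBC Driver 17 for SQL Server}',
                     'server': 'my_server.example.com',
                     'database': 'my_database',
                     'schema': 'dbo',
                     'user': 'my_user',
                     'pass': 'my_password',
                     'port': '1433'}
    query = 'SELECT TOP 10 * FROM dbo.my_table'
    error, df_input = dl_ia_query_get_data(query, ddbb_settings)
    return error, df_input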
def dl_ia_utils_initialize_engine(ddbb_settings):
""" Initialize an SQL ALchemy engine
:param ddbb_settings: DIctionary with driver user, pass, server, database, schema
:param engine:
"""
from sqlalchemy.engine import create_engine
try:
engine = create_engine("{}://{}:{}@{}:{}/{}".format(ddbb_settings['driver'],
ddbb_settings['user'],
ddbb_settings['pass'],
ddbb_settings['server'],
ddbb_settings['port'],
ddbb_settings['database']))
print('Engine successfully initialized')
return engine
except Exception as exception_msg:
        print('(!) Error in dl_ia_utils_initialize_engine: {}'.format(exception_msg))
engine = []
return engine
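# Illustrative usage sketch (not part of the original module): the settings
# below are placeholders; note the driver key must be a SQLAlchemy dialect
# string (e.g. 'postgresql' or 'mssql+pyodbc'), not an ODBC driver name.
def _demo_dl_ia_utils_initialize_engine():
    """ Minimal usage sketch for dl_ia_utils_initialize_engine """
    ddbb_settings = {'driver': 'postgresql',
                     'user': 'my_user',
                     'pass': 'my_password',
                     'server': 'localhost',
                     'port': '5432',
                     'database': 'my_database'}
    engine = dl_ia_utils_initialize_engine(ddbb_settings)
    return engine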
def dl_ia_utils_query_get_data(query, ddbb_settings):
"""
this function perform the connection to the HORUS SQL serverdata base and executes the query provided
:param query: string with the query
:param ddbb_settings: List with DB connection settings (driver, server, database, schema,
user and pass)
:return error:
"""
import pyodbc
import pandas as pd
error = 0
try:
# print('define parameters')
# for a in db_settings_PRO.keys():
# print(a,':', db_settings_PRO[a])
        ### define connection to DDBB
driver = ddbb_settings['driver']
server = ddbb_settings['server']
database = ddbb_settings['database']
schema = ddbb_settings['schema']
user = ddbb_settings['user']
password = ddbb_settings['pass']
port = ddbb_settings['port']
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_query_get_data: ' + str(exception_msg))
error = 1
df_input = []
return error, df_input
if error == 0:
try:
print(
'Loading data from server:[{}] database:[{}] schema:[{}] port : [{}] '.format(server, database, schema,
port))
            ### connect to DDBB and get the last 6 hours of data
# sql_conn = pyodbc.connect('DRIVER={ODBC Driver 13 for SQL Server};SERVER=' + server + ';DATABASE=' + database + ';UID=' + user + ';PWD=' + password + 'Trusted_Connection=yes')
# sql_conn = pyodbc.connect('DRIVER={SQL Server Native Client RDA 11.0};SERVER=' + server + ';DATABASE=' + database + ';UID=' + user + ';PWD=' + password + 'Trusted_Connection=yes')
sql_conn_str = 'DRIVER={};SERVER={},{};DATABASE={};UID={};PWD={}'.format(driver, server, port, database,
user, password)
# print(sql_conn_str)
# sql_conn = pyodbc.connect('DRIVER=' + driver +';SERVER=' + server + ',' + port + ';DATABASE=' + database + ';UID=' + user + ';PWD=' + password)
sql_conn = pyodbc.connect(sql_conn_str)
df_input = pd.read_sql(query, sql_conn)
sql_conn.close()
return error, df_input
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_query_get_data: ' + str(exception_msg))
error = 2
df_input = []
return error, df_input
#############################################################
# ------- MACHINE LEARNING ---------------------------------#
#############################################################
def dl_ia_utils_create_lagged_variables(df, variable, number_lags):
""" create lagged versions of the variable in a dataframe in which each row is an observation
:param df:
:param number_lags:
:return:
"""
number_lags = 23
for lag in range(1, number_lags + 1):
df[variable + '_lag_' + str(lag)] = df[variable].shift(lag)
# if you want numpy arrays with no null values:
df.dropna(inplace=True)
return df
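# Illustrative usage sketch (not part of the original module): creates three
# lagged copies of a hypothetical 'flow' column so each row also carries the
# previous three observations, as needed for supervised timeseries learning.
def _demo_dl_ia_utils_create_lagged_variables():
    """ Minimal usage sketch for dl_ia_utils_create_lagged_variables """
    import numpy as np
    import pandas as pd
    df = pd.DataFrame({'flow': np.arange(10, dtype=float)})
    df_lagged = dl_ia_utils_create_lagged_variables(df, 'flow', number_lags=3)
    print(df_lagged.head())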
def dl_ia_utils_anomaly_detection_univariate(df, variable):
""" Produce anomaly detection with forest isolation with univariate data
:param df:
:param variable:
"""
from sklearn.ensemble import IsolationForest
import numpy as np
import pandas as pd
error = 0
try:
# instantiate model
isolation_forest = IsolationForest(n_estimators=200)
# fit model
isolation_forest.fit(df[variable].values.reshape(-1, 1))
xx = np.linspace(df[variable].min(), df[variable].max(), len(df)).reshape(-1, 1)
anomaly_score = isolation_forest.decision_function(xx)
# make prediction
outlier = isolation_forest.predict(xx)
df_out = pd.DataFrame({'X': xx.T.ravel(), 'outlier': outlier.ravel(), 'anomaly_score': anomaly_score.ravel()})
lower_limit_outlier = df_out[df_out['outlier'] == -1]['X'].min()
upper_limit_outlier = df_out[df_out['outlier'] == -1]['X'].max()
print('lower limit outlier:{}'.format(lower_limit_outlier))
print('upper limit outlier:{}'.format(upper_limit_outlier))
        # optional plot of the anomaly score and outlier region (disabled by default)
        plot_ = False
        if plot_:
            import matplotlib.pyplot as plt
            plt.figure(figsize=(10, 4))
            plt.plot(xx, anomaly_score, label='anomaly score')
            plt.fill_between(xx.T[0],
                             np.min(anomaly_score),
                             np.max(anomaly_score),
                             where=outlier == -1,
                             color='r',
                             alpha=.4,
                             label='outlier region')
            plt.legend()
            plt.ylabel('anomaly score')
            plt.xlabel(variable)
            plt.show()
        return error, df_out
except Exception as exception_msg:
print('(!) Error in dl_ia_utils_anomaly_detection_univariate: ' + str(exception_msg))
error = 1
return error
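# Illustrative usage sketch (not part of the original module): injects two
# obvious outliers into a synthetic normal sample so the isolation forest has
# something to flag outside the reported limits.
def _demo_dl_ia_utils_anomaly_detection_univariate():
    """ Minimal usage sketch for dl_ia_utils_anomaly_detection_univariate """
    import numpy as np
    import pandas as pd
    np.random.seed(0)
    values = np.concatenate([np.random.normal(100, 5, 500), [10.0, 250.0]])
    df = pd.DataFrame({'flow': values})
    error, df_out = dl_ia_utils_anomaly_detection_univariate(df, 'flow')
    return error, df_out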
def dl_ia_utils_variability_captured(y, y_hat):
""" function to calculate the varibility captured or explained variance
:param y:
:param y_hat:
:return:
"""
import numpy as np
return round(1 - np.var(y_hat - y) / np.var(y_hat), 3)
def dl_ia_utils_mape(y_true, y_pred):
""" function to calculate the Mean Absolute Percentage Error
Zero values are treated by vertical translation
:param y:
:param y_hat:
:return:
"""
import numpy as np
from IA_resources.dl_ia_utils import dl_ia_utils_vertical_translation
y_true = vertical_translation(y_true) # vertical translation +1
y_pred = vertical_translation(y_pred) # vertical translation +1
y_true, y_pred = np.array(y_true), np.array(y_pred)
return round(np.mean(np.abs((y_true - y_pred) / y_true)) * 100, 3)
def dl_ia_utils_regression_evaluation(y_hat, y):
""" Evaluate regression metrics
:param y_hat:
:param y:
:return:
"""
from IA_resources.dl_ia_utils import dl_ia_utils_variability_captured
from IA_resources.dl_ia_utils import dl_ia_utils_mape
from sklearn.metrics import r2_score, mean_absolute_error, explained_variance_score, mean_squared_error
R2 = round(r2_score(y_hat, y), 3)
MAE = round(mean_absolute_error(y_hat, y), 3)
MSE = round(mean_squared_error(y_hat, y), 3)
EV = round(explained_variance_score(y_hat, y), 3)
VC = round(dl_ia_utils_variability_captured(y_hat, y), 3)
errors = abs(y_hat - y)
# MAPE = 100 * np.mean(errors / y)
MAPE = dl_ia_utils_mape(y_hat, y)
accuracy = 100 - MAPE
    print('Regression Metrics')
    print('R2 = {:0.2f}'.format(R2))
    print('EV = {:0.2f}'.format(EV))
    print('Variability captured = {:0.2f}'.format(VC))
print('MSE = {:0.2f}'.format(MSE))
print('MAE = {:0.2f}'.format(MAE))
print('MAPE: {:0.4f} %'.format(MAPE))
print('Accuracy = {:0.2f} %.'.format(accuracy))
return R2, MAE, MSE, EV, VC
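# Illustrative usage sketch (not part of the original module): evaluates a
# hypothetical set of predictions against observed values; it assumes the
# dl_ia_utils_vertical_translation helper used by the MAPE metric is available
# in IA_resources.dl_ia_utils.
def _demo_dl_ia_utils_regression_evaluation():
    """ Minimal usage sketch for dl_ia_utils_regression_evaluation """
    import numpy as np
    y = np.array([100.0, 110.0, 120.0, 130.0, 140.0])
    y_hat = np.array([102.0, 108.0, 123.0, 128.0, 141.0])
    R2, MAE, MSE, EV, VC = dl_ia_utils_regression_evaluation(y_hat, y)
    return R2, MAE, MSE, EV, VC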