ext | sha | content
---|---|---
py | b416b4e28fe1c81d831619dfd507fef57902baed | # Ported from other Telegram UserBots for TeleBot
# Kangers, don't remove this line
# @
import io

from userbot import CMD_LIST
from userbot import ALIVE_NAME
# `command`, `Var` and `bot` are assumed to be injected by the userbot
# framework's plugin loader; they are used below but not imported in this file.
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else "No name has been set yet, check the installation @meaallh100"
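# CMD_LIST is assumed to map each plugin name to its list of command strings,
# e.g. {"admin": [".ban", ".unban"], "afk": [".afk"]}; the help text below is
# built by iterating over that mapping.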
@command(pattern="^.help ?(.*)")
async def cmd_list(event):
if not event.text[0].isalpha() and event.text[0] not in ("/", "#", "@", "!"):
tgbotusername = Var.TG_BOT_USER_NAME_BF_HER
input_str = event.pattern_match.group(1)
if tgbotusername is None or input_str == "text":
string = ""
for i in CMD_LIST:
string += "⚡ " + i + "\n"
for iter_list in CMD_LIST[i]:
string += " `" + str(iter_list) + "`"
string += "\n"
string += "\n"
if len(string) > 4095:
with io.BytesIO(str.encode(string)) as out_file:
out_file.name = "cmd.txt"
await bot.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption="**COMMANDS**",
reply_to=event.reply_to_msg_id
)
await event.delete()
else:
await event.edit(string)
elif input_str:
if input_str in CMD_LIST:
string = "الأوامر متوفرة في {}: \n".format(input_str)
for i in CMD_LIST[input_str]:
string += " " + i
string += "\n"
string +="© @meaallh100"
await event.edit(string)
else:
await event.edit(input_str + " is not a valid plugin")
else:
help_string = f"""`Userbot Helper لـ {DEFAULTUSER} للكشف عن جميع أوامر `**[TeleBot](https://github.com/meaall-com/telebot/)**\n\n"""
results = await bot.inline_query( # pylint:disable=E0602
tgbotusername,
help_string
)
await results[0].click(
event.chat_id,
reply_to=event.reply_to_msg_id,
hide_via=True
)
await event.delete()
|
py | b416b523a2ee47258c8f0cb14a4e329a722ab643 | from django.contrib.auth import get_user_model
from django.urls import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Ingredient
from recipe.serializers import IngredientSerializer
INGREDIENTS_URL = reverse('recipe:ingredient-list')
class PublicIngredientsApiTests(TestCase):
"""Test the publicly available ingredients API"""
def setUp(self):
self.client = APIClient()
def test_login_required(self):
"""Test that login is required to access the endpoint"""
res = self.client.get(INGREDIENTS_URL)
self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateIngredientsApiTests(TestCase):
"""Test the private ingredients API"""
def setUp(self):
self.client = APIClient()
self.user = get_user_model().objects.create_user(
email='[email protected]',
password='testpass'
)
self.client.force_authenticate(self.user)
def test_retrieve_ingredients(self):
"""Test retrieving a list of ingredients"""
Ingredient.objects.create(user=self.user, name='Kale')
Ingredient.objects.create(user=self.user, name='Salt')
res = self.client.get(INGREDIENTS_URL)
ingredients = Ingredient.objects.all().order_by('-name')
serializer = IngredientSerializer(ingredients, many=True)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data, serializer.data)
def test_ingredients_limited_to_user(self):
"""Test that ingredients for the authenticated user are returned"""
user2 = get_user_model().objects.create_user(
email='[email protected]',
password='testpass'
)
Ingredient.objects.create(user=user2, name='Vinegar')
ingredient = Ingredient.objects.create(user=self.user, name='Tumeric')
res = self.client.get(INGREDIENTS_URL)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data), 1)
self.assertEqual(res.data[0]['name'], ingredient.name)
def test_create_ingredients_successful(self):
"""Test create a new ingredients"""
payload = {'name': 'Cabbage'}
self.client.post(INGREDIENTS_URL, payload)
exists = Ingredient.objects.filter(
user=self.user,
name=payload['name']
).exists()
self.assertTrue(exists)
def test_create_ingredients_invalid(self):
"""Test creating invalid ingredients fails"""
payload = {'name': ''}
res = self.client.post(INGREDIENTS_URL, payload)
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
|
py | b416b53879f0fea83d0065963e6549edc236bfa1 | """
e3sm_to_cmip cmor handler script
Variable: so4_a2 (Concentration of so4_a2, in kg/kg)
Matt Nicholson
24 Feb 2020
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import cmor
from e3sm_to_cmip.lib import handle_variables
# list of raw variable names needed
RAW_VARIABLES = [str('so4_a2')]
VAR_NAME = str('so4_a2')
VAR_UNITS = str("kg/kg")
TABLE = str('CMIP6_Amon.json')
LEVELS = {
'name': str('lev'),
'units': str('hPa'),
'e3sm_axis_name': 'lev'
}
def write_data(varid, data, timeval, timebnds, index, **kwargs):
"""
so4_a2 = so4_a2
"""
cmor.write(
varid,
data['so4_a2'][index, :],
time_vals=timeval,
time_bnds=timebnds)
# ------------------------------------------------------------------
def handle(infiles, tables, user_input_path, **kwargs):
"""
Parameters
----------
infiles (List): a list of strings of file names for the raw input data
tables (str): path to CMOR tables
user_input_path (str): path to user input json file
Returns
-------
var name (str): the name of the processed variable after processing is complete
"""
return handle_variables(
metadata_path=user_input_path,
tables=tables,
table=TABLE,
infiles=infiles,
raw_variables=RAW_VARIABLES,
write_data=write_data,
outvar_name=VAR_NAME,
outvar_units=VAR_UNITS,
serial=kwargs.get('serial'),
levels=LEVELS,
logdir=kwargs.get('logdir'))
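# Illustrative invocation (all paths below are hypothetical; in practice the
# e3sm_to_cmip command-line driver calls handle() for each variable):
# handle(
#     infiles=['so4_a2_185001_201412.nc'],
#     tables='/path/to/cmip6-cmor-tables/Tables',
#     user_input_path='/path/to/user_metadata.json',
#     serial=True)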
# ------------------------------------------------------------------ |
py | b416b5420a5753ba15489fad64e0a89c7398f89e | """Provides a class for managing BIG-IP FDB tunnel resources."""
# coding=utf-8
#
# Copyright (c) 2017-2021 F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from f5_cccl.resource import Resource
from f5_cccl.resource.net.fdb.record import Record
LOGGER = logging.getLogger(__name__)
class FDBTunnel(Resource):
"""FDBTunnel class for managing network configuration on BIG-IP."""
properties = dict(name=None,
partition=None,
records=list())
def __init__(self, name, partition, default_route_domain, **data):
"""Create a tunnel from CCCL fdbTunnelType."""
super(FDBTunnel, self).__init__(name, partition)
records = data.get('records', list())
self._data['records'] = self._create_records(
default_route_domain, records)
def __eq__(self, other):
if not isinstance(other, FDBTunnel):
LOGGER.warning(
"Invalid comparison of FDBTunnel object with object "
"of type %s", type(other))
return False
for key in self.properties:
if key == 'records':
if len(self._data[key]) != len(other.data[key]):
return False
for record in self._data[key]:
if record not in other.data[key]:
return False
idx = other.data[key].index(record)
if record != other.data[key][idx]:
return False
continue
if self._data[key] != other.data.get(key):
return False
return True
def _create_records(self, default_route_domain, records):
"""Create a list of records for the tunnel."""
new_records = list()
for record in records:
record['default_route_domain'] = default_route_domain
new_records.append(Record(**record).data)
return new_records
def __hash__(self): # pylint: disable=useless-super-delegation
return super(FDBTunnel, self).__hash__()
def _uri_path(self, bigip):
return bigip.tm.net.fdb.tunnels.tunnel
class IcrFDBTunnel(FDBTunnel):
"""FDBTunnel object created from the iControl REST object."""
pass
class ApiFDBTunnel(FDBTunnel):
"""FDBTunnel object created from the API configuration object."""
pass
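# Minimal usage sketch; the record fields shown are assumptions about
# f5_cccl's Record schema, not a documented example:
# tunnel = ApiFDBTunnel(
#     name='vxlan_tunnel',
#     partition='Common',
#     default_route_domain=0,
#     records=[{'name': '0a:0a:0a:0a:0a:0a', 'endpoint': '10.0.0.1'}])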
|
py | b416b6a8f41b469fda3bdaef0d70635b0f529a45 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
##
# Just enough auxiliary bits to make the translated code work.
#
# This package provides the support necessary to use the translated
# code. The configuration modules used in translation take care of
# many semantic differences between Java and Python, while this
# package provides the rest.
##
import copy
import functools
import socket
import struct
import sys
class classmethod_(classmethod):
""" Classmethod that provides attribute delegation.
"""
def __init__(self, func):
classmethod.__init__(self, func)
self.func = func
def __getattr__(self, name):
return getattr(self.func, name)
def synchronized(lock):
""" Synchronization decorator.
from http://wiki.python.org/moin/PythonDecoratorLibrary
@param lock Lock or RLock instance
@return decorator that provides automatic locking
"""
def wrapper(func):
@functools.wraps(func)
def inner(*args, **kwds):
lock.acquire()
try:
return func(*args, **kwds)
finally:
lock.release()
return inner
return wrapper
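# Example usage of the synchronized decorator:
#
#   import threading
#   _lock = threading.RLock()
#
#   @synchronized(_lock)
#   def update_counter():
#       pass  # body runs with _lock held; the lock is released on return or error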
class Boolean(object):
""" Partial implementation of Java Boolean type.
"""
def __init__(self, value):
""" Constructor.
@param value bool instance, True or False
"""
self.value = value
def booleanValue(self):
""" The value of this instance (a bool).
@return True or False
"""
return self.value
@classmethod
def valueOf(cls, text):
""" Creates an instance of this class with a bool value.
@param cls this class
@param text string
@return instance of cls
"""
value = str(text).lower() == 'true'
return cls(value)
class Cloneable(object):
""" Stub for the Cloneable Java interface.
Some of the translated code implements the Java Cloneable
interface, but its methods are never used. We provide this class
for sub typing, and will implement methods as needed later.
"""
def clone(self):
return copy.copy(self)
class DataInputStream(object):
""" Partial implementation of the Java DataInputStream type.
"""
def __init__(self, stream):
""" Constructor.
@param stream any object with recv method
"""
self.stream = stream
self.recv = stream.recv
def readByte(self, unpack=struct.unpack):
""" Reads a byte from the contained stream.
@return string read from stream
"""
return unpack('!b', self.recv(1))[0]
class DataOutputStream(object):
""" Partial implementation of the Java DataOutputStream type
"""
def __init__(self, stream):
""" Constructor.
@param stream any object with send method
"""
self.send = stream.send
def write(self, data, pack=struct.pack, eol=struct.pack('!b', 0)):
""" Writes data to the contained stream.
@param data string to send, or 0
@return None
"""
send = self.send
if data == 0:
send(eol)
else:
for char in data:
send(pack('!c', char))
class Double(float):
""" Partial implementation of Java Double type.
"""
##
# sentinel value used by the socket writer
MAX_VALUE = sys.maxint
@staticmethod
def parseDouble(text):
""" Float double (float) from string.
@param text value to parse
@return float instance
"""
return float(text or 0)
class Integer(int):
""" Partial implementation of Java Integer type.
"""
##
# sentinel value used by the socket writer
MAX_VALUE = sys.maxint
@staticmethod
def parseInt(text):
""" Int from string.
@param text value to parse
@return int instance
"""
return int(text or 0)
@staticmethod
def parseLong(text):
""" Long from string.
@param text value to parse
@return long instance
"""
return long(text or 0)
##
# The generated code uses Longs just like Integers, so we use an alias
# instead of a subclass (for now).
Long = Integer
class Socket(socket.socket):
""" Partial implementation of the Java Socket type.
"""
def __init__(self, host, port):
""" Constructor; attempts connection immediately.
@param host hostname as string
@param port port number as integer
"""
socket.socket.__init__(self, socket.AF_INET, socket.SOCK_STREAM)
self.connect((host, port))
def getInputStream(self):
""" Returns this instance, which has a send method.
"""
return self
def getOutputStream(self):
""" Returns this instance, which has a recv method.
"""
return self
def isConnected(self):
try:
throwaway = self.getpeername()
return True
except (socket.error, ), ex:
return False
class StringBuffer(list):
""" Partial implementation of the Java StringBuffer type
Translated code uses instances of this type to build up strings.
The list base type provides the append method.
"""
def __str__(self, join=str.join, chr=chr):
""" the string value of this instance
@return string from characters contained in this instance
"""
return join('', [chr(v) for v in self])
if 'qt' in sys.modules:
from qt import QThread
class ThreadType(QThread):
""" Partial implementation of Java Thread type, based on Qt3 QThread.
"""
def __init__(self, name):
""" Constructor.
@param name ignored
"""
QThread.__init__(self)
def interrupt(self):
""" Stop this thread (by call to terminate).
"""
return self.terminate()
def isInterrupted(self):
""" Check state of thread.
@return True if thread is finished
"""
return self.finished()
def setDaemon(self, value):
""" No-op.
@param value ignored
@return None
"""
def setName(self, value):
""" No-op.
@param value ignored
@return None
"""
elif 'PyQt4' in sys.modules:
from PyQt4.QtCore import QThread
class ThreadType(QThread):
""" Partial implementation of Java Thread type, based on Qt4 QThread.
"""
def __init__(self, name):
""" Constructor.
@param name ignored
"""
QThread.__init__(self)
def interrupt(self):
""" stop this thread (by call to exit)
"""
return self.exit()
def isInterrupted(self):
""" check state of thread
@return True if thread is finished
"""
return self.isFinished()
def setDaemon(self, value):
""" No-op.
@param value ignored
@return None
"""
def setName(self, value):
""" sets the name of this QObject
@param value name of object as string
@return None
"""
self.setObjectName(value)
else:
import threading
class ThreadType(threading.Thread):
""" Partial implementation of Java Thread type, based on Python Thread.
"""
def __init__(self, name):
""" Constructor.
@param name name of this thread
"""
threading.Thread.__init__(self, name=name)
self.setDaemon(True)
def interrupt(self):
""" No-op; Python threads are not directly interruptible.
"""
return False
def isInterrupted(self):
""" Check state of thread (always False).
@return False
"""
return False
class Thread(ThreadType):
""" Thread parent type, based on available framework
"""
def __init__(self, name, parent, dis):
""" Constructor.
@param name name of this thread
@param parent ignored
@param dis ignored
"""
ThreadType.__init__(self, name=name)
def term(self):
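# self.m_dis is assumed to be set on the instance by the translated
# subclass before term() is called; it is not assigned in this file.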
def isInterrupted():
print 'down town'
return True
self.isInterrupted = isInterrupted
self.m_dis.stream.shutdown(socket.SHUT_RDWR)
self.m_dis.stream.close()
|
py | b416b76af4bfbc64124920859e67b6852a9ad931 | #
# This file is part of the PyMeasure package.
#
# Copyright (c) 2013-2021 PyMeasure Developers
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import logging
from time import sleep
from pymeasure.instruments import Instrument
from pymeasure.instruments.validators import (
strict_discrete_set,
strict_range,
truncated_range,
)
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
# Programmer's guide 8-92, defined outside of the class, since it is used by
# `Instrument.control` without access to `self`.
MAX_MEASUREMENT_TIME = 1000
class CNT91(Instrument):
"""Represents a Pendulum CNT-91 frequency counter."""
CHANNELS = {"A": 1, "B": 2, "C": 3, "E": 4, "INTREF": 6}
MAX_BUFFER_SIZE = 32000 # User Manual 8-38
def __init__(self, resourceName, **kwargs):
kwargs.setdefault('timeout', 120000)
kwargs.setdefault('read_termination', '\n')
super().__init__(
resourceName,
"Pendulum CNT-91",
**kwargs,
)
@property
def batch_size(self):
"""Maximum number of buffer entries that can be transmitted at once."""
if not hasattr(self, "_batch_size"):
self._batch_size = int(self.ask("FORM:SMAX?"))
return self._batch_size
def read_buffer(self, expected_length=0):
"""
Read out the entire buffer.
:param expected_length: The expected length of the buffer. If more
data is read, values at the end are removed. Defaults to 0,
which means that the entire buffer is returned independent of its
length.
:return: Frequency values from the buffer.
"""
while not self.complete:
# Wait until the buffer is filled.
sleep(0.01)
data = []
# Loop until the buffer is completely read out.
while True:
# Get maximum number of buffer values.
new = self.values(":FETC:ARR? MAX")
data += new
# Last values have been read from buffer.
if len(new) < self.batch_size:
# Remove the last values if the buffer is too long.
if expected_length and len(data) > expected_length:
data = data[:expected_length]
log.info("Buffer was too long, truncated.")
break
return data
external_start_arming_source = Instrument.control(
"ARM:SOUR?",
"ARM:SOUR %s",
"""
Select arming input or switch off the start arming function.
Options are 'A', 'B' and 'E' (rear). 'IMM' turns trigger off.
""",
validator=strict_discrete_set,
values={"A": "EXT1", "B": "EXT2", "E": "EXT4", "IMM": "IMM"},
map_values=True,
)
external_arming_start_slope = Instrument.control(
"ARM:SLOP?",
"ARM:SLOP %s",
"Set slope for the start arming condition.",
validator=strict_discrete_set,
values=["POS", "NEG"],
)
continuous = Instrument.control(
"INIT:CONT?",
"INIT:CONT %s",
"Controls whether to perform continuous measurements.",
strict_discrete_set,
values={True: 1.0, False: 0.0},
map_values=True,
)
measurement_time = Instrument.control(
":ACQ:APER?",
":ACQ:APER %f",
"Gate time for one measurement in s.",
validator=strict_range,
values=[2e-9, MAX_MEASUREMENT_TIME], # Programmer's guide 8-92
)
format = Instrument.control(
"FORM?",
"FORM %s",
"Reponse format (ASCII or REAL).",
validator=strict_discrete_set,
values=["ASCII", "REAL"],
)
interpolator_autocalibrated = Instrument.control(
":CAL:INT:AUTO?",
"CAL:INT:AUTO %s",
"Controls if interpolators should be calibrated automatically.",
strict_discrete_set,
values={True: 1.0, False: 0.0},
map_values=True,
)
def configure_frequency_array_measurement(self, n_samples, channel):
"""
Configure the counter for an array of measurements.
:param n_samples: The number of samples
:param channel: Measurement channel (A, B, C, E, INTREF)
"""
n_samples = truncated_range(n_samples, [1, self.MAX_BUFFER_SIZE])
channel = strict_discrete_set(channel, self.CHANNELS)
channel = self.CHANNELS[channel]
self.write(f":CONF:ARR:FREQ {n_samples},(@{channel})")
def buffer_frequency_time_series(
self, channel, n_samples, sample_rate, trigger_source=None
):
"""
Record a time series to the buffer and read it out after completion.
:param channel: Channel that should be used
:param n_samples: The number of samples
:param sample_rate: Sample rate in Hz
:param trigger_source: Optionally specify a trigger source to start the
measurement
"""
if self.interpolator_autocalibrated:
max_sample_rate = 125e3
else:
max_sample_rate = 250e3
# Minimum sample rate is 1 sample in the maximum measurement time.
sample_rate = strict_range(
sample_rate, [1 / MAX_MEASUREMENT_TIME, max_sample_rate]
)
measurement_time = 1 / sample_rate
self.clear()
self.format = "ASCII"
self.configure_frequency_array_measurement(n_samples, channel)
self.continuous = False
self.measurement_time = measurement_time
if trigger_source:
self.external_start_arming_source = trigger_source
# start the measurement (or wait for trigger)
self.write(":INIT")
|
py | b416b76d26d11518e3ab411608e62461d3c32a79 | """
This file implement 3 different version of the elemwise op on the
gpu. Only NaiveAlgo is used and it is not very naive now.
The elemwise fct are also used with scalar operation! So it can happen
that ndim is 0 as with all scalar type.
"""
from __future__ import absolute_import, print_function, division
import logging
import numpy
from theano.scalar.basic import upgrade_to_float_no_complex, complex_types
from theano.scalar.basic_scipy import Erfinv, Erfcx
from six import StringIO
from six.moves import xrange
from theano import Apply
from theano import gof, scalar
_logger_name = 'theano.sandbox.cuda.elemwise'
_logger = logging.getLogger(_logger_name)
def _logical_scalar(x):
return numpy.all(x.type.broadcastable)
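# A "logical scalar" is a variable whose type is broadcastable in every
# dimension (e.g. a 0-d constant broadcast against a tensor); the generated
# kernels load such inputs once into a local variable instead of indexing
# them per element.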
def get_str_list_logical_scalar(node, value_str='ii_i%i_value',
data_str='ii_i%i_data[0]'):
l = []
for ipos, i in enumerate(node.inputs):
if _logical_scalar(i):
l += [value_str % ipos]
else:
l += [data_str % ipos]
return l
class SupportCodeError(Exception):
"""
It is currently not possible to auto-generate a GPU implementation for
an elementwise Op with c_support_code_apply().
But we support Op.c_support_code.
"""
class NaiveAlgo(object):
"""
Parameters
----------
scalar_op
The scalar operation to execute on each element.
sync
If True, will wait after the kernel launch and check for error call.
"""
verbose = 0 # 1, 2 or 3 for more verbose output.
@property
def cache_version(self):
ver = self.scalar_op.c_code_cache_version()
if ver:
return (20, self.verbose, self.sync, ver)
else:
return ver
def __init__(self, scalar_op, sync=True, inplace_pattern=None):
if inplace_pattern is None:
inplace_pattern = {}
try:
code = scalar_op.c_support_code_apply(None, "nodename")
if code:
raise SupportCodeError(scalar_op)
except gof.utils.MethodNotDefined:
pass
self.scalar_op = scalar_op
self.sync = sync
self.inplace_pattern = inplace_pattern
def c_src_kernel(self, node, nodename, nd):
sio = StringIO()
# print 'C_SRC_KERNEL', sio.getvalue()
print("// %s" % str(node.op), file=sio)
print("// node.op.destroy_map=%s" % str(
getattr(node.op, 'destroy_map', None)), file=sio)
for ipos, i in enumerate(node.inputs):
print("// Input ", ipos, str(i.type), file=sio)
for ipos, i in enumerate(node.outputs):
print("// Output ", ipos, str(i.type), file=sio)
print("static __global__ void kernel_%s_%s_%s(unsigned int numEls" % (
self.scalar_op.__class__.__name__, nodename, nd), file=sio)
if (nd):
print("\t,", ", ".join("const int dim%i" % i
for i in xrange(nd)), file=sio)
# declare inputs
for ipos, i in enumerate(node.inputs):
s = ", ".join(["const float * i%i_data" % ipos] +
["int i%i_str_%i" % (ipos, d) for d in xrange(nd)])
print("\t,", s, file=sio)
# declare outputs
for ipos, i in enumerate(node.outputs):
s = ", ".join(["float * o%i_data" % ipos] +
["int o%i_str_%i" % (ipos, d) for d in xrange(nd)])
print("\t,", s, file=sio)
# print >> sio, "\t,", ", ".join("int o%i_str_%i" % (ipos, d) for d in xrange(nd))
# print >> sio, "\t,", "float * o%i_data" % ipos
print("\t)\n{", file=sio)
print(" const int idx = blockIdx.x * blockDim.x + threadIdx.x;", file=sio)
print(" const int numThreads = blockDim.x * gridDim.x;", file=sio)
# For each input that is a scalar which has been broadcasted to a tensor,
# load it into a local variable
for ipos, i in enumerate(node.inputs):
if _logical_scalar(i):
print(" const float ii_i%i_value = i%i_data[0];" % (ipos, ipos), file=sio)
# loop over the elements to be treated by this kernel call
print(" for (int i = idx; i < numEls; i += numThreads) {", file=sio)
# calculate the data pointers for all arguments
print(" int ii = i;", file=sio)
for ipos, i in enumerate(node.inputs):
if not _logical_scalar(i):
print(" const float * ii_i%i_data = i%i_data;" % (ipos, ipos), file=sio)
for ipos, i in enumerate(node.outputs):
print(" float * ii_o%i_data = o%i_data;" % (ipos, ipos), file=sio)
for d in xrange(nd - 1, -1, -1):
if d > 0:
print(" int pos%i = ii %% dim%i;" % (d, d), file=sio)
print(" ii = ii / dim%i;" % d, file=sio)
else:
print(" int pos%i = ii;" % d, file=sio)
for ipos, i in enumerate(node.inputs):
if not _logical_scalar(i):
print(" ii_i%i_data += pos%i * i%i_str_%i;" % (ipos, d, ipos, d), file=sio)
for ipos, i in enumerate(node.outputs):
print(" ii_o%i_data += pos%i * o%i_str_%i;" % (ipos, d, ipos, d), file=sio)
# perform the scalar operation on the input and output references
# TODO: What if the scalar_op needs support_code??
for ipos, i in enumerate(node.outputs):
print("npy_%s o%d_i;" % (i.dtype, ipos), file=sio)
task_code = self.scalar_op.c_code(
Apply(self.scalar_op,
[scalar.Scalar(dtype=input.type.dtype).make_variable()
for input in node.inputs],
[scalar.Scalar(dtype=output.type.dtype).make_variable()
for output in node.outputs]),
nodename + '_scalar_',
get_str_list_logical_scalar(node),
['o%i_i' % ipos for ipos, i in enumerate(node.outputs)],
sub=dict(fail='return;')) # TODO: set a failure code somehow!!!
print(" ", task_code, file=sio)
for ipos, _ in enumerate(node.outputs):
print("ii_o%i_data[0] = o%i_i;" % (ipos, ipos), file=sio)
print(" }", file=sio)
# indent = " "*(4*d+7)
# for ipos, i in enumerate(node.inputs):
# print >> sio, indent, "const float * i%i" % ipos, '= i%i_data', ''
print("}", file=sio)
# print sio.getvalue()
return sio.getvalue()
def c_src_kernel_tiling(self, node, nodename):
"""
The kernel applies to problems with <= 5 dimensions.
"""
# The kernel is intended to be structured roughly like this:
"""
static __global__ void kernel()
{
for (int v = blockIdx.y; v < dim0; v += gridDim.x)
{
for (int w = blockIdx.y; w < dim1; w += gridDim.y)
{
for (int x = threadIdx.x; x < dim2; x += blockDim.x)
{
for (int y = threadIdx.y; y < dim3; y += blockDim.y)
{
for (int z = threadIdx.z; z < dim4; z += blockDim.z)
{
out[v * out_stride[0] + ...] = f(in1[...], in2[...])
}
}
}
}
}
}
"""
nd = node.outputs[0].type.ndim
sio = StringIO()
# print 'C_SRC_KERNEL', sio.getvalue()
if nd in (4,):
# print some leading comments to make the code easier to read
print("// %s" % str(node.op), file=sio)
print("// node.op.destroy_map=%s" % str(
getattr(node.op, 'destroy_map', None)), file=sio)
for ipos, i in enumerate(node.inputs):
print("// Input ", ipos, str(i.type), file=sio)
for ipos, i in enumerate(node.outputs):
print("// Output ", ipos, str(i.type), file=sio)
print(
"static __global__ void kernel_%s_%s_%s(unsigned int numEls" %
(self.scalar_op.__class__.__name__,
nodename,
'tiling%i' % nd), file=sio)
if (nd):
print("\t,", ", ".join("const int dim%i" % i for i in xrange(nd)), file=sio)
# declare inputs
for ipos, i in enumerate(node.inputs):
s = ", ".join(["const float * i%i_data" % ipos] + list("int i%i_str_%i" % (ipos, d) for d in xrange(nd)))
print("\t,", s, file=sio)
# declare outputs
for ipos, i in enumerate(node.outputs):
s = ", ".join(["float * o%i_data" % ipos] + list("int o%i_str_%i" % (ipos, d) for d in xrange(nd)))
print("\t,", s, file=sio)
# print >> sio, "\t,", ", ".join("int o%i_str_%i" % (ipos, d) for d in xrange(nd))
# print >> sio, "\t,", "float * o%i_data" % ipos
print("\t)\n{", file=sio)
# For each input that is a scalar which has been broadcasted to a tensor,
# load it into a local variable
print(" __shared__ float value0[%i];" % len(node.inputs), file=sio)
print(" __shared__ int shared_dims[%(nd)s];" % locals(), file=sio)
# print >> sio, " __shared__ int shared_i_str[%(n_in)s][%(nd)s]"
print(" if ((threadIdx.x == 0) && (threadIdx.y == 0)) {", file=sio)
for ipos, i in enumerate(node.inputs):
if _logical_scalar(i):
print(" value0[%i] = i%i_data[0];" % (ipos, ipos), file=sio)
for ipos in xrange(nd):
print(" shared_dims[%i] = dim%i;" % (ipos, ipos), file=sio)
print(" }", file=sio)
print(" __syncthreads();", file=sio)
if (nd == 4):
print("""
for (int pos0 = blockIdx.x; pos0 < shared_dims[0]; pos0 += gridDim.x)
{
for (int pos1 = blockIdx.y; pos1 < shared_dims[1]; pos1 += gridDim.y)
{
//for (int pos2 = threadIdx.x; pos2 < shared_dims[2]; pos2 += blockDim.x)
for (int pos2 = threadIdx.y; pos2 < shared_dims[2]; pos2 += blockDim.y)
{
//for (int pos3 = threadIdx.y; pos3 < shared_dims[3]; pos3 += blockDim.y)
for (int pos3 = threadIdx.x; pos3 < shared_dims[3]; pos3 += blockDim.x)
{
""", file=sio)
else:
raise NotImplementedError()
for ipos, i in enumerate(node.inputs):
if not _logical_scalar(i):
print(" const float * ii_i%i_data = i%i_data;" % (ipos, ipos), file=sio)
for ipos, i in enumerate(node.outputs):
print(" float * ii_o%i_data = o%i_data;" % (ipos, ipos), file=sio)
for d in xrange(nd):
for ipos, i in enumerate(node.inputs):
if not _logical_scalar(i):
print(" ii_i%i_data += pos%i * i%i_str_%i;" % (ipos, d, ipos, d), file=sio)
for ipos, i in enumerate(node.outputs):
print(" ii_o%i_data += pos%i * o%i_str_%i;" % (ipos, d, ipos, d), file=sio)
# perform the scalar operation on the input and output references
# TODO: What if the scalar_op needs support_code??
task_code = self.scalar_op.c_code(
Apply(
self.scalar_op,
[scalar.Scalar(
dtype=input.type.dtype).make_variable()
for input in node.inputs],
[scalar.Scalar(
dtype=output.type.dtype).make_variable()
for output in node.outputs]),
nodename + '_scalar_',
get_str_list_logical_scalar(node, value_str='value0[%i]'),
['ii_o%i_data[0]' % ipos for ipos, i in enumerate(node.outputs)],
sub=dict(fail='return;')) # TODO: set a failure code somehow!!!
print(" ", task_code, file=sio)
print(" }" * nd, file=sio)
# TODO: insert runtime stride checks that select the best loop order either here, or in
# the host code that launched the kernel (host code probably better spot)
# indent = " "*(4*d+7)
# for ipos, i in enumerate(node.inputs):
# print >> sio, indent, "const float * i%i" % ipos, '= i%i_data', ''
print("}", file=sio)
print(sio.getvalue())
return sio.getvalue()
def c_src_kernel_tiling_less_registers(self, node, nodename):
"""
The kernel applies to problems with <= 5 dimensions.
"""
nd = node.outputs[0].type.ndim
n_in = len(node.inputs)
n_out = len(node.outputs)
sio = StringIO()
if nd not in (2,):
return sio.getvalue()
# print some leading comments to make the code easier to read
print("// %s" % str(node.op), file=sio)
print("// node.op.destroy_map=%s" % str(
getattr(node.op, 'destroy_map', None)), file=sio)
for ipos, i in enumerate(node.inputs):
print("// Input ", ipos, str(i.type), file=sio)
for ipos, i in enumerate(node.outputs):
print("// Output ", ipos, str(i.type), file=sio)
print(
"static __global__ void kernel_%s_%s_%s(unsigned int numEls" %
(self.scalar_op.__class__.__name__,
nodename,
'tiling%i_less_registers' % nd), file=sio)
if (nd):
print("\t,", ", ".join("const int dim%i" % i for i in xrange(nd)), file=sio)
# declare inputs
for ipos, i in enumerate(node.inputs):
s = ", ".join(["const float * i%i_data_0" % ipos] + list("int i%i_str_%i" % (ipos, d) for d in xrange(nd)))
print("\t,", s, file=sio)
# declare outputs
for ipos, i in enumerate(node.outputs):
s = ", ".join(["float * o%i_data_0" % ipos] + list("int o%i_str_%i" % (ipos, d) for d in xrange(nd)))
print("\t,", s, file=sio)
# print >> sio, "\t,", ", ".join("int o%i_str_%i" % (ipos, d) for d in xrange(nd))
# print >> sio, "\t,", "float * o%i_data" % ipos
print("\t)\n{", file=sio)
# TODO: Setting these to true makes the function fail SOMETIMES. I don't know why yet.
use_shared_stride = False
use_shared_limits = False
def decl_limits(nd):
if use_shared_limits:
print("__shared__ float * limits[%(nd)s];" % locals(), file=sio)
def stride(io, p, d):
if use_shared_stride:
return "s%s_str[%i][%i]" % (io, p, d)
else:
return "%s%i_str_%i" % (io, p, d)
def limits(d):
if use_shared_limits:
return "limits[%i]" % d
else:
return "limits%i" % d
def decl_shared_stride(nin, nout, nd):
if not use_shared_stride:
return
print("""
__shared__ int si_str[%(nin)s][%(nd)s];
__shared__ int so_str[%(nout)s][%(nd)s];
if ((threadIdx.x == 0) && (threadIdx.y == 0)) {
""" % locals(), file=sio)
for i in xrange(nin):
for d in xrange(nd):
print("si_str[%(i)s][%(d)s] = i%(i)s_str_%(d)s;" % locals(), file=sio)
for i in xrange(n_out):
for d in xrange(nd):
print("so_str[%(i)s][%(d)s] = o%(i)s_str_%(d)s;" % locals(), file=sio)
print("} __syncthreads();", file=sio)
def calc_limit(d):
s = stride('o', 0, d)
lname = limits(d)
if use_shared_limits:
print("if ((threadIdx.x == 0) && (threadIdx.y == 0)) {", file=sio)
if d == 0:
print("%(lname)s = o0_data_0 + dim%(d)s * %(s)s;" % locals(), file=sio)
else:
dm1 = d - 1
print("%(lname)s = o0_data_%(dm1)s + dim%(d)s * %(s)s;" % locals(), file=sio)
print("} __syncthreads();", file=sio)
else:
if d == 0:
print("const float * %(lname)s = o0_data_0 + dim%(d)s * %(s)s;" % locals(), file=sio)
else:
dm1 = d - 1
print("const float * %(lname)s = o0_data_%(dm1)s + dim%(d)s * %(s)s;" % locals(), file=sio)
def decl_ptrs(d, offset):
dm1 = d - 1
assert dm1 >= 0
for i in xrange(n_in):
s = stride('i', i, d)
print("const float * i%(i)s_data_%(d)s = i%(i)s_data_%(dm1)s + %(offset)s * %(s)s;" % locals(), file=sio)
for i in xrange(n_out):
s = stride('o', i, d)
print("float * o%(i)s_data_%(d)s = o%(i)s_data_%(dm1)s + %(offset)s * %(s)s;" % locals(), file=sio)
def inc_ptrs(d, amt):
for i in xrange(n_in):
s = stride('i', i, d)
print("i%(i)s_data_%(d)s += %(amt)s * %(s)s;" % locals(), file=sio)
for i in xrange(n_out):
s = stride('o', i, d)
print("o%(i)s_data_%(d)s += %(amt)s * %(s)s;" % locals(), file=sio)
def while_limit(d):
lname = limits(d)
print("while (o0_data_%(d)s < %(lname)s) { " % locals(), file=sio)
def end_while(d):
print("}", file=sio)
def task_code(d):
print(self.scalar_op.c_code(
Apply(
self.scalar_op,
[scalar.Scalar(dtype=input.type.dtype).make_variable()
for input in node.inputs],
[scalar.Scalar(dtype=output.type.dtype).make_variable()
for output in node.outputs]),
nodename + '_scalar_',
['i%i_data_%i[0]' % (ipos, d) for ipos,
i in enumerate(node.inputs)],
['o%i_data_%i[0]' % (ipos, d) for ipos,
i in enumerate(node.outputs)],
sub=dict(fail='return;')), file=sio)
# TODO: set a failure code somehow!!!
if nd == 4:
decl_shared_stride(n_in, n_out, nd)
decl_limits(nd)
calc_limit(0)
inc_ptrs(0, 'blockIdx.x')
while_limit(0)
if 1:
calc_limit(1)
decl_ptrs(1, 'blockIdx.y')
while_limit(1)
if 1:
calc_limit(2)
decl_ptrs(2, 'threadIdx.y')
while_limit(2)
if 1:
calc_limit(3)
decl_ptrs(3, 'threadIdx.x')
while_limit(3)
if 1:
task_code(3)
inc_ptrs(3, 'blockDim.x')
end_while(3)
inc_ptrs(2, 'blockDim.y')
end_while(2)
inc_ptrs(1, 'gridDim.y')
end_while(1)
inc_ptrs(0, 'gridDim.x')
end_while(0)
print("}", file=sio)
print(sio.getvalue())
return sio.getvalue()
def c_src_kernel_Ccontiguous(self, node, nodename):
sio = StringIO()
# print 'C_SRC_KERNEL', sio.getvalue()
print("// %s" % str(node.op), file=sio)
print("// node.op.destroy_map=%s" % str(
getattr(node.op, 'destroy_map', None)), file=sio)
for ipos, i in enumerate(node.inputs):
print("// Input ", ipos, str(i.type), file=sio)
for ipos, i in enumerate(node.outputs):
print("// Output ", ipos, str(i.type), file=sio)
print("static __global__ void kernel_%s_%s_Ccontiguous (unsigned int numEls" % (self.scalar_op.__class__.__name__, nodename), file=sio)
# declare inputs
for ipos, i in enumerate(node.inputs):
print("\t,", "const float * i%i_data" % ipos, file=sio)
# declare outputs
for ipos, i in enumerate(node.outputs):
print("\t,", "float * o%i_data" % ipos, file=sio)
print("\t)\n{", file=sio)
print(" const int idx = blockIdx.x * blockDim.x + threadIdx.x;", file=sio)
print(" const int numThreads = blockDim.x * gridDim.x;", file=sio)
# For each input that is a scalar which has been broadcasted to a tensor,
# load it into a local variable
for ipos, i in enumerate(node.inputs):
if _logical_scalar(i):
print(" const float ii_i%i_value = i%i_data[0];" % (ipos, ipos), file=sio)
# loop over the elements to be treated by this kernel call
print(" for (int i = idx; i < numEls; i += numThreads) {", file=sio)
# perform the scalar operation on the input and output references
# TODO: What if the scalar_op needs support_code??
for ipos, i in enumerate(node.outputs):
print("npy_%s o%d_i;" % (i.dtype, ipos), file=sio)
task_code = self.scalar_op.c_code(
Apply(
self.scalar_op,
[scalar.Scalar(dtype=input.type.dtype).make_variable()
for input in node.inputs],
[scalar.Scalar(dtype=output.type.dtype).make_variable()
for output in node.outputs]),
nodename + '_scalar_',
# , ['i%i_data[i]'%ipos for ipos,
# i in enumerate(node.inputs)]
get_str_list_logical_scalar(node, data_str='i%i_data[i]'),
['o%i_i' % ipos for ipos, i in enumerate(node.outputs)],
sub=dict(fail='return;'))
# TODO: set a failure code somehow!!!
print(" ", task_code, file=sio)
for ipos, _ in enumerate(node.outputs):
print("o%i_data[i] = o%i_i;" % (ipos, ipos), file=sio)
print(" }", file=sio)
print("}", file=sio)
# print sio.getvalue()
return sio.getvalue()
def c_src_callkernel(self, node, nodename):
#
# This function serves three main goals:
#
# The first is stride unpacking:
# it accepts input and output arguments as
# float * , int*
# pairs, and it constructs a kernel function call where inputs and arguments are named
# like
# float *, int, int, int ...
#
# The second is to recognize when any dimensions can be collapsed as
# being contiguous. That means that we can merge those dimensions with
# another one for all inputs/outputs and get the same results
# (confusing... read the code).
#
# The third is to make a special case for scalar elements. We allow collapsing them.
# In the c-contiguous and non-contiguous cases, we use registers to lower the number of memory accesses.
# TODO: make a special case for broadcasting, to store the data in shared memory.
nd = node.outputs[0].type.ndim
nb_inputs = len(node.inputs)
nb_outputs = len(node.outputs)
d = dict()
# input_params and output_params go into the function declaration/definition
input_params = ", ".join(
"const float * i%i_data, const int * i%i_str" % (ipos, ipos)
for ipos in xrange(len(node.inputs)))
output_params = ", ".join(
"float * o%i_data, const int * o%i_str" % (ipos, ipos)
for ipos in xrange(len(node.outputs)))
# input_args and output_args go into the recursive call.
input_args = ", ".join("i%i_data, i%i_str" % (ipos, ipos)
for ipos in xrange(len(node.inputs)))
output_args = ", ".join("o%i_data, o%i_str" % (ipos, ipos)
for ipos in xrange(len(node.outputs)))
prod_dims = '*'.join(
["dims[%i]" % di for di in xrange(nd)] + ['1'])
scalar_op = self.scalar_op.__class__.__name__
sio = StringIO()
print("""
static void can_collapse_%(nodename)s(int nd, const int * dims, const int * strides, int collapse[])
{
//can we collapse dims[i] and dims[i-1]
for(int i=nd-1;i>0;i--){
if(strides[i]*dims[i]==strides[i-1]){//the dims nd-1 are not strided again dimension nd
collapse[i]=1;
}else collapse[i]=0;
}
}
""" % locals(), file=sio)
print("""
static int callkernel_%(nodename)s(unsigned int numEls, const int d,
const int * dims,
%(input_params)s,
%(output_params)s)
{
numEls = %(prod_dims)s;
""" % locals(), file=sio)
if self.verbose:
print("""
std::cerr << "calling kernel_%(scalar_op)s_%(nodename)s w numEls" << numEls << " dims"<< d << "\\n";
""" % locals(), file=sio)
print(
'std::cerr << ' + " << ' ' << ".join(
['" "'] +
list("dims[%i]" % di for di in xrange(nd)) +
["'\\n';"]),
file=sio)
if self.verbose > 1:
for ipos in xrange(len(node.inputs)):
istrings = [
"i%s_str[%i]" % (ipos, di) for di in xrange(nd)]
ipositions = " << ' ' << ".join(
["i%s_data" % ipos] + istrings)
print("""
std::cerr << " %(ipos)s data strides" << %(ipositions)s << "\\n";
""" % dict(ipos=ipos, ipositions=ipositions), file=sio)
for ipos in xrange(len(node.outputs)):
print("""
std::cerr << " %(ipos)s data strides" <<
""" % locals() + " << ' ' << ".join(
["o%s_data" % ipos] +
list(
"o%s_str[%i]" % (ipos, di) for di in xrange(nd)
)) +
''' << "\\n"; ''', file=sio)
# collapse dimension that are broadcast in all inputs.
# need to be done before contiguous collapse as it will break it.
# do the dimensions and the strides
if nd > 0:
print("int local_dims[%(nd)s];" % locals(), file=sio)
else:
print("int *local_dims=NULL;", file=sio)
if nb_inputs > 0 and nd > 0:
print("""
int local_str[%(nb_inputs)s][%(nd)s];
int local_ostr[%(nb_outputs)s][%(nd)s];
""" % locals(), file=sio)
else:
print("""
int local_str[1][1];
int local_ostr[1][1];
""", file=sio)
print("""
int nd_collapse = %(nd)s;
for(int i=0;i<%(nd)s;i++){//init new dim
local_dims[i]=dims[i];
}
""" % locals(), file=sio)
for ipos in xrange(len(node.inputs)):
print("""
for(int i=0;i<%(nd)s;i++){//init new strides
local_str[%(ipos)s][i]=i%(ipos)s_str[i];
}
""" % locals(), file=sio)
for ipos in xrange(len(node.outputs)):
print("""
for(int i=0;i<%(nd)s;i++){//init new strides
local_ostr[%(ipos)s][i]=o%(ipos)s_str[i];
}
""" % locals(), file=sio)
if self.verbose > 2:
print('std::cerr <<"before broadcast collapse\\n";', file=sio)
print('std::cerr<< "nd_collapse "<< nd_collapse << "\\n"; ', file=sio)
print('std::cerr << "local_dims";', file=sio)
for d in xrange(nd):
print('std::cerr << " " << local_dims[%(d)s]; ' % locals(), file=sio)
print('std::cerr << "\\n";', file=sio)
if nd > 0:
for ipos in xrange(len(node.inputs)):
print(
'std::cerr << " local_str inputs %(ipos)s: " <<' % locals() +
' << " " << '.join(["local_str[%s][%s]" % (ipos, x)
for x in xrange(nd)]) +
'<<"\\n";', file=sio)
for ipos in xrange(len(node.outputs)):
print(
'std::cerr << " local_ostr inputs %(ipos)s: " <<' %
locals() +
' << " " << '.join(
["local_ostr[%s][%s]" %
(ipos, x) for x in xrange(nd)]) +
'<<"\\n";', file=sio)
print("""
for(int id=0;id<nd_collapse;id++){
bool all_broadcast=true;
for(int input_id=0;input_id<%(nb_inputs)s;input_id++){
if(local_str[input_id][id]!=0 || local_dims[id]!=1) all_broadcast= false;
}
for(int input_id=0;input_id<%(nb_outputs)s;input_id++){
if(local_ostr[input_id][id]!=0 || local_dims[id]!=1) all_broadcast= false;
}
if(all_broadcast){
for(int j=id+1;j<nd_collapse;j++)//remove dims i from the array
local_dims[j-1]=local_dims[j];
for(int input_id=0;input_id<%(nb_inputs)s;input_id++){
for(int j=id+1;j<nd_collapse;j++){//remove dims i from the array
local_str[input_id][j-1]=local_str[input_id][j];
}
}
for(int output_id=0;output_id<%(nb_outputs)s;output_id++){
for(int j=id+1;j<nd_collapse;j++){//remove dims i from the array
local_ostr[output_id][j-1]=local_ostr[output_id][j];
}
}
nd_collapse--; id--;
}
}
""" % locals(), file=sio)
if self.verbose > 2:
print('std::cerr <<"after broadcast collapse\\n";', file=sio)
print('std::cerr<< "nd_collapse "<< nd_collapse << "\\n"; ', file=sio)
print('std::cerr << "local_dims";', file=sio)
for d in xrange(nd):
print('std::cerr << " " << local_dims[%(d)s]; ' %
locals(), file=sio)
print('std::cerr << "\\n";', file=sio)
if nd > 0:
for ipos in xrange(len(node.inputs)):
print('std::cerr << " local_str %(ipos)s: " <<' %
locals() + ' << " " << '.join(
["local_str[%s][%s]" %
(ipos, x) for x in xrange(nd)]) +
'<<"\\n";', file=sio)
for ipos in xrange(len(node.outputs)):
print(
'std::cerr << " local_ostr %(ipos)s: " <<' %
locals() + ' << " " << '.join(
["local_ostr[%s][%s]" %
(ipos, x) for x in xrange(nd)]) +
'<<"\\n";', file=sio)
# collapse contiguous dimensions (ignoring scalars, generic version(collapse any dimensions, right, left, middle))
# this is a good idea because we make less index calculation in the gpu.
if nd > 0:
print("int nd_collapse_[%(nd)s] = {" %
locals() + ','.join(
['1' for x in xrange(nd)]) + "};", file=sio)
else:
print("int *nd_collapse_ = NULL;", file=sio)
for ipos in xrange(len(node.inputs)):
if not _logical_scalar(node.inputs[ipos]):
if nd > 0:
print("""
int nd_collapse_%(ipos)s[%(nd)s] = {""" %
locals() +
','.join(['1' for x in xrange(nd)]) +
"};", file=sio)
else:
print("""
int * nd_collapse_%(ipos)s = NULL;""" %
locals(), file=sio)
print("""
can_collapse_%(nodename)s(nd_collapse, local_dims, local_str[%(ipos)s], nd_collapse_%(ipos)s);
for(int i=0;i<nd_collapse;i++){
if(nd_collapse_%(ipos)s[i]==0)
nd_collapse_[i]=0;
}
""" % locals(), file=sio)
if self.verbose > 1:
print("""
std::cerr<< "nd_collapse_%(ipos)s "<<
""" % locals(), file=sio)
print(' << " " << '.join(["nd_collapse_ %s[" %
ipos + str(i) + "]" for i in xrange(nd)]),
file=sio)
print('<< "\\n";', file=sio)
# update the local stride.
for ipos in xrange(len(node.inputs)):
print("""
for(int i=nd_collapse-1;i>0;i--){
if(nd_collapse_[i]==1){
local_str[%(ipos)s][i-1]=local_str[%(ipos)s][i];//set new strides
for(int j=i+1;j<nd_collapse;j++)//remove stride i from the array
local_str[%(ipos)s][j-1]=local_str[%(ipos)s][j];
}
}
""" % locals(), file=sio)
for ipos in xrange(len(node.outputs)):
print("""
for(int i=nd_collapse-1;i>0;i--){
if(nd_collapse_[i]==1){
local_ostr[%(ipos)s][i-1]=local_ostr[%(ipos)s][i];//set new strides
for(int j=i+1;j<nd_collapse;j++)//remove stride i from the array
local_ostr[%(ipos)s][j-1]=local_ostr[%(ipos)s][j];
}
}
""" % locals(), file=sio)
# update the local dims.
print("""
for(int i=nd_collapse-1;i>0;i--){
if(nd_collapse_[i]==1){
local_dims[i-1]*=local_dims[i];//set new dims
for(int j=i+1;j<nd_collapse;j++)//remove dims i from the array
local_dims[j-1]=local_dims[j];
}
}
""" % locals(), file=sio)
# update the new number of dim
print("""
for(int i=1, end=nd_collapse;i<end;i++){
if(nd_collapse_[i]==1)nd_collapse--;
}
if(nd_collapse == 1 """ % locals(), file=sio)
l = ["local_str[%s][nd_collapse-1]==1 " %
ipos for ipos in xrange(len(node.inputs)) if not
_logical_scalar(node.inputs[ipos])]
l += ["local_ostr[%s][nd_collapse-1]==1 " %
ipos for ipos in xrange(len(node.outputs)) if not
_logical_scalar(node.outputs[ipos])]
if len(l) > 0:
print(" && ", " && ".join(l), file=sio)
print("""){nd_collapse=0;} """, file=sio)
if self.verbose:
print('std::cerr <<"after can_collapse\\n";', file=sio)
print("""std::cerr << "nd_collapse " << nd_collapse << "\\n"; """ % locals(), file=sio)
if self.verbose > 1:
for d in xrange(nd):
print('std::cerr << " " << local_dims[%(d)s]; ' %
locals(),
file=sio)
print('std::cerr << "\\n";', file=sio)
if nd > 0:
for ipos in xrange(len(node.inputs)):
print(
'std::cerr << " local_str % (ipos)s: " <<' %
locals() + ' << " " << '.join(
["local_str[%s][%s]" %
(ipos, x) for x in xrange(nd)]) +
'<<"\\n";', file=sio)
for ipos in xrange(len(node.outputs)):
print('std::cerr << " local_ostr % (ipos)s: " <<' %
locals() + ' << " " << '.join(
["local_ostr[%s][%s]" %
(ipos, x) for x in xrange(nd)]) +
'<<"\\n";', file=sio)
def launch_Ccontiguous(nodename, scalar_op, sync=True):
kernel_call_args = ["numEls"]
for ipos in xrange(len(node.inputs)):
kernel_call_args.append("i%i_data" % ipos)
for ipos in xrange(len(node.outputs)):
kernel_call_args.append("o%i_data" % ipos)
kernel_call_args = ", ".join(kernel_call_args)
verb = ""
if self.verbose:
verb = 'std::cerr << " Running ccontiguous version\\n";'
print("""
//first use at least a full warp
int threads_per_block = std::min(numEls, (unsigned int)32); //WARP SIZE
//next start adding multiprocessors
int n_blocks = std::min(numEls/threads_per_block + (numEls %% threads_per_block?1:0), (unsigned int)30); // UP TO NUMBER OF MULTIPROCESSORS
// next start adding more warps per multiprocessor
if (threads_per_block * n_blocks < numEls)
threads_per_block = std::min(numEls/n_blocks, (unsigned int)NUM_VECTOR_OP_THREADS_PER_BLOCK);
kernel_%(scalar_op)s_%(nodename)s_Ccontiguous<<<n_blocks, threads_per_block>>>(%(kernel_call_args)s);
//std::cerr << "calling callkernel returned\\n";
""" % locals(), file=sio)
if sync:
print("""
CNDA_THREAD_SYNC;
cudaError_t err = cudaGetLastError();
if( cudaSuccess != err)
{
PyErr_Format(PyExc_RuntimeError, "Cuda error: %%s: %%s.\\n n_blocks=%%i threads_per_block=%%i\\n Call: %%s\\n",
"GpuElemwise %(nodename)s %(scalar_op)s", cudaGetErrorString(err),
n_blocks, threads_per_block,
"kernel_%(scalar_op)s_%(nodename)s_Ccontiguous<<<n_blocks, threads_per_block>>>(%(kernel_call_args)s)");
return -1;
}
%(verb)s
return 0;
""" % locals(), file=sio)
else:
print(" return 0; " % locals(), file=sio)
def launch_General(nodename, scalar_op, force_nd, sync=True):
# kernel_call_args are used to invoke the cuda kernel
local = "local_"
kernel_call_args = ["numEls"]
kernel_call_args.extend(
local + "dims[%i]" %
di for di in xrange(force_nd))
for ipos in xrange(len(node.inputs)):
kernel_call_args += ["i%i_data" % ipos] + list(
local + "str[%i][%i]" %
(ipos, di) for di in xrange(force_nd))
# strides = ", ".join("i%i_str[%i]"%(ipos, di) for di in xrange(force_nd))
# kernel_call_args.append( "%s, i%i_data" % (strides, ipos))
for ipos in xrange(len(node.outputs)):
kernel_call_args += ["o%i_data" % ipos] + list(
local + "ostr[%i][%i]" %
(ipos, di) for di in xrange(force_nd))
# strides = ", ".join("o%i_str[%i]"%(ipos, di) for di in xrange(force_nd))
# kernel_call_args.append( "%s, o%i_data" % (strides, ipos))
if self.verbose:
print("""
std::cerr << " Running general version with %(force_nd)s dims\\n";
""" % locals(), file=sio)
print("std::cerr << " + ' << " " << '.join(
kernel_call_args) + ' << "\\n";', file=sio)
# std::cerr << numEls << dims[0] << i0_data, i0_str[0] << o0_data, o0_str[0]\n;
kernel_call_args = ", ".join(kernel_call_args)
print("""
//first use at least a full warp
int threads_per_block = std::min(numEls, (unsigned int)32); //WARP SIZE
//next start adding multiprocessors
int n_blocks = std::min(numEls/threads_per_block + (numEls %% threads_per_block?1:0), (unsigned int)30); // UP TO NUMBER OF MULTIPROCESSORS
// next start adding more warps per multiprocessor
if (threads_per_block * n_blocks < numEls)
threads_per_block = std::min(numEls/n_blocks, (unsigned int)NUM_VECTOR_OP_THREADS_PER_BLOCK);
kernel_%(scalar_op)s_%(nodename)s_%(force_nd)s<<<n_blocks, threads_per_block>>>(%(kernel_call_args)s);
""" % locals(), file=sio)
if sync:
print("""
CNDA_THREAD_SYNC;
cudaError_t err = cudaGetLastError();
if( cudaSuccess != err)
{
PyErr_Format(PyExc_RuntimeError, "Cuda error: %%s: %%s.\\n n_blocks=%%i threads_per_block=%%i\\n Call: %%s\\n",
"GpuElemwise %(nodename)s %(scalar_op)s", cudaGetErrorString(err),
n_blocks, threads_per_block,
"kernel_%(scalar_op)s_%(nodename)s_Ccontiguous<<<n_blocks, threads_per_block>>>(%(kernel_call_args)s)");
return -1;
}
return 0;
""" % locals(), file=sio)
else:
print(" return 0; " % locals(), file=sio)
print("if(numEls==0) return 0;", file=sio)
print("switch (nd_collapse==0?0:min(%(nd)s,nd_collapse)) {" %
locals(), file=sio)
print("case 0: {", file=sio)
launch_Ccontiguous(nodename, scalar_op, self.sync)
print(" } break;", file=sio)
for i in xrange(1, nd + 1):
print("case " + str(i) + ": {", file=sio)
launch_General(nodename, scalar_op, i, self.sync)
print(" } break;", file=sio)
print("}", file=sio) # end case
print("return -2;", file=sio) # should not get to this point
print("}", file=sio) # end fct
# N.B. cudaGetLastError is called by c_code
return sio.getvalue()
def c_support_code_apply(self, node, nodename):
nd = node.outputs[0].type.ndim
defines = """
#define INTDIV_POW2(a, b) (a >> b)
#define INTMOD_POW2(a, b) (a & ((1<<b)-1))
"""
kernels = "".join(
[self.c_src_kernel(node, nodename, x)
for x in xrange(1, nd + 1)] +
[self.c_src_kernel_Ccontiguous(node, nodename)] +
[self.c_src_callkernel(node, nodename)])
return defines + kernels
def c_support_code(self):
return self.scalar_op.c_support_code()
def c_code(self, node, nodename, inputs, outputs, sub):
d = dict(sub)
nd = node.outputs[0].type.ndim
d.update(locals())
sio = StringIO()
nin = len(inputs)
nout = len(outputs)
fail = sub['fail']
opname = str(self.scalar_op)
initial_dims = ','.join('1' for i in xrange(nd))
if 1 or self.scalar_op == scalar.pow:
print("""
//std::cerr << "C_CODE %(opname)s START\\n";
//standard elemwise size checks
""" % locals(), file=sio)
if nd > 0:
print("""
int dims[%(nd)s] = {%(initial_dims)s};
""" % locals(), file=sio)
else:
print("""
int *dims = NULL;
""", file=sio)
# check that all inputs have valid dimensions
emitted_inames = {}
for id, iname in enumerate(inputs):
if iname in emitted_inames:
assert emitted_inames[iname] is node.inputs[id]
continue
# with python 2.4 (at least), if a broadcastable pattern is made of
# numpy.bool_ instead of bool, calling int() once is not enough.
broadcasts = map(int, map(int, node.inputs[id].broadcastable))
broadcasts = ', '.join(map(str, broadcasts))
nd = node.inputs[id].ndim
if nd > 0:
print("""
int broadcasts_%(iname)s[%(nd)s] = {%(broadcasts)s};
""" % locals(), file=sio)
else:
print("""
int *broadcasts_%(iname)s = NULL;
""" % locals(), file=sio)
emitted_inames[iname] = node.inputs[id]
# check that all inputs have valid dimensions
emitted_inames = {}
for id, iname in enumerate(inputs):
if iname in emitted_inames:
continue
print("""
//std::cerr << "C_CODE %(opname)s checking input %(iname)s\\n";
if (%(nd)s != %(iname)s->nd)
{
PyErr_Format(PyExc_TypeError,
"need %(nd)s dims, not %%i", %(iname)s->nd);
%(fail)s;
}
for (int i = 0; i< %(nd)s; ++i)
{
dims[i] = (dims[i] == 1) ? CudaNdarray_HOST_DIMS(%(iname)s)[i] : dims[i];
if ((!(broadcasts_%(iname)s[i] &&
CudaNdarray_HOST_DIMS(%(iname)s)[i] == 1)) &&
(dims[i] != CudaNdarray_HOST_DIMS(%(iname)s)[i]))
{
//std::cerr << "C_CODE %(opname)s checking input %(iname)s failed\\n";
PyErr_Format(PyExc_ValueError,
"GpuElemwise. Input dimension mis-match. Input"
" %(id)d (indices start at 0) has shape[%%i] == %%i"
", but the output's size on that axis is %%i.",
i,
CudaNdarray_HOST_DIMS(%(iname)s)[i],
dims[i]
);
%(fail)s;
}
}
""" % locals(), file=sio)
emitted_inames[iname] = True
# check that all outputs have valid dimensions
for idx, oname in enumerate(outputs):
if idx not in self.inplace_pattern.keys():
print("""
for (int i = 0; (i< %(nd)s) && (%(oname)s); ++i) {
if (dims[i] != CudaNdarray_HOST_DIMS(%(oname)s)[i])
{
Py_DECREF(%(oname)s);
%(oname)s = NULL;
}
}
if (%(oname)s && !CudaNdarray_is_c_contiguous(%(oname)s))
{
Py_XDECREF(%(oname)s);
%(oname)s = NULL;
}
if (NULL == %(oname)s)
{
%(oname)s = (CudaNdarray*)CudaNdarray_New();
if (!%(oname)s)
{
//error string already set
%(fail)s;
}
if (CudaNdarray_alloc_contiguous(%(oname)s, %(nd)s, dims))
{
//error string already set
Py_DECREF(%(oname)s);
%(oname)s = NULL;
%(fail)s;
}
}
//std::cerr << "ELEMWISE NEW %(oname)s nd" << %(oname)s->nd << "\\n";
//std::cerr << "ELEMWISE NEW %(oname)s data" << %(oname)s->devdata << "\\n";
""" % locals(), file=sio)
else:
input_idx = self.inplace_pattern[idx]
iname = inputs[input_idx]
print("""
Py_XDECREF(%(oname)s);
%(oname)s = %(iname)s;
Py_INCREF(%(oname)s);
for (int i = 0; (i< %(nd)s) && (%(oname)s); ++i) {
if (dims[i] != CudaNdarray_HOST_DIMS(%(oname)s)[i])
{
PyErr_Format(PyExc_ValueError,
"GpuElemwise. Output dimension mis-match. Output"
" %(idx)d (indices start at 0), working inplace"
" on input %(input_idx)s, has shape[%%i] == %%i"
", but the output's size on that axis is %%i.",
i,
CudaNdarray_HOST_DIMS(%(oname)s)[i],
dims[i]
);
Py_DECREF(%(oname)s);
%(oname)s = NULL;
%(fail)s;
}
}
//std::cerr << "ELEMWISE NEW %(oname)s nd" << %(oname)s->nd << "\\n";
//std::cerr << "ELEMWISE NEW %(oname)s data" << %(oname)s->devdata << "\\n";
""" % locals(), file=sio)
print("""
{
//new block so that failure gotos don't skip over variable initialization
//std::cerr << "calling callkernel\\n";
if (callkernel_%(nodename)s(1, 0, dims
""" % locals(), file=sio)
for iname in inputs:
print("""
, CudaNdarray_DEV_DATA(%(iname)s), CudaNdarray_HOST_STRIDES(%(iname)s)
""" % locals(), file=sio)
for oname in outputs:
print("""
, CudaNdarray_DEV_DATA(%(oname)s), CudaNdarray_HOST_STRIDES(%(oname)s)
""" % locals(), file=sio)
print("""
))
{
// error
""", file=sio)
for oname in outputs:
print("""
Py_DECREF(%(oname)s);
%(oname)s = NULL;
""" % locals(), file=sio)
print("""
%(fail)s;
}
else // no error
{
}
}
//std::cerr << "C_CODE %(opname)s END\\n";
""" % locals(), file=sio)
# print sio.getvalue()
return sio.getvalue()
class ErfinvGPU(Erfinv):
"""
Provides a c-code implementation of the inverse error function for GPU.
Notes
-----
We do not add this c_code to theano.scalar.basic_scipy.Erfinv, as we
currently rely on Nvidia's cublas library to provide the erfinv
c-implementation (which requires different c_headers). As it stands,
theano.scalar.basic_scipy.Erfinv does not have c_code as scipy does not
export the required C function.
"""
def c_headers(self):
return ['math_functions.h', 'cublas_v2.h']
def c_code(self, node, name, inp, out, sub):
x, = inp
z, = out
if node.inputs[0].type in complex_types:
raise NotImplementedError('type not supported', type)
return "%(z)s = erfinv(%(x)s);" % locals()
erfinv_gpu = ErfinvGPU(upgrade_to_float_no_complex, name='erfinv_gpu')
class ErfcxGPU(Erfcx):
"""
Provides a c-code implementation of the scaled complementary error function
for GPU.
Notes
-----
We do not add this c_code to theano.scalar.basic_scipy.Erfcx, as we
currently rely on Nvidia's cublas library to provide the erfcx
c-implementation (which requires different c_headers). As it stands,
theano.scalar.basic_scipy.Erfcx does not have c_code as scipy does not
export the required C function.
"""
def c_headers(self):
return ['math_functions.h', 'cublas_v2.h']
def c_code(self, node, name, inp, out, sub):
x, = inp
z, = out
if node.inputs[0].type in complex_types:
raise NotImplementedError('type not supported', type)
return "%(z)s = erfcx(%(x)s);" % locals()
erfcx_gpu = ErfcxGPU(upgrade_to_float_no_complex, name='erfcx_gpu')
|
py | b416b7afb449848e13295eab0325a13e3d02658c | """Transfer function models for transfer function fitting."""
import control
import numpy as np
from .model import Model
import kontrol.core.math as math
import kontrol
class TransferFunctionModel(Model):
r"""Transfer function model class defined by numerator and denominator
Parameters
----------
nzero : int
Number of zeros.
npole : int
Number of poles.
args : array or None, optional.
The model parameters.
Structured as follows:
[b_n, b_n-1,..., b_1, b_0, a_m, a_m-1,..., a_1, a_0],
where b and a are the coefficients of the numerator and denominator
respectively, ordered from higher-order to lower-order.
Defaults to None.
Attributes
----------
tf : kontrol.TransferFunction
        The last evaluated transfer function.
Notes
-----
The transfer function model is defined as
.. math::
G(s, b_n, b_{n-1},..., b_1, b_0, a_m, a_{m-1},..., a_1, a_0)
= \frac{\sum_{i=0}^{n} b_i s^i}{\sum_{j=0}^{m} a_j s^j}
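
    Examples
    --------
    A minimal sketch (the argument values are illustrative, not from the
    original module):

    >>> model = TransferFunctionModel(nzero=1, npole=2, args=[1, 2, 3, 4, 5])
    >>> tf = model.tf  # kontrol.TransferFunction (s + 2) / (3s^2 + 4s + 5)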
"""
def __init__(self, nzero, npole, args=None, log_args=False):
"""Constructor
Parameters
----------
nzero : int
Number of zeros.
npole : int
Number of poles.
args : array or None, optional.
The model parameters.
Structured as follows:
[b_n, b_n-1,..., b_1, b_0, a_m, a_m-1,..., a_1, a_0],
where b and a are the coefficients of the numerator and denominator
respectively, ordered from higher-order to lower-order.
Defaults to None.
"""
super().__init__(args=args, log_args=log_args)
self._nzero = None
self._npole = None
self.nzero = nzero
self.npole = npole
def _x2y(self, x, xunit="Hz"):
"""Transfer function frequency response.
Parameters
----------
x : array
Independent variable in units specified by ``xunit``.
xunit : str, optional.
Unit of ``x``.
Choose from ["Hz", "rad/s", "s"].
Defaults to "Hz".
Returns
-------
y : array
Frequency response of the transfer function in complex values.
"""
if len(self.num) + len(self.den) != self.nzero + self.npole + 2:
raise ValueError("len(args) must be nzero+npole+2")
num = self.num
den = self.den
s = _x2s(x, xunit)
num_poly = math.polyval(num, s)
den_poly = math.polyval(den, s)
y = num_poly/den_poly
return y
@property
def nzero(self):
"""Number of zeros"""
return self._nzero
@nzero.setter
def nzero(self, _nzero):
"""nzero.setter"""
self._nzero = _nzero
@property
def npole(self):
"""Number of zeros"""
return self._npole
@npole.setter
def npole(self, _npole):
"""npole.setter"""
self._npole = _npole
@property
def num(self):
"""Numerator array"""
return self.args[:self.nzero+1]
@property
def den(self):
"""Denominator array"""
return self.args[self.nzero+1:]
@property
def tf(self):
"""The Transfer Function object."""
# if self.log_args:
# num = 10**self.num
# den = 10**self.den
# else:
num = self.num
den = self.den
return kontrol.TransferFunction(num, den)
class DampedOscillator(TransferFunctionModel):
r"""Transfer function model for a damped oscillator.
Parameters
----------
args : array or None, optional.
The model parameters with three numbers.
Structured as follows:
``args = [k, fn, q]``,
where ``k`` is the DC gain of the transfer function,
``fn`` is the resonance frequency in Hz, and ``q`` is the Q-factor.
Defaults to None.
Notes
-----
    The model is defined as
.. math::
G(s; k, \omega_n, q) =
k\frac{\omega_n^2}{s^2 + \frac{\omega_n}{q}s + \omega_n^2}
where :math:`k` is the DC gain of the transfer function, :math:`\omega_n`
is the resonance frequency of the oscillator, and :math:`q` is the
    Q-factor of the damped oscillator.
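
    Examples
    --------
    A minimal sketch using the module's own ``_x2y`` (the parameter values
    are illustrative):

    >>> import numpy as np
    >>> model = DampedOscillator(args=[1, 0.5, 10])  # k=1, fn=0.5 Hz, q=10
    >>> f = np.linspace(0.01, 10, 1000)
    >>> y = model._x2y(f, xunit="Hz")  # complex frequency response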
"""
def __init__(self, args=None):
"""Constructor
Parameters
----------
args : array or None, optional.
The model parameters with three numbers.
Structured as follows:
``args = [k, fn, q]``,
where ``k`` is the DC gain of the transfer function,
``fn`` is the resonance frequency in Hz, and ``q`` is the Q-factor.
Defaults to None.
"""
super().__init__(nzero=0, npole=2)
self._args = None
self.args = args
@property
def damped_oscillator_args(self):
"""The model parameters with three numbers [k, fn, q]"""
return self._damped_oscillator_args
@damped_oscillator_args.setter
def damped_oscillator_args(self, _damped_oscillator_args):
"""damped_oscillator_args.setter"""
self._damped_oscillator_args = _damped_oscillator_args
# Overriding self.args in kontrol.curvefit.model.Model()
@property
def args(self):
"""Model parameters"""
return self._args
@args.setter
def args(self, _args):
"""args.setter"""
if _args is None:
self._args = None
else:
if len(_args) != 3:
raise ValueError("args must be in the format [k, fn, q].")
self.damped_oscillator_args = _args
k = self.gain
fn = self.fn
q = self.q
wn = 2*np.pi*fn
args = np.array([k*wn**2, 1, wn/q, wn**2]) # Convert to num, den.
self._args = args
@property
def gain(self):
"""DC gain"""
return self.damped_oscillator_args[0]
@property
def fn(self):
"""Resonance frequency"""
return self.damped_oscillator_args[1]
@property
def q(self):
"""Q factor"""
return self.damped_oscillator_args[2]
class CoupledOscillator(TransferFunctionModel):
""""""
def __init__(self):
""""""
pass
# Taking a break.
class SimpleZPK(Model):
r"""ZPK model with simple poles and zeros.
Parameters
----------
nzero : int
Number of simple zeros.
npole : int
Number of simple poles.
args : array, optional
The model parameters defined as
``args = [z1, z2,..., p1, p2,..., k]``
where ``z`` are the locations of the zeros in Hz,
``p`` are the locations of the pole in Hz, and
        ``k`` is the static gain.
    log_args : boolean, optional
        If True, the model parameters passed in are assumed to have been
        transformed by log10(), so the real parameters are taken to be
        10**args instead.
        Defaults to False.
Attributes
----------
zero : array
List of zeros.
pole : array
List of poles.
gain : float
Static gain.
    tf : kontrol.TransferFunction
The transfer function representation of this ZPK model
Notes
-----
The simple ZPK model is defined as
.. math::
G(s; z_1, z_2,..., p_1, p_2, ..., k)
= k\frac{\prod_i(\frac{s}{2\pi z_i}+1)}{\prod_j(\frac{s}{2\pi p_j}+1)}
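
    Examples
    --------
    A minimal sketch of the parameter layout (the values are illustrative):

    >>> model = SimpleZPK(nzero=1, npole=2, args=[1, 10, 100, 5])
    >>> tf = model.tf  # zero at 1 Hz, poles at 10 Hz and 100 Hz, gain 5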
"""
def __init__(self, nzero, npole, args=None, log_args=False):
r"""Constructor.
Parameters
----------
nzero : int
Number of simple zeros.
npole : int
Number of simple poles.
args : array, optional
The model parameters defined as
``args = [z1, z2,..., p1, p2,..., k]``
where ``z`` are the locations of the zeros in Hz,
``p`` are the locations of the pole in Hz, and
            ``k`` is the static gain.
        log_args : boolean, optional
            If True, the model parameters passed in are assumed to have been
            transformed by log10(), so the real parameters are taken to be
            10**args instead.
            Defaults to False.
"""
self._nzero = None
self._npole = None
self.nzero = nzero
self.npole = npole
super().__init__(args=args, log_args=log_args)
def _x2y(self, x, xunit="Hz"):
"""ZPK model frequency response."""
s = _x2s(x, xunit)
tf = np.ones_like(s) * self.gain
for i in range(len(self.zero)):
tf *= s/(2*np.pi*self.zero[i]) + 1
for i in range(len(self.pole)):
tf /= s/(2*np.pi*self.pole[i]) + 1
return tf
@property
def nzero(self):
"""Number of simple zeros"""
return self._nzero
@nzero.setter
def nzero(self, _nzero):
"""nzero.setter"""
self._nzero = _nzero
@property
def npole(self):
"""Number of complex pole pairs"""
return self._npole
@npole.setter
def npole(self, _npole):
"""npole.setter"""
self._npole = _npole
@property
def args(self):
"""Model parameters in ZPK [z1, z2,..., p1, p2,..., k] format"""
return self._args
@args.setter
def args(self, _args):
"""args.setter"""
if _args is None:
self._args = None
elif len(_args) != self.nzero + self.npole + 1:
raise ValueError("Length of argument must match nzero and npole.")
else:
if self.log_args:
_args = 10**_args
self._args = _args
#TODO Maybe I should consider setter for zero, pole and gain.
#These can be used to change the args as well but could be tedious.
@property
def zero(self):
"""List of zeros"""
return self.args[:self.nzero]
@property
def pole(self):
"""List of poles"""
return self.args[self.nzero:-1]
@property
def gain(self):
"""Static gain"""
return self.args[-1]
@property
def tf(self):
"""Returns a TransferFunction object of this ZPK model"""
s = control.tf("s")
tf = control.tf([self.gain], [1])
for i in range(len(self.zero)):
tf *= s/(2*np.pi*self.zero[i]) + 1
for i in range(len(self.pole)):
tf /= s/(2*np.pi*self.pole[i]) + 1
return kontrol.TransferFunction(tf)
class ComplexZPK(Model):
r"""ZPK model with complex poles and zeros.
Parameters
----------
nzero_pairs : int
Number of complex zero pairs.
npole_pairs : int
Number of complex pole pairs.
args : array, optional
The model parameters defined as
``args = [f1, q1, f2, q2,..., fi, qi,..., fn, qn, k]``,
where ``f`` are resonance frequencies of the complex
zero/pole pairs, ``q`` are the quality factors, and
``k`` is the static gain, ``i`` is the number of complex zero
        pairs, and ``n-i`` is the number of complex pole pairs.
    log_args : boolean, optional
        If True, the model parameters passed in are assumed to have been
        transformed by log10(), so the real parameters are taken to be
        10**args instead.
        Defaults to False.
Attributes
----------
fn_zero : array
List of resonance frequencies of the complex zeros.
fn_pole : array
List of resonance frequencies of the complex poles.
q_zero : array
List of Q-factors of the complex zeros.
q_pole : array
List of Q-factors of the complex poles.
Notes
-----
The complex ZPK model is defined by:
.. math::
G(s; f_i, q_i,k)
=k\frac{\prod_i(\frac{s^2}{(2\pi f_i)^2} + \frac{1}{2\pi f_i q_i}s + 1)}
{\prod_j(\frac{s^2}{(2\pi f_j)^2} + \frac{1}{2\pi f_j q_j}s + 1)}
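
    Examples
    --------
    A minimal sketch of the parameter layout (the values are illustrative):

    >>> model = ComplexZPK(nzero_pairs=1, npole_pairs=1,
    ...                    args=[0.5, 10, 2, 5, 1])
    >>> tf = model.tf  # zero pair at 0.5 Hz (Q=10), pole pair at 2 Hz (Q=5)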
"""
def __init__(
self, nzero_pairs, npole_pairs, args=None, log_args=False):
r"""Constructor.
Parameters
----------
nzero_pairs : int
Number of complex zero pairs.
npole_pairs : int
Number of complex pole pairs.
args : array, optional
The model parameters defined as
``args = [f1, q1, f2, q2,..., fi, qi,..., fn, qn, k]``,
where ``f`` are resonance frequencies of the complex
zero/pole pairs, ``q`` are the quality factors, and
``k`` is the static gain, ``i`` is the number of complex zero
            pairs, and ``n-i`` is the number of complex pole pairs.
        log_args : boolean, optional
            If True, the model parameters passed in are assumed to have been
            transformed by log10(), so the real parameters are taken to be
            10**args instead.
            Defaults to False.
Example
-------
        With ``nzero_pairs = 1``, ``npole_pairs = 2``,
        ``args = [1, 2, 3, 4, 5, 6, 7]`` refers to the transfer function

        .. math::
            G(s) = 7\frac{\frac{1}{(1\times 2\pi)^2}s^2
            + \frac{1}{2\times 1\times 2\pi}s + 1}
            {\left(\frac{1}{(3\times 2\pi)^2}s^2
            + \frac{1}{4\times 3\times 2\pi}s + 1\right)
            \left(\frac{1}{(5\times 2\pi)^2}s^2
            + \frac{1}{6\times 5\times 2\pi}s + 1\right)}
"""
self._nzero_pairs = None
self._npole_pairs = None
self._fn_zero = None
self._fn_pole = None
self._q_zero = None
self._q_pole = None
self._gain = None
self.nzero_pairs = nzero_pairs
        self.npole_pairs = npole_pairs
super().__init__(args=args, log_args=log_args)
def _x2y(self, x, xunit="Hz"):
"""ZPK model (complex) frequency response."""
s = _x2s(x, xunit)
fn_zero = self.fn_zero
q_zero = self.q_zero
fn_pole = self.fn_pole
q_pole = self.q_pole
num = np.ones_like(s) * self.gain
den = np.ones_like(s)
for i in range(len(fn_zero)):
num *= (1 / (2*np.pi*fn_zero[i])**2 * s**2
+ 1 / (2*np.pi*fn_zero[i]*q_zero[i]) * s
+ 1)
for i in range(len(fn_pole)):
den *= (1 / (2*np.pi*fn_pole[i])**2 * s**2
+ 1 / (2*np.pi*fn_pole[i]*q_pole[i]) * s
+ 1)
return num/den
@property
def nzero_pairs(self):
"""Number of complex zero pairs"""
return self._nzero_pairs
@nzero_pairs.setter
def nzero_pairs(self, _nzero_pairs):
"""nzero_pairs.setter"""
self._nzero_pairs = _nzero_pairs
@property
def npole_pairs(self):
"""Number of complex pole pairs"""
return self._npole_pairs
@npole_pairs.setter
def npole_pairs(self, _npole_pairs):
"""npole_pairs.setter"""
self._npole_pairs = _npole_pairs
@property
def args(self):
"""Model parameters in ZPK [f1, q1, f2, q2,..., fn, qn, k] format"""
return self._args
@args.setter
def args(self, _args):
"""args.setter"""
if _args is None:
self._args = None
elif np.mod(len(_args), 2) != 1:
raise ValueError("Length of argument must be odd number "
"and in the format "
"[f1, q1, f2, q2,..., fn, qn, k]")
elif len(_args) != self.nzero_pairs*2 + self.npole_pairs*2 + 1:
raise ValueError("Length of argument must match the specfied "
"nzero_pairs and npole_pairs.")
else:
if self.log_args:
_args = 10**_args
self._args = _args
@property
def fn_zero(self):
"""List of resonance frequencies of complex zeros."""
return self.args[:int(self.nzero_pairs*2):2]
@property
def fn_pole(self):
"""List of resonance frequencies of complex poles."""
return self.args[int(self.nzero_pairs*2):-1:2]
@property
def q_zero(self):
"""List of quality factors of the complex zeros"""
return self.args[1:int(self.nzero_pairs*2):2]
@property
def q_pole(self):
"""List of quality factors of the complex poles"""
return self.args[int(self.nzero_pairs*2)+1:-1:2]
@property
def gain(self):
"""Static gain."""
return self.args[-1]
@property
def tf(self):
"""Returns a TransferFunction object of this ZPK model"""
s = control.tf("s")
fn_zero = self.fn_zero
q_zero = self.q_zero
fn_pole = self.fn_pole
q_pole = self.q_pole
k = self.gain
num = control.tf([k], [1])
den = control.tf([1], [1])
for i in range(len(fn_zero)):
num *= (1 / (2*np.pi*fn_zero[i])**2 * s**2
+ 1 / (2*np.pi*fn_zero[i]*q_zero[i]) * s
+ 1)
for i in range(len(fn_pole)):
den *= (1 / (2*np.pi*fn_pole[i])**2 * s**2
+ 1 / (2*np.pi*fn_pole[i]*q_pole[i]) * s
+ 1)
return kontrol.TransferFunction(num/den)
#TODO add support for a generic ZPK model
def _x2s(x, xunit):
"""Converts the independent variable to the complex variable s.
Parameters
----------
x : array
Independent variable in units specified by ``xunit``.
xunit : str,
Unit of ``x``.
Choose from ["Hz", "rad/s", "s"].
Returns
-------
s : array
The complex variable.
"""
if xunit == "Hz":
s = 1j*2*np.pi*x
elif xunit == "rad/s":
s = 1j*x
elif xunit == "s":
s = x
else:
raise ValueError("Invalid specification for xunit."
"Please choose xunit from 'Hz', 'rad/s', or 's'.")
return s
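# For example (illustrative): _x2s(np.array([1.0]), "Hz") returns
# array([0.+6.28318531j]), i.e. s = 2j*pi*f, while xunit="s" returns x
# unchanged.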
|
py | b416b818f50f3fbf5b2e624b24eaed4ca6195016 | # This test module covers support in various parts of the standard library
# for working with modules located inside zipfiles
# The tests are centralised in this fashion to make it easy to drop them
# if a platform doesn't support zipimport
import unittest
import test.test_support
import os
import os.path
import sys
import textwrap
import zipfile
import zipimport
import doctest
import inspect
import linecache
import pdb
verbose = test.test_support.verbose
# Library modules covered by this test set
# pdb (Issue 4201)
# inspect (Issue 4223)
# doctest (Issue 4197)
# Other test modules with zipimport related tests
# test_zipimport (of course!)
# test_cmd_line_script (covers the zipimport support in runpy)
# Retrieve some helpers from other test cases
from test import test_doctest, sample_doctest
from test.test_importhooks import ImportHooksBaseTestCase
from test.test_cmd_line_script import temp_dir, _run_python, \
_spawn_python, _kill_python, \
_make_test_script, \
_compile_test_script, \
_make_test_zip, _make_test_pkg
def _run_object_doctest(obj, module):
# Direct doctest output (normally just errors) to real stdout; doctest
# output shouldn't be compared by regrtest.
save_stdout = sys.stdout
sys.stdout = test.test_support.get_original_stdout()
try:
finder = doctest.DocTestFinder(verbose=verbose, recurse=False)
runner = doctest.DocTestRunner(verbose=verbose)
# Use the object's fully qualified name if it has one
# Otherwise, use the module's name
try:
name = "%s.%s" % (obj.__module__, obj.__name__)
except AttributeError:
name = module.__name__
for example in finder.find(obj, name, module):
runner.run(example)
f, t = runner.failures, runner.tries
if f:
raise test.test_support.TestFailed("%d of %d doctests failed" % (f, t))
finally:
sys.stdout = save_stdout
if verbose:
print 'doctest (%s) ... %d tests with zero failures' % (module.__name__, t)
return f, t
class ZipSupportTests(ImportHooksBaseTestCase):
# We use the ImportHooksBaseTestCase to restore
# the state of the import related information
# in the sys module after each test
# We also clear the linecache and zipimport cache
# just to avoid any bogus errors due to name reuse in the tests
def setUp(self):
linecache.clearcache()
zipimport._zip_directory_cache.clear()
ImportHooksBaseTestCase.setUp(self)
def test_inspect_getsource_issue4223(self):
test_src = "def foo(): pass\n"
with temp_dir() as d:
init_name = _make_test_script(d, '__init__', test_src)
name_in_zip = os.path.join('zip_pkg',
os.path.basename(init_name))
zip_name, run_name = _make_test_zip(d, 'test_zip',
init_name, name_in_zip)
os.remove(init_name)
sys.path.insert(0, zip_name)
import zip_pkg
self.assertEqual(inspect.getsource(zip_pkg.foo), test_src)
def test_doctest_issue4197(self):
# To avoid having to keep two copies of the doctest module's
# unit tests in sync, this test works by taking the source of
# test_doctest itself, rewriting it a bit to cope with a new
# location, and then throwing it in a zip file to make sure
# everything still works correctly
test_src = inspect.getsource(test_doctest)
test_src = test_src.replace(
"from test import test_doctest",
"import test_zipped_doctest as test_doctest")
test_src = test_src.replace("test.test_doctest",
"test_zipped_doctest")
test_src = test_src.replace("test.sample_doctest",
"sample_zipped_doctest")
sample_src = inspect.getsource(sample_doctest)
sample_src = sample_src.replace("test.test_doctest",
"test_zipped_doctest")
with temp_dir() as d:
script_name = _make_test_script(d, 'test_zipped_doctest',
test_src)
zip_name, run_name = _make_test_zip(d, 'test_zip',
script_name)
z = zipfile.ZipFile(zip_name, 'a')
z.writestr("sample_zipped_doctest.py", sample_src)
z.close()
if verbose:
zip_file = zipfile.ZipFile(zip_name, 'r')
print 'Contents of %r:' % zip_name
zip_file.printdir()
zip_file.close()
os.remove(script_name)
sys.path.insert(0, zip_name)
import test_zipped_doctest
# Some of the doc tests depend on the colocated text files
# which aren't available to the zipped version (the doctest
# module currently requires real filenames for non-embedded
# tests). So we're forced to be selective about which tests
# to run.
# doctest could really use some APIs which take a text
# string or a file object instead of a filename...
known_good_tests = [
test_zipped_doctest.SampleClass,
test_zipped_doctest.SampleClass.NestedClass,
test_zipped_doctest.SampleClass.NestedClass.__init__,
test_zipped_doctest.SampleClass.__init__,
test_zipped_doctest.SampleClass.a_classmethod,
test_zipped_doctest.SampleClass.a_property,
test_zipped_doctest.SampleClass.a_staticmethod,
test_zipped_doctest.SampleClass.double,
test_zipped_doctest.SampleClass.get,
test_zipped_doctest.SampleNewStyleClass,
test_zipped_doctest.SampleNewStyleClass.__init__,
test_zipped_doctest.SampleNewStyleClass.double,
test_zipped_doctest.SampleNewStyleClass.get,
test_zipped_doctest.old_test1,
test_zipped_doctest.old_test2,
test_zipped_doctest.old_test3,
test_zipped_doctest.old_test4,
test_zipped_doctest.sample_func,
test_zipped_doctest.test_DocTest,
test_zipped_doctest.test_DocTestParser,
test_zipped_doctest.test_DocTestRunner.basics,
test_zipped_doctest.test_DocTestRunner.exceptions,
test_zipped_doctest.test_DocTestRunner.option_directives,
test_zipped_doctest.test_DocTestRunner.optionflags,
test_zipped_doctest.test_DocTestRunner.verbose_flag,
test_zipped_doctest.test_Example,
test_zipped_doctest.test_debug,
test_zipped_doctest.test_pdb_set_trace,
test_zipped_doctest.test_pdb_set_trace_nested,
test_zipped_doctest.test_testsource,
test_zipped_doctest.test_trailing_space_in_test,
test_zipped_doctest.test_DocTestSuite,
test_zipped_doctest.test_DocTestFinder,
]
# These remaining tests are the ones which need access
# to the data files, so we don't run them
fail_due_to_missing_data_files = [
test_zipped_doctest.test_DocFileSuite,
test_zipped_doctest.test_testfile,
test_zipped_doctest.test_unittest_reportflags,
]
for obj in known_good_tests:
_run_object_doctest(obj, test_zipped_doctest)
def test_doctest_main_issue4197(self):
test_src = textwrap.dedent("""\
class Test:
">>> 'line 2'"
pass
import doctest
doctest.testmod()
""")
pattern = 'File "%s", line 2, in %s'
with temp_dir() as d:
script_name = _make_test_script(d, 'script', test_src)
exit_code, data = _run_python(script_name)
expected = pattern % (script_name, "__main__.Test")
if verbose:
print "Expected line", expected
print "Got stdout:"
print data
self.assert_(expected in data)
zip_name, run_name = _make_test_zip(d, "test_zip",
script_name, '__main__.py')
exit_code, data = _run_python(zip_name)
expected = pattern % (run_name, "__main__.Test")
if verbose:
print "Expected line", expected
print "Got stdout:"
print data
self.assert_(expected in data)
def test_pdb_issue4201(self):
test_src = textwrap.dedent("""\
def f():
pass
import pdb
pdb.runcall(f)
""")
with temp_dir() as d:
script_name = _make_test_script(d, 'script', test_src)
p = _spawn_python(script_name)
p.stdin.write('l\n')
data = _kill_python(p)
self.assert_(script_name in data)
zip_name, run_name = _make_test_zip(d, "test_zip",
script_name, '__main__.py')
p = _spawn_python(zip_name)
p.stdin.write('l\n')
data = _kill_python(p)
self.assert_(run_name in data)
def test_main():
test.test_support.run_unittest(ZipSupportTests)
test.test_support.reap_children()
if __name__ == '__main__':
test_main()
|
py | b416b8d50b506e167b866d7f8f72fea15a31c3a4 | # Copyright 2019 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
[概要]
ZABBIXで発生している障害を管理する
"""
import datetime
import django
import json
import os
import sys
import pytz
import traceback
# Add the OASE module import path
my_path = os.path.dirname(os.path.abspath(__file__))
tmp_path = my_path.split('oase-root')
root_dir_path = tmp_path[0] + 'oase-root'
sys.path.append(root_dir_path)
os.environ['DJANGO_SETTINGS_MODULE'] = 'confs.frameworkconfs.settings'
django.setup()
from django.db import transaction
# Initialize the logger
from libs.commonlibs.oase_logger import OaseLogger
logger = OaseLogger.get_instance()
from web_app.models.ZABBIX_monitoring_models import ZabbixTriggerHistory
from libs.backyardlibs.monitoring_adapter.ZABBIX.ZABBIX_api import ZabbixApi
class ManageTrigger:
def __init__(self, zabbix_adapter_id, user):
"""
ZABBIX監視マスタID=zabbix_adapter_idとなるZABBIX障害取得履歴管理を取得する
"""
self.zabbix_adapter_id = zabbix_adapter_id
self.user = user
def main(self, triggerid_lastchange_list):
"""
[概要]
障害取得管理を行う。zabbixから取得したtriggerid,lastchangeと、
oaseに登録されているtriggerid,lastchangeを比較し登録済みの障害か、未登録の障害か調べ結果を返す
[引数]
triggerid_lastchange_list: [(trigger_id, lastchange),(trigger_id, lastchange),...]
ZABBIX apiで得られるtriggeridとlastchangeのsetのリスト
[戻り値]
list: 各trigger_idの管理状態をboolのリストとして返す。
未登録の障害又は新たに障害が発生した場合はTrue, 既に取得済みの障害の場合はFalse
例外が発生した場合は空のリストを返す
"""
logger.logic_log('LOSI00001', 'triggerid_lastchange_list: %s' % (triggerid_lastchange_list))
trigger_history_list = ZabbixTriggerHistory.objects.select_for_update().filter(zabbix_adapter_id=self.zabbix_adapter_id)
trigger_history_dict = {t.trigger_id:t for t in trigger_history_list}
result = []
active_trigger_id_list = []
for trigger_id, lastchange in triggerid_lastchange_list:
active_trigger_id_list.append(trigger_id)
            # If the trigger ID is not yet registered, add it to the table and treat it as a new failure.
if not trigger_id in trigger_history_dict.keys():
new_trigger = self.create(trigger_id, lastchange, self.user)
result.append(True)
continue
if lastchange == trigger_history_dict[trigger_id].lastchange:
                # If lastchange is unchanged, the failure is already registered
result.append(False)
else:
                # lastchange has changed, so a new failure occurred; update the record
_ = self.update(trigger_history_dict[trigger_id], lastchange, self.user)
result.append(True)
self.delete_resolved_records(trigger_history_list, active_trigger_id_list)
logger.logic_log('LOSI00002', 'result: %s' % (result))
return result
def create(self, trigger_id, lastchange, user):
"""
[概要]
レコード作成
[戻り値]
ZabbixTriggerHistory: 作成したモデルを返す。 例外の場合はNoneを返す
"""
logger.logic_log('LOSI00001', 'trigger_id: %s, lastchange: %s, user_name: %s' % (trigger_id, lastchange, user))
zabbix_trigger_his = ZabbixTriggerHistory(
zabbix_adapter_id = self.zabbix_adapter_id,
trigger_id = trigger_id,
lastchange = lastchange,
last_update_user = user,
last_update_timestamp = datetime.datetime.now(pytz.timezone('UTC')),
)
zabbix_trigger_his.save(force_insert=True)
logger.logic_log('LOSI00002', 'zabbix_trigger_history_id: %s' % (zabbix_trigger_his))
return zabbix_trigger_his
def update(self, trigger_his, lastchange, user):
"""
[概要]
lastchangeを更新する
"""
logger.logic_log('LOSI00001', 'trigger_his: %s, lastchange: %s, user: %s' % (trigger_his, lastchange, user))
trigger_his.lastchange = lastchange
trigger_his.last_update_user = user
trigger_his.last_update_timestamp = datetime.datetime.now(pytz.timezone('UTC'))
trigger_his.save(force_update=True)
logger.logic_log('LOSI00002', 'zabbix_trigger_history_id: %s' % (trigger_his))
def delete_resolved_records(self, trigger_history_list, active_trigger_id_list):
"""
[概要]
解決済みになった障害は削除する
[引数]
trigger_history_list: テーブルに登録されているZabbixTriggerHistoryのクラスのリスト
active_trigger_id_list: zabbixから取得したトリガーIDのリスト
"""
for t in trigger_history_list:
if not t.trigger_id in active_trigger_id_list:
logger.logic_log('LOSI25000', self.zabbix_adapter_id, t.trigger_id)
t.delete()
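# A minimal usage sketch (the adapter ID, user name, and trigger data below
# are hypothetical values for illustration only):
if __name__ == '__main__':
    manager = ManageTrigger(zabbix_adapter_id=1, user='oase_batch')
    with transaction.atomic():
        # (trigger_id, lastchange) pairs as returned by the ZABBIX API
        result = manager.main([('10001', 1546300800), ('10002', 1546304400)])
    print(result)  # e.g. [True, False]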
|
py | b416b90b9571c9fe04ff9d663eadf7017bae2363 |
def get_data():
import csv
# csv file name
filename = "data.csv"
# initializing the titles and rows list
fields = []
rows = []
# reading csv file
with open(filename, 'r') as csvfile:
# creating a csv reader object
csvreader = csv.reader(csvfile)
# extracting field names through first row
fields = next(csvreader)
# extracting each data row one by one
for row in csvreader:
rows.append(row)
return fields,rows
# printing first 5 rows
fields, rows = get_data()
print('\nFirst 5 rows are:\n')
for row in rows[:5]:
    # parsing each column of a row
    for col in row:
        print("%10s" % col, end=' ')
    print('\n')
py | b416b91852822cc1c709e0743f5dfa4f8f9a62c7 | # Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import numpy as np
import tensorflow as tf
from tensorflow_federated.python.common_libs import test
from tensorflow_federated.python.research.utils.datasets import stackoverflow_dataset
TEST_DATA = collections.OrderedDict(
creation_date=(['unused date']),
title=(['unused title']),
score=([tf.constant(0, dtype=tf.int64)]),
tags=(['unused test tag']),
tokens=(['one must imagine']),
type=(['unused type']),
)
def _compute_length_of_dataset(ds):
return ds.reduce(0, lambda x, _: x + 1)
class DatasetTest(tf.test.TestCase):
def test_split_input_target(self):
tokens = tf.constant([[0, 1, 2, 3, 4]], dtype=tf.int64)
expected_input = [[0, 1, 2, 3]]
expected_target = [[1, 2, 3, 4]]
split = stackoverflow_dataset.split_input_target(tokens)
self.assertAllEqual(self.evaluate(split[0]), expected_input)
self.assertAllEqual(self.evaluate(split[1]), expected_target)
def test_build_to_ids_fn_truncates(self):
vocab = ['A', 'B', 'C']
max_seq_len = 1
_, _, bos, _ = stackoverflow_dataset.get_special_tokens(len(vocab))
to_ids_fn = stackoverflow_dataset.build_to_ids_fn(vocab, max_seq_len)
data = {'tokens': 'A B C'}
processed = to_ids_fn(data)
self.assertAllEqual(self.evaluate(processed), [bos, 1])
def test_build_to_ids_fn_embeds_all_vocab(self):
vocab = ['A', 'B', 'C']
max_seq_len = 5
_, _, bos, eos = stackoverflow_dataset.get_special_tokens(len(vocab))
to_ids_fn = stackoverflow_dataset.build_to_ids_fn(vocab, max_seq_len)
data = {'tokens': 'A B C'}
processed = to_ids_fn(data)
self.assertAllEqual(self.evaluate(processed), [bos, 1, 2, 3, eos])
def test_pad_token_correct(self):
vocab = ['A', 'B', 'C']
max_seq_len = 5
to_ids_fn = stackoverflow_dataset.build_to_ids_fn(vocab, max_seq_len)
pad, _, bos, eos = stackoverflow_dataset.get_special_tokens(len(vocab))
data = {'tokens': 'A B C'}
processed = to_ids_fn(data)
batched_ds = tf.data.Dataset.from_tensor_slices([processed]).padded_batch(
1, padded_shapes=[6])
sample_elem = next(iter(batched_ds))
self.assertAllEqual(self.evaluate(sample_elem), [[bos, 1, 2, 3, eos, pad]])
def test_oov_token_correct(self):
vocab = ['A', 'B', 'C']
max_seq_len = 5
to_ids_fn = stackoverflow_dataset.build_to_ids_fn(vocab, max_seq_len)
_, oov_token, _, _ = stackoverflow_dataset.get_special_tokens(len(vocab))
data = {'tokens': 'A B D'}
processed = to_ids_fn(data)
self.assertEqual(self.evaluate(processed)[3], oov_token)
class BatchAndSplitTest(tf.test.TestCase):
def test_batch_and_split_fn_returns_dataset_with_correct_type_spec(self):
token = tf.constant([[0, 1, 2, 3, 4]], dtype=tf.int64)
ds = tf.data.Dataset.from_tensor_slices(token)
padded_and_batched = stackoverflow_dataset.batch_and_split(
ds, max_seq_len=6, batch_size=1)
self.assertIsInstance(padded_and_batched, tf.data.Dataset)
self.assertEqual(padded_and_batched.element_spec, (tf.TensorSpec(
[None, 6], dtype=tf.int64), tf.TensorSpec([None, 6], dtype=tf.int64)))
def test_batch_and_split_fn_returns_dataset_yielding_expected_elements(self):
token = tf.constant([[0, 1, 2, 3, 4]], dtype=tf.int64)
ds = tf.data.Dataset.from_tensor_slices(token)
padded_and_batched = stackoverflow_dataset.batch_and_split(
ds, max_seq_len=6, batch_size=1)
num_elems = 0
for elem in padded_and_batched:
self.assertAllEqual(
self.evaluate(elem[0]), np.array([[0, 1, 2, 3, 4, 0]], np.int64))
self.assertAllEqual(
self.evaluate(elem[1]), np.array([[1, 2, 3, 4, 0, 0]], np.int64))
num_elems += 1
self.assertEqual(num_elems, 1)
class DatasetPreprocessFnTest(tf.test.TestCase):
def test_train_preprocess_fn_return_dataset_element_spec(self):
ds = tf.data.Dataset.from_tensor_slices(TEST_DATA)
train_preprocess_fn = stackoverflow_dataset.create_train_dataset_preprocess_fn(
client_batch_size=32,
client_epochs_per_round=1,
max_seq_len=10,
max_training_elements_per_user=100,
vocab=['one', 'must'])
train_preprocessed_ds = train_preprocess_fn(ds)
self.assertEqual(train_preprocessed_ds.element_spec,
(tf.TensorSpec(shape=[None, 10], dtype=tf.int64),
tf.TensorSpec(shape=[None, 10], dtype=tf.int64)))
def test_test_preprocess_fn_return_dataset_element_spec(self):
ds = tf.data.Dataset.from_tensor_slices(TEST_DATA)
test_preprocess_fn = stackoverflow_dataset.create_test_dataset_preprocess_fn(
max_seq_len=10, vocab=['one', 'must'])
test_preprocessed_ds = test_preprocess_fn(ds)
self.assertEqual(test_preprocessed_ds.element_spec,
(tf.TensorSpec(shape=[None, 10], dtype=tf.int64),
tf.TensorSpec(shape=[None, 10], dtype=tf.int64)))
def test_train_preprocess_fn_returns_correct_sequence(self):
ds = tf.data.Dataset.from_tensor_slices(TEST_DATA)
train_preprocess_fn = stackoverflow_dataset.create_train_dataset_preprocess_fn(
client_batch_size=32,
client_epochs_per_round=1,
max_seq_len=6,
max_training_elements_per_user=100,
vocab=['one', 'must'])
train_preprocessed_ds = train_preprocess_fn(ds)
element = next(iter(train_preprocessed_ds))
# BOS is len(vocab)+2, EOS is len(vocab)+3, pad is 0, OOV is len(vocab)+1
self.assertAllEqual(
self.evaluate(element[0]), np.array([[4, 1, 2, 3, 5, 0]]))
def test_test_preprocess_fn_returns_correct_sequence(self):
ds = tf.data.Dataset.from_tensor_slices(TEST_DATA)
test_preprocess_fn = stackoverflow_dataset.create_test_dataset_preprocess_fn(
max_seq_len=6, vocab=['one', 'must'])
test_preprocessed_ds = test_preprocess_fn(ds)
element = next(iter(test_preprocessed_ds))
# BOS is len(vocab)+2, EOS is len(vocab)+3, pad is 0, OOV is len(vocab)+1
self.assertAllEqual(
self.evaluate(element[0]), np.array([[4, 1, 2, 3, 5, 0]]))
@test.skip_test_for_gpu
def test_take_with_repeat(self):
so_train, _, _ = stackoverflow_dataset.construct_word_level_datasets(
vocab_size=1000,
client_batch_size=10,
client_epochs_per_round=-1,
max_batches_per_user=8,
max_seq_len=20,
max_training_elements_per_user=128,
num_validation_examples=500)
for i in range(10):
client_ds = so_train.create_tf_dataset_for_client(so_train.client_ids[i])
self.assertEqual(_compute_length_of_dataset(client_ds), 8)
@test.skip_test_for_gpu
def test_raises_no_repeat_and_no_take(self):
with self.assertRaisesRegex(
ValueError, 'Argument client_epochs_per_round is set to -1'):
stackoverflow_dataset.construct_word_level_datasets(
vocab_size=100,
client_batch_size=10,
client_epochs_per_round=-1,
max_batches_per_user=-1,
max_seq_len=20,
max_training_elements_per_user=128,
num_validation_examples=500)
if __name__ == '__main__':
tf.test.main()
|
py | b416b9b6be308dc6721d9cbc285cacfa8e5e530a | # local modules
from .. import TemplateResource
from ..resource import P_JUNOS_ACTIVE, P_JUNOS_EXISTS
from ... import jxml as JXML
from .j2 import _J2LDR
# template files located in the ./templates directory
_RD_TEMPLATE = 'nat_static_simple__rd.j2.xml'
_WR_TEMPLATE = 'nat_static_simple__wr.j2.xml'
# dictionary of resource name items and associated XPath
_XPATH_NAMES = dict(
ruleset_name='nat/static/rule-set',
rule_name='nat/static/rule-set/rule'
)
class NatStaticSimple(TemplateResource):
PROPERTIES = [
'zone_from',
'dst_ip_addr',
'dst_port',
'src_ip_addr',
'src_port',
'port' # if set, will be used for [dst_port,src_port]
]
def __init__(self, junos, name=None, **kvargs):
TemplateResource.__init__(self, junos, name, **kvargs)
self._xpath_names = _XPATH_NAMES
self._j2_ldr = _J2LDR
self._j2_rd = _RD_TEMPLATE
self._j2_wr = _WR_TEMPLATE
if self.is_mgr:
return
self._name = self._r_xpath_names(name)
def _r_xpath_names(self, name):
if isinstance(name, str):
# if given a string, then default all the names to the same value
return dict(ruleset_name=name, rule_name=name)
if isinstance(name, dict):
# otherwise the name is a dictionary of the individual template
# names
            t_names = dict(name)
            if 'rule_name' not in t_names:
                t_names['rule_name'] = name['ruleset_name']
            return t_names
else:
raise RuntimeError("don't know what to do with resource name")
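    # Illustrative behavior of the name handling above (the names are
    # hypothetical): _r_xpath_names('outside-web') yields
    # {'ruleset_name': 'outside-web', 'rule_name': 'outside-web'}, while
    # _r_xpath_names({'ruleset_name': 'outside'}) yields
    # {'ruleset_name': 'outside', 'rule_name': 'outside'}.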
# -----------------------------------------------------------------------
# XML reading
# -----------------------------------------------------------------------
def _xml_to_py(self, as_xml, to_py):
"""
convert the read XML config into python dictionary
"""
# create a dictionary of names to XML elements
xml_ele = {}
xml_ele['ruleset_name'] = as_xml.find('.//static/rule-set')
e = as_xml.xpath(
'.//rule[name=$rule_name]',
rule_name=self._name['rule_name'])
xml_ele['rule_name'] = e[0] if len(e) else None
# set the exist/active status for each name
self._r_has_xml_status(xml_ele, to_py)
e = xml_ele['ruleset_name']
to_py['zone_from'] = e.find('from/zone').text
if xml_ele['rule_name'] is not None:
e = xml_ele['rule_name']
to_py['dst_ip_addr'] = e.find(
'.//destination-address/dst-addr').text
to_py['dst_port'] = e.find('.//destination-port/low').text
e = e.find('.//static-nat/prefix')
to_py['src_ip_addr'] = e.find('addr-prefix').text
to_py['src_port'] = e.find('mapped-port/low').text
return True
# -----------------------------------------------------------------------
# XML writing
# -----------------------------------------------------------------------
def _r_template_write_vars(self):
"""
~| OVERLOADS |~
"""
if 'port' in self.should:
# override the values in dst_port and src_port
port = self['port']
self['dst_port'] = port
self['src_port'] = port
return super(self.__class__, self)._r_template_write_vars()
|
bzl | b416b9f9e4b56f4a8a3ac7aaffab985ea3dd33a6 | #
# Description:
# Utilities for extracting information from pom xml trees.
#
load(":globals.bzl", "DOWNLOAD_PREFIX", "fetch_repo")
load(":packaging_type.bzl", "packaging_type")
load(":utils.bzl", "strings")
load(":xml.bzl", "xml")
# An enum of known labels
labels = struct(
# Structural tags
PROJECT = "project",
PARENT = "parent",
DEPENDENCY_MANAGEMENT = "dependencyManagement",
DEPENDENCIES = "dependencies",
DEPENDENCY = "dependency",
PROPERTIES = "properties",
# Identifiers
ARTIFACT_ID = "artifactId",
GROUP_ID = "groupId",
VERSION = "version",
TYPE = "type",
SCOPE = "scope",
OPTIONAL = "optional",
CLASSIFIER = "classifier",
SYSTEM_PATH = "systemPath",
PACKAGING = "packaging", # The same as type, but in the main section.
)
def _process_dependency(dep_node):
group_id = None
artifact_id = None
version = None
type = None
optional = False
scope = None
classifier = None
system_path = None
for c in dep_node.children:
if c.label == labels.GROUP_ID:
group_id = c.content
elif c.label == labels.ARTIFACT_ID:
artifact_id = c.content
elif c.label == labels.VERSION:
version = c.content
elif c.label == labels.CLASSIFIER:
classifier = c.content
elif c.label == labels.TYPE:
type = c.content
elif c.label == labels.SCOPE:
scope = c.content
elif c.label == labels.OPTIONAL:
optional = strings.trim(c.content).lower() == "true"
elif c.label == labels.SYSTEM_PATH:
system_path = c.content
return _dependency(
group_id = group_id,
artifact_id = artifact_id,
version = version,
type = type,
optional = optional,
scope = scope,
classifier = classifier,
system_path = system_path,
)
def _dependency(
group_id,
artifact_id,
version = None,
type = None,
optional = None,
scope = None,
classifier = None,
system_path = None):
return struct(
group_id = group_id,
artifact_id = artifact_id,
version = version,
type = type,
optional = optional,
scope = scope,
classifier = classifier,
system_path = system_path,
coordinates = "%s:%s" % (group_id, artifact_id),
)
# A set of property defaults for dependencies, to be merged at the last minute. Omits the groupId
# and artifactId so it will blow up if used anywhere but in the final merge step.
_DEPENDENCY_DEFAULT = struct(
version = None,
type = packaging_type.DEFAULT.name,
optional = False,
scope = "compile",
classifier = None,
system_path = None,
)
# Extracts dependency coordinates from a given <dependencies> node of a pom node.
# The parameter should be an xml node containing the tag <dependencies>
def _extract_dependencies(parent_node):
node = xml.find_first(parent_node, labels.DEPENDENCIES)
return [_process_dependency(x) for x in node.children] if bool(node) else []
# Extracts dependency coordinates from a given <dependencies> node within a <dependencyManagement> node of a pom node.
# The parameter should be an xml node containing the tag <dependencyManagement>
def _extract_dependency_management(project_node):
node = xml.find_first(project_node, labels.DEPENDENCY_MANAGEMENT)
return _extract_dependencies(node) if bool(node) else []
def _merge_dependency(fallback, dependency):
if not bool(fallback):
return dependency
# If this were a node tree, we could re-use the leaf element merging, but it's a struct. :(
# groupid and artifactid are always present, so don't bother merging them.
return _dependency(
group_id = dependency.group_id,
artifact_id = dependency.artifact_id,
version = dependency.version if bool(dependency.version) else fallback.version,
type = dependency.type if bool(dependency.type) else fallback.type,
optional = dependency.optional if bool(dependency.optional) else fallback.optional,
scope = dependency.scope if bool(dependency.scope) else fallback.scope,
classifier = dependency.classifier if bool(dependency.classifier) else fallback.classifier,
system_path = dependency.system_path if bool(dependency.system_path) else fallback.system_path,
)
def _get_variable(string):
start = string.find("${")
end = string.find("}")
return string[start + 2:end] if start >= 0 and end >= 0 and start < end - 3 else None
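# For example (illustrative): _get_variable("${project.version}") returns
# "project.version", while _get_variable("1.2.3") returns None.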
def _substitute_variable_if_present(string, properties):
if not bool(string):
return None
variable_label = _get_variable(string)
if bool(variable_label):
substitution = properties.get(variable_label, None)
if bool(substitution):
string = string.replace("${%s}" % variable_label, substitution)
return string
def _apply_property(dependency, properties):
# For now just check <version> and <groupId>
return _dependency(
group_id = _substitute_variable_if_present(dependency.group_id, properties),
artifact_id = dependency.artifact_id,
version = _substitute_variable_if_present(dependency.version, properties),
type = dependency.type,
optional = dependency.optional,
scope = dependency.scope,
classifier = dependency.classifier,
system_path = dependency.system_path,
)
# Merges any information from <dependencyManagement> and <properties> sections and returns the
# resulting processed dependencies.
def _get_processed_dependencies(project_node):
result = []
dependency_management = {}
for dep in _extract_dependency_management(project_node):
dependency_management[dep.coordinates] = dep
dependencies = _extract_dependencies(project_node)
properties = _extract_properties(project_node)
for dep in dependencies:
dep = _merge_dependency(dependency_management.get(dep.coordinates, None), dep)
dep = _apply_property(dep, properties)
dep = _merge_dependency(_DEPENDENCY_DEFAULT, dep) # fill in any needed default values.
result.append(dep)
return result
# Extracts artifact_id for the project metadata given.
# The parameter should be the project node of a parsed xml document tree, returned by poms.parse(xml_text). Any
# metadata merging (inherited metadata) should be done before calling this, to avoid missing out inherited packaging.
def _extract_artifact_id(node):
for child_node in node.children:
if child_node.label == "artifactId":
return child_node.content
return None
# Extracts packaging specification for the project metadata given.
# The parameter should be the project node of a parsed xml document tree, returned by poms.parse(xml_text). Any
# metadata merging (inherited metadata) should be done before calling this, to avoid missing out inherited packaging.
def _extract_packaging(project):
if project.label != "project":
fail("Attempted to extract a packaging tag from a %s node, instead of a <project> node." % project.label)
for node in project.children:
if node.label == labels.PACKAGING:
if bool(node.content):
return node.content
return packaging_type.DEFAULT.name
def _process_parent(dep_node):
group_id = None
artifact_id = None
version = None
for c in dep_node.children:
if c.label == labels.GROUP_ID:
group_id = c.content
elif c.label == labels.ARTIFACT_ID:
artifact_id = c.content
elif c.label == labels.VERSION:
version = c.content
return struct(
group_id = group_id,
artifact_id = artifact_id,
version = version,
type = "pom", # Parent POMs must be pure metadata artifacts (only a .pom, no .jar/.aar, etc.)
original_spec = "%s:%s:%s:pom" % (group_id, artifact_id, version),
classifier = None,
)
# Extracts parent specification for the supplied pom file.
# The parameter should be the project node of a parsed xml document tree, returned by poms.parse(xml_text)
def _extract_parent(project):
if project.label != "project":
fail("Attempted to extract a parent tag from a %s node, instead of a <project> node." % project.label)
for node in project.children:
if node.label == labels.PARENT:
return _process_parent(node)
return None
def _extract_properties(project):
if project.label != "project":
fail("Attempted to extract properties from a %s node, instead of a <project> node." % project.label)
properties_nodes = []
for node in project.children:
if node.label == labels.PROPERTIES:
properties_nodes = node.children
properties = {}
for node in properties_nodes:
properties[node.label] = node.content
# Generate maven's implicit properties from the project itself.
for label in [labels.GROUP_ID, labels.ARTIFACT_ID, labels.VERSION]:
node = xml.find_first(project, label)
if bool(node):
properties["project.%s" % label] = node.content
return properties
def _format_dependency(dep):
result = "%s:%s:%s" % (dep.group_id, dep.artifact_id, dep.version)
if bool(dep.classifier):
type = dep.type if bool(dep.type) else packaging_type.DEFAULT.name
result = "%s:%s" % (result, dep.type)
elif bool(dep.type) and not dep.type == packaging_type.DEFAULT.name:
result = "%s:%s" % (result, dep.type)
return result
def _parse(xml_text):
root = xml.parse(xml_text)
project = xml.find_first(root, "project")
if not bool(project):
fail("No <project> tag found in supplied xml: %s" % xml_text)
return project
# A pom-specific node-merge algorith,
def _merge_content_last_wins(a, b):
if not bool(a):
return b
if not bool(b):
return a
if a.label != b.label:
fail("Attempt to merge to different pom elements: %s, %s", (a, b))
return xml.new_node(
label = a.label,
content = b.content if bool(b.content) else a.content,
)
# This could be 100% reusable, except for the limit on recursion. The strategy can't loop back and call this. :/
def _merge_leaf_elements(parent_list, child_list):
index = {}
merged = []
for i in range(len(parent_list)):
merged.append(parent_list[i])
index[parent_list[i].label] = i
for i in range(len(child_list)):
index_of_property = index.get(child_list[i].label, -1)
if index_of_property >= 0:
merged[index_of_property] = child_list[i]
else:
merged.append(child_list[i])
            index[child_list[i].label] = len(merged) - 1
return merged
def _children_if_exists(node):
return node.children if bool(node) else []
def _merge_properties_section(parent_node, child_node):
if not bool(parent_node):
return child_node
if not bool(child_node):
return parent_node
children = _merge_leaf_elements(
_children_if_exists(xml.find_first(parent_node, labels.PROPERTIES)),
_children_if_exists(xml.find_first(child_node, labels.PROPERTIES)),
)
return xml.new_node(label = labels.PROPERTIES, children = children)
# Description:
# Merges the dependency section of the pom. This makes an assumption that deps sections won't have both the main
# artifact for a group_id/artifact_id pair AND one of the same pom's classified artifacts. It is possible, and in
# those cases, the deps will be wrong and the build snippet should be explicitly substituted.
def _merge_dependency_section(parent, child):
if not bool(parent):
return child if bool(child) else xml.new_node(label = labels.DEPENDENCIES)
if not bool(child):
return parent if bool(parent) else xml.new_node(label = labels.DEPENDENCIES)
if parent.label != labels.DEPENDENCIES:
fail("Parent node in merged dependency operation not a <dependencies> node: %s" % parent)
if child.label != labels.DEPENDENCIES:
fail("Child node in merged dependency operation not a <dependencies> node: %s" % child)
# index the <dependency> nodes by groupId:artifactId
parent_deps = {}
for node in _children_if_exists(parent):
key = "%s:%s" % (xml.find_first(node, labels.GROUP_ID), xml.find_first(node, labels.ARTIFACT_ID))
parent_deps[key] = node
child_deps = {}
for node in _children_if_exists(child):
key = "%s:%s" % (xml.find_first(node, labels.GROUP_ID), xml.find_first(node, labels.ARTIFACT_ID))
child_deps[key] = node
merged = {}
for key in parent_deps:
merged[key] = parent_deps[key]
for key in child_deps:
if bool(merged.get(key, None)):
existing_node = merged[key]
merged[key] = xml.new_node(
label = labels.DEPENDENCY,
children = _merge_leaf_elements(
_children_if_exists(merged[key]),
_children_if_exists(child_deps[key]),
),
)
else:
merged[key] = child_deps[key]
dependency_items = []
for key, node in merged.items():
dependency_items.append(node)
return xml.new_node(
label = labels.DEPENDENCIES,
children = dependency_items,
)
# A highly constrained merge (only merges core metadata, properties, dependency_management, and dependency sections.
# It drops all other sections on the floor, including parent pom metadata. It is also not as efficient as it could be,
# because pom sections are unordered, so there's a lot of scanning. It also requires lots of targeted methods since
# starlark has no recursion, so this code cannot be generalized without becoming a hellish batch of insane iteration.
def _merge_parent(parent, child):
merged = xml.new_node(label = labels.PROJECT, children = [])
# merge core identity metadata
for label in [labels.GROUP_ID, labels.ARTIFACT_ID, labels.VERSION]:
merged_node = _merge_content_last_wins(xml.find_first(parent, label), xml.find_first(child, label))
if bool(merged_node):
merged.children.append(merged_node)
# merge packaging with jar special cased.
child_packaging_node = xml.find_first(child, labels.PACKAGING)
merged.children.append(
child_packaging_node if bool(child_packaging_node) else (
xml.new_node(label = labels.PACKAGING, content = packaging_type.DEFAULT.name)
),
)
# merge properties
merged.children.append(_merge_properties_section(parent, child))
# merge dependencies
merged.children.append(_merge_dependency_section(
xml.find_first(parent, labels.DEPENDENCIES),
xml.find_first(child, labels.DEPENDENCIES),
))
# merge dependencyManagement->dependencies
merged.children.append(xml.new_node(label = labels.DEPENDENCY_MANAGEMENT, children = [
_merge_dependency_section(
xml.find_first(parent, labels.DEPENDENCY_MANAGEMENT, labels.DEPENDENCIES),
xml.find_first(child, labels.DEPENDENCY_MANAGEMENT, labels.DEPENDENCIES),
),
]))
return merged
# Builds a chain of nodes representing pom xml data in a hierarchical inheritance relationship which
# can be collapsed or wrapped.
def _get_inheritance_chain(ctx, artifact):
inheritance_chain = []
current = artifact
for _ in range(100): # Can't use recursion, so just iterate
if not bool(current):
ctx.report_progress("Merging poms for %s" % artifact.original_spec)
return inheritance_chain
path = ctx.path(fetch_repo.pom_target_relative_to(current, fetch_repo.pom_repo_name(artifact)))
ctx.report_progress("Reading pom for %s" % current.original_spec)
xml_text = ctx.read(path)
ctx.report_progress("Parsing pom for %s" % current.original_spec)
current_node = _parse(xml_text)
inheritance_chain += [current_node]
current = _extract_parent(current_node)
fail("Iterations exceeded. %s has more than 100 super-poms." % artifact.original_spec)
# Take an inheritance chain of xml trees (Fetched by _get_inheritance_chain) and merge them from
# the top (the end of the list) to the bottom (the beginning of the list)
def _merge_inheritance_chain(inheritance_chain):
merged = inheritance_chain.pop()
for next in reversed(inheritance_chain):
merged = _merge_parent(parent = merged, child = next)
return merged
# Get the merged metadata for the pom.xml and its parents.
def _get_project_metadata(ctx, artifact):
# The two steps are extracted for testability.
return _merge_inheritance_chain(_get_inheritance_chain(ctx, artifact))
for_testing = struct(
get_variable = _get_variable,
substitute_variable = _substitute_variable_if_present,
merge_inheritance_chain = _merge_inheritance_chain,
get_inheritance_chain = _get_inheritance_chain,
)
# Exposes the namespace for the poms functions.
poms = struct(
# Returns an xml element tree of the supplied pom text corresponding to the project
parse = _parse,
extract_artifact_id = _extract_artifact_id,
# Returns a list of structs containing each dependency declared pom xml tree.
extract_dependencies = _get_processed_dependencies,
# Returns a list of structs each dependency declared in the dependencyManagement of the pom xml tree.
extract_dependency_management = _extract_dependency_management,
# Returns a struct containing the critical elements of a parent in the pom tree, sutable for pom fetching.
extract_parent = _extract_parent,
# Returns a dictionary containing the properties of the pom xml tree.
extract_properties = _extract_properties,
# Returns a string representing the packaging type of the pom (jar, aar, etc.)
extract_packaging = _extract_packaging,
# Returns a string representation of the supplied dependency
format_dependency = _format_dependency,
# Merges a parent pom xml tree with a child xml tree.
merge_parent = _merge_parent,
# Get the effective (inheritance-merged) pom node tree, against which other functions in this struct can be
# performed. This function assumes that a fetch.pom_rule has been setup for the artifact in question, that it has
# been passed to the rules invoking this function, and will fail with a missing workspace if that is not the case.
get_project_metadata = _get_project_metadata,
)
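# Illustrative usage from a repository rule implementation (the variable
# names below are assumptions for the sketch):
#
#     project = poms.get_project_metadata(ctx, artifact)
#     for dep in poms.extract_dependencies(project):
#         print(poms.format_dependency(dep))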
|
py | b416ba38063e7943273fde6f41673bd1f52ac19a | import logging
import os
import cv2
import numpy as np
import pickle
from skimage.feature import hog
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.svm import LinearSVC
logging.basicConfig(format='%(asctime)s : %(message)s', level=logging.DEBUG)
logger = logging.getLogger(__name__)
MODEL_CHECKPOINT = 'data/model.p'
class Params(object):
def __init__(self, color_space, hog_params, size, nbins, heatmap_threshold):
self.color_space = color_space
self.hog_params = hog_params
self.nbins = nbins
self.size = size
self.heatmap_threshold = heatmap_threshold
def spatial(img, size):
return cv2.resize(img, size).ravel()
def color_histogram(img, bins):
ch = []
for i in range(img.shape[2]):
ch.append(np.histogram(img[:, :, i], bins=bins))
return np.concatenate((ch[0][0], ch[1][0], ch[2][0]))
def hog_features(img, params):
output = []
for ch in range(img.shape[2]):
feat = hog(img[:,:,ch], **params)
output.append(feat)
return output
def extract_features(path, params):
logger.debug('[extract_features] start...')
features = []
for filename in path:
img = cv2.imread(filename, cv2.IMREAD_COLOR)
if params.color_space:
feature_image = cv2.cvtColor(img, params.color_space)
spatial_feat = spatial(feature_image, params.size)
hist_feat = color_histogram(feature_image, params.nbins)
hog_feat = np.ravel(hog_features(feature_image, params.hog_params))
features.append(np.concatenate((spatial_feat, hist_feat, hog_feat)))
return features
def train(car_features, non_car_features):
logger.debug('[train] start')
x = np.vstack((car_features, non_car_features)).astype(np.float64)
scaler = StandardScaler().fit(x)
scaled_x = scaler.transform(x)
y = np.hstack((np.ones(len(car_features)),
np.zeros(len(non_car_features))))
split_params = {
'test_size': 0.2,
'random_state': np.random.randint(0, 100)
}
x_train, x_test, y_train, y_test = train_test_split(scaled_x, y, **split_params)
clf = LinearSVC()
clf.fit(x_train, y_train)
accuracy = clf.score(x_test, y_test)
logger.debug('[train] accuracy = %s' % accuracy)
return clf, scaler
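# A minimal end-to-end sketch. The directory layout and parameter values
# below are illustrative assumptions, not part of the original module.
if __name__ == '__main__':
    import glob
    params = Params(
        color_space=cv2.COLOR_BGR2YCrCb,
        hog_params={'orientations': 9,
                    'pixels_per_cell': (8, 8),
                    'cells_per_block': (2, 2)},
        size=(32, 32),
        nbins=32,
        heatmap_threshold=2,
    )
    car_features = extract_features(glob.glob('data/vehicles/*.png'), params)
    non_car_features = extract_features(glob.glob('data/non-vehicles/*.png'), params)
    clf, scaler = train(car_features, non_car_features)
    with open(MODEL_CHECKPOINT, 'wb') as f:
        pickle.dump({'clf': clf, 'scaler': scaler}, f)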
|
py | b416ba4fc775209e859d620ff6d371e24a4a82f9 | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
__all__ = [
'mesh_add_vertex_to_face_edge',
'mesh_insert_vertex_on_edge'
]
def mesh_add_vertex_to_face_edge(mesh, key, fkey, v):
"""Add an existing vertex of the mesh to an existing face.
Parameters
----------
mesh : :class:`compas.datastructures.Mesh`
The mesh data structure.
key : int
The identifier of the vertex.
fkey : int
The identifier of the face.
v : int
The identifier of the vertex before which the new vertex should be added.
Returns
-------
None
Notes
-----
The algorithm is merely there for convenience.
It does not check if the resulting mesh is still valid.
Examples
--------
Consider the following points and one face definition and the resulting mesh.
>>> from compas.datastructures import Mesh
>>> points = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0], [0.5, 0.0, 0.0]]
>>> faces = [[0, 1, 2, 3]]
>>> mesh = Mesh.from_vertices_and_faces(points, faces)
>>> len(mesh.face_vertices(0))
4
>>> mesh.vertex_degree(4)
0
To add the isolated vertex to the single mesh face
>>> mesh_add_vertex_to_face_edge(mesh, 4, 0, 1)
>>> len(mesh.face_vertices(0))
5
>>> mesh.vertex_degree(4)
2
"""
vertices = mesh.face_vertices(fkey)
i = vertices.index(v)
u = vertices[i - 1]
    vertices.insert(i, key)
mesh.halfedge[u][key] = fkey
mesh.halfedge[key][v] = fkey
if u not in mesh.halfedge[key]:
mesh.halfedge[key][u] = None
if key not in mesh.halfedge[v]:
mesh.halfedge[v][key] = None
del mesh.halfedge[u][v]
if u in mesh.halfedge[v]:
del mesh.halfedge[v][u]
if (u, v) in mesh.edgedata:
del mesh.edgedata[u, v]
if (v, u) in mesh.edgedata:
del mesh.edgedata[v, u]
def mesh_insert_vertex_on_edge(mesh, u, v, vkey=None):
"""Insert a vertex in the faces adjacent to an edge, between the two edge vertices.
If no vertex key is specified or if the key does not exist yet, a vertex is added and located at the edge midpoint.
If the vertex key exists, the position is not modified.
Parameters
----------
u: int
The first edge vertex.
v: int
The second edge vertex.
vkey: int, optional
The vertex key to insert.
Default is to auto-generate a new vertex identifier.
Returns
-------
int
The new vertex key.
Notes
-----
For two faces adjacent to an edge (a, b)
face_1 = [a, b, c] and
face_2 = [b, a, d]
applying
mesh_insert_vertex_on_edge(mesh, a, b, e)
yields the two new faces
face_1 = [a, e, b, c] and
face_2 = [b, e, a, d].
"""
# add new vertex if there is none or if vkey not in vertices
if vkey is None:
vkey = mesh.add_vertex(attr_dict={attr: xyz for attr, xyz in zip(
['x', 'y', 'z'], mesh.edge_midpoint(u, v))})
elif vkey not in list(mesh.vertices()):
vkey = mesh.add_vertex(key=vkey, attr_dict={attr: xyz for attr, xyz in zip(
['x', 'y', 'z'], mesh.edge_midpoint(u, v))})
# insert vertex
for fkey, halfedge in zip(mesh.edge_faces(u, v), [(u, v), (v, u)]):
if fkey is not None:
face_vertices = mesh.face_vertices(fkey)[:]
face_vertices.insert(face_vertices.index(halfedge[-1]), vkey)
mesh.delete_face(fkey)
mesh.add_face(face_vertices, fkey)
return vkey
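# --- Illustrative usage sketch for mesh_insert_vertex_on_edge ---
# Assumes vertex identifiers are auto-assigned sequentially (0..3 for the
# corners, so the inserted vertex receives key 4); run only as a script.
if __name__ == '__main__':
    from compas.datastructures import Mesh

    points = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0]]
    faces = [[0, 1, 2, 3]]
    mesh = Mesh.from_vertices_and_faces(points, faces)
    vkey = mesh_insert_vertex_on_edge(mesh, 0, 1)
    print(vkey)                    # expected: 4
    print(mesh.face_vertices(0))   # expected: [0, 4, 1, 2, 3]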
|
py | b416bb2c8438d4c34879f9c550e826e54bd108cf | # (c) 2014, James Tanner <[email protected]>
# (c) 2014, James Cammarata, <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import tempfile
import pytest
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch
from ansible import errors
from ansible.parsing import vault
from ansible.parsing.vault import VaultLib, VaultEditor, match_encrypt_secret
from ansible.module_utils._text import to_bytes, to_text
from units.mock.vault_helper import TextVaultSecret
v10_data = """$ANSIBLE_VAULT;1.0;AES
53616c7465645f5fd0026926a2d415a28a2622116273fbc90e377225c12a347e1daf4456d36a77f9
9ad98d59f61d06a4b66718d855f16fb7bdfe54d1ec8aeaa4d06c2dc1fa630ae1846a029877f0eeb1
83c62ffb04c2512995e815de4b4d29ed"""
v11_data = """$ANSIBLE_VAULT;1.1;AES256
62303130653266653331306264616235333735323636616539316433666463323964623162386137
3961616263373033353631316333623566303532663065310a393036623466376263393961326530
64336561613965383835646464623865663966323464653236343638373165343863623638316664
3631633031323837340a396530313963373030343933616133393566366137363761373930663833
3739"""
@pytest.mark.skipif(not vault.HAS_CRYPTOGRAPHY,
reason="Skipping cryptography tests because cryptography is not installed")
class TestVaultEditor(unittest.TestCase):
def setUp(self):
self._test_dir = None
self.vault_password = "test-vault-password"
vault_secret = TextVaultSecret(self.vault_password)
self.vault_secrets = [('vault_secret', vault_secret),
('default', vault_secret)]
@property
def vault_secret(self):
return match_encrypt_secret(self.vault_secrets)[1]
def tearDown(self):
if self._test_dir:
pass
# shutil.rmtree(self._test_dir)
self._test_dir = None
def _secrets(self, password):
vault_secret = TextVaultSecret(password)
vault_secrets = [('default', vault_secret)]
return vault_secrets
def test_methods_exist(self):
v = vault.VaultEditor(None)
slots = ['create_file',
'decrypt_file',
'edit_file',
'encrypt_file',
'rekey_file',
'read_data',
'write_data']
for slot in slots:
            assert hasattr(v, slot), "VaultEditor is missing the %s method" % slot
def _create_test_dir(self):
suffix = '_ansible_unit_test_%s_' % (self.__class__.__name__)
return tempfile.mkdtemp(suffix=suffix)
def _create_file(self, test_dir, name, content=None, symlink=False):
file_path = os.path.join(test_dir, name)
opened_file = open(file_path, 'wb')
if content:
opened_file.write(content)
opened_file.close()
return file_path
def _vault_editor(self, vault_secrets=None):
if vault_secrets is None:
vault_secrets = self._secrets(self.vault_password)
return VaultEditor(VaultLib(vault_secrets))
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_empty_target(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
mock_sp_call.side_effect = self._faux_command
ve = self._vault_editor()
b_ciphertext = ve._edit_file_helper(src_file_path, self.vault_secret)
self.assertNotEqual(src_contents, b_ciphertext)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_call_exception(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
error_txt = 'calling editor raised an exception'
mock_sp_call.side_effect = errors.AnsibleError(error_txt)
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
error_txt,
ve._edit_file_helper,
src_file_path,
self.vault_secret)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_symlink_target(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
src_file_link_path = os.path.join(self._test_dir, 'a_link_to_dest_file')
os.symlink(src_file_path, src_file_link_path)
mock_sp_call.side_effect = self._faux_command
ve = self._vault_editor()
b_ciphertext = ve._edit_file_helper(src_file_link_path, self.vault_secret)
self.assertNotEqual(src_file_contents, b_ciphertext,
'b_ciphertext should be encrypted and not equal to src_contents')
def _faux_editor(self, editor_args, new_src_contents=None):
if editor_args[0] == 'shred':
return
tmp_path = editor_args[-1]
        # simulate the tmp file being edited
tmp_file = open(tmp_path, 'wb')
if new_src_contents:
tmp_file.write(new_src_contents)
tmp_file.close()
    def _faux_command(self, cmd):
        # stand-in for subprocess.call; receives the editor command and does nothing
        pass
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_helper_no_change(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
# editor invocation doesn't change anything
def faux_editor(editor_args):
self._faux_editor(editor_args, src_file_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve._edit_file_helper(src_file_path, self.vault_secret, existing_data=src_file_contents)
new_target_file = open(src_file_path, 'rb')
new_target_file_contents = new_target_file.read()
self.assertEqual(src_file_contents, new_target_file_contents)
def _assert_file_is_encrypted(self, vault_editor, src_file_path, src_contents):
new_src_file = open(src_file_path, 'rb')
new_src_file_contents = new_src_file.read()
        # the file contents on disk should now be vault encrypted
self.assertTrue(vault.is_encrypted(new_src_file_contents))
src_file_plaintext = vault_editor.vault.decrypt(new_src_file_contents)
# the plaintext should not be encrypted
self.assertFalse(vault.is_encrypted(src_file_plaintext))
# and the new plaintext should match the original
self.assertEqual(src_file_plaintext, src_contents)
def _assert_file_is_link(self, src_file_link_path, src_file_path):
self.assertTrue(os.path.islink(src_file_link_path),
'The dest path (%s) should be a symlink to (%s) but is not' % (src_file_link_path, src_file_path))
def test_rekey_file(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
# FIXME: update to just set self._secrets or just a new vault secret id
new_password = 'password2:electricbugaloo'
new_vault_secret = TextVaultSecret(new_password)
new_vault_secrets = [('default', new_vault_secret)]
ve.rekey_file(src_file_path, vault.match_encrypt_secret(new_vault_secrets)[1])
# FIXME: can just update self._secrets here
new_ve = vault.VaultEditor(VaultLib(new_vault_secrets))
self._assert_file_is_encrypted(new_ve, src_file_path, src_file_contents)
def test_rekey_file_no_new_password(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
self.assertRaisesRegexp(errors.AnsibleError,
'The value for the new_password to rekey',
ve.rekey_file,
src_file_path,
None)
def test_rekey_file_not_encrypted(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
new_password = 'password2:electricbugaloo'
self.assertRaisesRegexp(errors.AnsibleError,
'input is not vault encrypted data',
ve.rekey_file,
src_file_path, new_password)
def test_plaintext(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
res = ve.plaintext(src_file_path)
self.assertEqual(src_file_contents, res)
def test_plaintext_not_encrypted(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
'input is not vault encrypted data',
ve.plaintext,
src_file_path)
def test_encrypt_file(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
self._assert_file_is_encrypted(ve, src_file_path, src_file_contents)
def test_encrypt_file_symlink(self):
self._test_dir = self._create_test_dir()
src_file_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_file_contents)
src_file_link_path = os.path.join(self._test_dir, 'a_link_to_dest_file')
os.symlink(src_file_path, src_file_link_path)
ve = self._vault_editor()
ve.encrypt_file(src_file_link_path, self.vault_secret)
self._assert_file_is_encrypted(ve, src_file_path, src_file_contents)
self._assert_file_is_encrypted(ve, src_file_link_path, src_file_contents)
self._assert_file_is_link(src_file_link_path, src_file_path)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_no_vault_id(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
ve.edit_file(src_file_path)
new_src_file = open(src_file_path, 'rb')
new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.1;AES256' in new_src_file_contents)
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
self.assertEqual(src_file_plaintext, new_src_contents)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_with_vault_id(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret,
vault_id='vault_secrets')
ve.edit_file(src_file_path)
new_src_file = open(src_file_path, 'rb')
new_src_file_contents = new_src_file.read()
self.assertTrue(b'$ANSIBLE_VAULT;1.2;AES256;vault_secrets' in new_src_file_contents)
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
self.assertEqual(src_file_plaintext, new_src_contents)
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_symlink(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
ve.encrypt_file(src_file_path, self.vault_secret)
src_file_link_path = os.path.join(self._test_dir, 'a_link_to_dest_file')
os.symlink(src_file_path, src_file_link_path)
ve.edit_file(src_file_link_path)
new_src_file = open(src_file_path, 'rb')
new_src_file_contents = new_src_file.read()
src_file_plaintext = ve.vault.decrypt(new_src_file_contents)
self._assert_file_is_link(src_file_link_path, src_file_path)
        self.assertEqual(src_file_plaintext, new_src_contents,
                         'The decrypted plaintext of the edited file is not the expected contents.')
@patch('ansible.parsing.vault.subprocess.call')
def test_edit_file_not_encrypted(self, mock_sp_call):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
new_src_contents = to_bytes("The info is different now.")
def faux_editor(editor_args):
self._faux_editor(editor_args, new_src_contents)
mock_sp_call.side_effect = faux_editor
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
'input is not vault encrypted data',
ve.edit_file,
src_file_path)
def test_create_file_exists(self):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
'please use .edit. instead',
ve.create_file,
src_file_path,
self.vault_secret)
def test_decrypt_file_exception(self):
self._test_dir = self._create_test_dir()
src_contents = to_bytes("some info in a file\nyup.")
src_file_path = self._create_file(self._test_dir, 'src_file', content=src_contents)
ve = self._vault_editor()
self.assertRaisesRegexp(errors.AnsibleError,
'input is not vault encrypted data',
ve.decrypt_file,
src_file_path)
@patch.object(vault.VaultEditor, '_editor_shell_command')
def test_create_file(self, mock_editor_shell_command):
def sc_side_effect(filename):
return ['touch', filename]
mock_editor_shell_command.side_effect = sc_side_effect
tmp_file = tempfile.NamedTemporaryFile()
os.unlink(tmp_file.name)
_secrets = self._secrets('ansible')
ve = self._vault_editor(_secrets)
ve.create_file(tmp_file.name, vault.match_encrypt_secret(_secrets)[1])
self.assertTrue(os.path.exists(tmp_file.name))
def test_decrypt_1_0(self):
# Skip testing decrypting 1.0 files if we don't have access to AES, KDF or Counter.
v10_file = tempfile.NamedTemporaryFile(delete=False)
with v10_file as f:
f.write(to_bytes(v10_data))
ve = self._vault_editor(self._secrets("ansible"))
# make sure the password functions for the cipher
error_hit = False
try:
ve.decrypt_file(v10_file.name)
except errors.AnsibleError:
error_hit = True
# verify decrypted content
f = open(v10_file.name, "rb")
fdata = to_text(f.read())
f.close()
os.unlink(v10_file.name)
assert error_hit is False, "error decrypting 1.0 file"
        self.assertEqual(fdata.strip(), "foo",
                         "incorrect decryption of 1.0 file: %s" % fdata.strip())
def test_decrypt_1_1(self):
v11_file = tempfile.NamedTemporaryFile(delete=False)
with v11_file as f:
f.write(to_bytes(v11_data))
ve = self._vault_editor(self._secrets("ansible"))
# make sure the password functions for the cipher
error_hit = False
try:
ve.decrypt_file(v11_file.name)
except errors.AnsibleError:
error_hit = True
# verify decrypted content
f = open(v11_file.name, "rb")
fdata = to_text(f.read())
f.close()
os.unlink(v11_file.name)
assert error_hit is False, "error decrypting 1.1 file"
assert fdata.strip() == "foo", "incorrect decryption of 1.1 file: %s" % fdata.strip()
def test_rekey_migration(self):
v10_file = tempfile.NamedTemporaryFile(delete=False)
with v10_file as f:
f.write(to_bytes(v10_data))
ve = self._vault_editor(self._secrets("ansible"))
# make sure the password functions for the cipher
error_hit = False
new_secrets = self._secrets("ansible2")
try:
ve.rekey_file(v10_file.name, vault.match_encrypt_secret(new_secrets)[1])
except errors.AnsibleError:
error_hit = True
# verify decrypted content
f = open(v10_file.name, "rb")
fdata = f.read()
f.close()
assert error_hit is False, "error rekeying 1.0 file to 1.1"
# ensure filedata can be decrypted, is 1.1 and is AES256
vl = VaultLib(new_secrets)
dec_data = None
error_hit = False
try:
dec_data = vl.decrypt(fdata)
except errors.AnsibleError:
error_hit = True
os.unlink(v10_file.name)
self.assertIn(b'AES256', fdata, 'AES256 was not found in vault file %s' % to_text(fdata))
assert error_hit is False, "error decrypting migrated 1.0 file"
assert dec_data.strip() == b"foo", "incorrect decryption of rekeyed/migrated file: %s" % dec_data
def test_real_path_dash(self):
filename = '-'
ve = self._vault_editor()
res = ve._real_path(filename)
self.assertEqual(res, '-')
def test_real_path_dev_null(self):
filename = '/dev/null'
ve = self._vault_editor()
res = ve._real_path(filename)
self.assertEqual(res, '/dev/null')
def test_real_path_symlink(self):
self._test_dir = self._create_test_dir()
file_path = self._create_file(self._test_dir, 'test_file', content=b'this is a test file')
file_link_path = os.path.join(self._test_dir, 'a_link_to_test_file')
os.symlink(file_path, file_link_path)
ve = self._vault_editor()
res = ve._real_path(file_link_path)
self.assertEqual(res, file_path)
@pytest.mark.skipif(not vault.HAS_PYCRYPTO,
reason="Skipping pycrypto tests because pycrypto is not installed")
class TestVaultEditorPyCrypto(unittest.TestCase):
def setUp(self):
self.has_cryptography = vault.HAS_CRYPTOGRAPHY
vault.HAS_CRYPTOGRAPHY = False
super(TestVaultEditorPyCrypto, self).setUp()
def tearDown(self):
vault.HAS_CRYPTOGRAPHY = self.has_cryptography
super(TestVaultEditorPyCrypto, self).tearDown()
|
py | b416bc119d4784747e7b5f40611e18d106ec5ab6 | #!/usr/bin/python
from subprocess import *
from time import sleep
import Adafruit_CharLCD
import Adafruit_GPIO as GPIO
rs = 26
rw = 19
en = 13
d4 = 12
d5 = 16
d6 = 20
d7 = 21
v0 = 6
cols = 8
lines = 2
gpio=GPIO.get_platform_gpio()
gpio.setup(rw, GPIO.OUT)
gpio.output(rw, False)
gpio.setup(v0, GPIO.OUT)
sleep(1)
lcd = Adafruit_CharLCD.Adafruit_CharLCD(rs, en, d4, d5, d6, d7, cols, lines)
lcd.clear()
cmd = "ip addr show wlan0 | grep -m 1 inet | awk '{print $2}' | cut -d/ -f1"
def run_cmd(cmd):
p = Popen(cmd, shell=True, stdout=PIPE)
    output = p.communicate()[0]
    # Popen returns bytes on Python 3; decode so str methods work below.
    return output.decode('utf-8')
while True:
lcd.home()
ipaddr = run_cmd(cmd)
    ips = ipaddr.split('.')
if len(ips) == 4:
ip1 = '%s.%s' % (ips[0], ips[1])
ip2 = '%s.%s' % (ips[2], ips[3].rstrip())
ip1 = 'i%7s\n' % (ip1)
ip2 = 'p%7s\n' % (ip2)
lcd.message(ip1)
lcd.message(ip2)
else:
lcd.message(' no \n')
lcd.message('network!')
sleep(1)
|
py | b416bcb7c7284415092888d811faf5cecbaada99 | import TexSoup
ts = TexSoup.TexSoup(
r'''\exg.\label{gl:label}
\a. No gloss
\bg. This is a first gloss\\
Dies ist eine erste Glosse\\
''')
for el in ts.contents:
print(type(el))
if type(el) == TexSoup.data.TexNode:
print(repr(el.contents))
else:
print(repr(el))
|
py | b416bdd39de1a8dde3d2e2cb2ecd45a577bdc45d | # -*- coding: utf-8 -*-
import unittest
from scuec_auth.utils import random_string, encrypt_aes, decrypt_aes
class TestUitls(unittest.TestCase):
def test_encrypt(self):
s = random_string(64)+'hello world'
k = random_string(16)
d = encrypt_aes(s, k)
print(d)
s_ = decrypt_aes(d, k)[64:]
print(s_)
if __name__ == '__main__':
unittest.main() |
py | b416becacb993804579767044cbce3bafe9b75c2 | import graphene
class FormError(graphene.ObjectType):
key = graphene.String()
value = graphene.List(graphene.String)
def list_errors(errors):
    return [FormError(key=key, value=value) for key, value in errors.items()]
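# --- Illustrative usage sketch (``SubmitForm`` and ``form`` are hypothetical,
# not part of this module) ---
#
#     class SubmitForm(graphene.Mutation):
#         errors = graphene.List(FormError)
#         ...
#
#     if not form.is_valid():
#         return SubmitForm(errors=list_errors(form.errors))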
|
py | b416bf7734c92b01ce864075baaf2cf4d68d4570 | #
# Copyright (c) 2008-2016 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class lbvserver_dospolicy_binding(base_resource) :
""" Binding class showing the dospolicy that can be bound to lbvserver.
"""
def __init__(self) :
self._policyname = None
self._priority = None
self._name = None
self.___count = None
@property
def priority(self) :
r"""Priority.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
r"""Priority.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def policyname(self) :
r"""Name of the policy bound to the LB vserver.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
r"""Name of the policy bound to the LB vserver.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def name(self) :
r"""Name for the virtual server. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at sign (@), equal sign (=), and hyphen (-) characters. Can be changed after the virtual server is created.
CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my vserver" or 'my vserver'). .<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
r"""Name for the virtual server. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at sign (@), equal sign (=), and hyphen (-) characters. Can be changed after the virtual server is created.
CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my vserver" or 'my vserver'). .<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
r""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(lbvserver_dospolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.lbvserver_dospolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
r""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name="", option_="") :
r""" Use this API to fetch lbvserver_dospolicy_binding resources.
"""
try :
if not name :
obj = lbvserver_dospolicy_binding()
response = obj.get_resources(service, option_)
else :
obj = lbvserver_dospolicy_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
r""" Use this API to fetch filtered set of lbvserver_dospolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = lbvserver_dospolicy_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
r""" Use this API to count lbvserver_dospolicy_binding resources configued on NetScaler.
"""
try :
obj = lbvserver_dospolicy_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
r""" Use this API to count the filtered set of lbvserver_dospolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = lbvserver_dospolicy_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Bindpoint:
REQUEST = "REQUEST"
RESPONSE = "RESPONSE"
class Labeltype:
reqvserver = "reqvserver"
resvserver = "resvserver"
policylabel = "policylabel"
class lbvserver_dospolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.lbvserver_dospolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.lbvserver_dospolicy_binding = [lbvserver_dospolicy_binding() for _ in range(length)]
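# --- Illustrative usage sketch (not part of the generated SDK file) ---
# Assumes a reachable NetScaler and valid credentials; the nitro_service
# import path and login flow below follow the usual nitro SDK pattern, and
# the address, credentials, and vserver name are placeholders.
#
#     from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
#
#     client = nitro_service("10.0.0.1", "http")
#     client.login("nsroot", "nsroot")
#     bindings = lbvserver_dospolicy_binding.get(client, name="my_lb_vserver")
#     for binding in bindings or []:
#         print(binding.policyname, binding.priority)
#     print(lbvserver_dospolicy_binding.count(client, "my_lb_vserver"))
#     client.logout()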
|
py | b416bfc233bbe28273fbcacff041ac69342e770f | import sys
sys.path.append('..')
import transformedlazygreedy as tlg
import test
test.add(tlg.only_one, ({1,2}, {1,2,3}), False)
test.add(tlg.only_one, ({1,2},{1,3}), True)
test.add(tlg.eval_utility, ({1,2}, {1,2,3,4}, {4}), 2)
cs = (
{1,2,3},
{4,5},
{1,2,3,4}
)
events = {1,2,3,4,5}
test.add(tlg.lazy_greedy_msc, (cs, events), [({1,2,3,4}, 2), ({4,5}, 1)])
cs = (
{1,2},
{4,5},
{5}
)
events = {1,2,3,4,5}
test.add(tlg.lazy_greedy_msc, (cs, events), [({1,2}, 0), ({4,5}, 1)])
cs = (
{1,2},
{4,5}
)
events = {1,2,3,4,5}
test.add(tlg.lazy_greedy_msc, (cs, events), [({1,2}, 0), ({4,5}, 1)])
cs = (
{0,1},
{2},
)
events = {0,1,2}
res = (
[
{1,2},
{1,2}
],
{0,1,2}
)
test.add(tlg.mtc_to_msc, (cs, events), res)
test.run()
|
py | b416bfc688711f9ec8f970ca33bc6aa2811ec1bd | from uuid import uuid4
from django.db import models
class CoreModel(models.Model):
"""
A model for all app models.
Attributes:
        timestamp (DateTimeField): the date the record was created. auto_now_add sets it only once, at creation.
        updated (DateTimeField): the date the record was last updated.
            auto_now refreshes it every time the object is saved.
        is_active (BooleanField): whether the object is 'active' or not. Defaults to True.
    Note:
        By setting the Meta class's abstract attribute to True,
        Django will not create a table for this model.
"""
id = models.UUIDField(
primary_key=True,
default=uuid4,
editable=False)
timestamp = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
is_active = models.BooleanField(default=True)
class Meta:
abstract = True
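# --- Illustrative usage sketch: a concrete model built on CoreModel. ---
# ``Article`` is a hypothetical example, not part of this app:
#
#     class Article(CoreModel):
#         title = models.CharField(max_length=128)
#
# Article inherits id, timestamp, updated, and is_active; Django creates a
# table for Article only, since CoreModel is abstract.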
|
py | b416c08eb013dd58cb21518c97356ff5bc9909ae | import pytest
def test_rnaseq_file_init(raw_files):
from genomic_data_service.rnaseq.domain.file import RnaSeqFile
rna_file = RnaSeqFile(raw_files[0], {})
assert isinstance(rna_file, RnaSeqFile)
assert rna_file.props == raw_files[0]
def test_rnaseq_file_url(raw_files):
from genomic_data_service.rnaseq.domain.file import RnaSeqFile
rna_file = RnaSeqFile(raw_files[0], {})
assert (
rna_file.url == 'https://www.encodeproject.org/files/ENCFF241WYH/@@download/ENCFF241WYH.tsv'
)
def test_rnaseq_file_path(raw_files):
from genomic_data_service.rnaseq.domain.file import RnaSeqFile
rna_file = RnaSeqFile(raw_files[0], {})
assert rna_file.path == '/tmp/ENCFF241WYH.tsv'
def test_rnaseq_file_get_expressions(raw_files, raw_expressions, mocker):
from genomic_data_service.rnaseq.domain.file import RnaSeqFile
rna_file = RnaSeqFile(raw_files[0], {})
mocker.patch(
'genomic_data_service.rnaseq.domain.file.get_expression_generator',
return_value=raw_expressions,
)
expressions = list(rna_file._get_expressions())
assert len(expressions) == 4
assert expressions[0].gene_id == 'ENSG00000034677.12'
assert expressions[1].transcript_ids == 'ENST00000042931.1,ENST00000549706.5,ENST00000552539.1,ENST00000553030.5'
assert expressions[2].tpm == 0.27
assert expressions[3].fpkm == 15.8
def test_rnaseq_file_get_indices_from_header():
from genomic_data_service.rnaseq.domain.file import get_indices_from_header
header = [
'gene_id',
'transcript_id(s)',
'length',
'effective_length',
'expected_count',
'TPM',
'FPKM',
'posterior_mean_count',
'posterior_standard_deviation_of_count',
'pme_TPM',
'pme_FPKM',
'TPM_ci_lower_bound',
'TPM_ci_upper_bound',
'TPM_coefficient_of_quartile_variation',
'FPKM_ci_lower_bound',
'FPKM_ci_upper_bound',
'FPKM_coefficient_of_quartile_variation',
]
assert get_indices_from_header(header) == [0, 1, 5, 6]
def test_rnaseq_file_get_values_from_row():
from genomic_data_service.rnaseq.domain.file import get_values_from_row
row = [0, 1, 2, 3, 4, 5]
assert get_values_from_row(row, [0, 2, 5]) == [0, 2, 5]
def test_rnaseq_file_get_expression_generator(local_quantification_tsv_path):
from genomic_data_service.rnaseq.domain.file import get_expression_generator
expressions = get_expression_generator(
'',
local_quantification_tsv_path
)
assert list(expressions) == [
['ENSG00000150873.11', 'ENST00000381585.7,ENST00000405022.3', '0.01', '0.02'],
['ENSG00000150893.10', 'ENST00000280481.8,ENST00000482551.1', '3.02', '4.69'],
['ENSG00000150907.7', 'ENST00000379561.5,ENST00000473775.1,ENST00000636651.1', '2.73', '4.23'],
['ENSG00000150938.9', 'ENST00000280527.6,ENST00000413985.1,ENST00000426856.1,ENST00000428774.1,ENST00000473403.5,ENST00000477491.5,ENST00000481321.1,ENST00000497236.1', '4.52', '7.01'],
['ENSG00000150961.14', 'ENST00000280551.10,ENST00000419654.6,ENST00000502526.1,ENST00000502830.1,ENST00000503683.1,ENST00000505134.5,ENST00000505280.1,ENST00000506622.5,ENST00000509818.5,ENST00000511033.1,ENST00000511481.5,ENST00000511715.1,ENST00000514418.1,ENST00000514561.5', '8.65', '13.42'],
['ENSG00000150967.17', 'ENST00000280560.12,ENST00000344275.11,ENST00000346530.9,ENST00000392439.7,ENST00000426173.6,ENST00000442028.6,ENST00000442833.6,ENST00000536976.5,ENST00000537276.5,ENST00000538895.5,ENST00000540285.5,ENST00000540971.5,ENST00000541424.1,ENST00000541983.1,ENST00000542448.5,ENST00000542678.5,ENST00000543935.1,ENST00000545373.1,ENST00000546077.1,ENST00000546289.5,ENST00000622723.1', '0.96', '1.49'],
['ENSG00000150977.10', 'ENST00000280571.9', '0.21', '0.32'],
['ENSG00000150990.7', 'ENST00000308736.6,ENST00000507267.2,ENST00000539298.1,ENST00000542400.5,ENST00000543962.1,ENST00000544745.1', '1.60', '2.48'],
['ENSG00000150991.14', 'ENST00000339647.5,ENST00000535131.1,ENST00000535859.1,ENST00000536661.1,ENST00000536769.1,ENST00000538617.5,ENST00000540351.1,ENST00000540700.1,ENST00000541272.1,ENST00000541645.1,ENST00000542416.1,ENST00000544481.1,ENST00000546120.2,ENST00000546271.1', '183.38', '284.55'],
['ENSG00000150995.19', 'ENST00000302640.12,ENST00000354582.11,ENST00000357086.10,ENST00000443694.4,ENST00000456211.8,ENST00000463980.6,ENST00000467056.6,ENST00000467545.6,ENST00000472205.1,ENST00000477577.2,ENST00000478515.2,ENST00000479831.1,ENST00000481415.2,ENST00000487016.1,ENST00000490572.1,ENST00000491868.2,ENST00000493491.6,ENST00000494681.5,ENST00000544951.6,ENST00000647624.1,ENST00000647673.1,ENST00000647685.1,ENST00000647708.1,ENST00000647717.1,ENST00000647900.1,ENST00000647997.1,ENST00000648016.1,ENST00000648038.1,ENST00000648208.1,ENST00000648212.1,ENST00000648266.1,ENST00000648309.1,ENST00000648390.1,ENST00000648431.1,ENST00000648510.1,ENST00000648564.1,ENST00000648770.1,ENST00000649015.1,ENST00000649051.1,ENST00000649139.1,ENST00000649144.1,ENST00000649272.1,ENST00000649314.1,ENST00000649414.1,ENST00000649425.1,ENST00000649430.1,ENST00000649669.1,ENST00000649694.1,ENST00000649767.1,ENST00000649908.1,ENST00000650074.1,ENST00000650079.1,ENST00000650139.1,ENST00000650146.1,ENST00000650294.1,ENST00000650552.1', '5.82', '9.04']
]
def test_rnaseq_file_get_expressions_local_file(local_quantification_tsv_path):
from genomic_data_service.rnaseq.domain.file import RnaSeqFile
from genomic_data_service.rnaseq.domain.expression import Expression
file_name = local_quantification_tsv_path.split('/')[-1]
base_path = local_quantification_tsv_path.replace(file_name, '')
props = {
'href': '/files/ENCFF241WYH/@@download/ENCFF241WYH.tsv',
}
rna_file = RnaSeqFile(props, {})
rna_file.BASE_PATH = base_path
rna_file.DOMAIN = ''
expressions = list(rna_file._get_expressions())
assert len(expressions) == 10
assert expressions[0] == Expression(
*[
'ENSG00000150873.11',
'ENST00000381585.7,ENST00000405022.3',
'0.01',
'0.02'
]
)
# This actually downloads a file
@pytest.mark.integration
def test_rnaseq_file_get_expressions_remote_file():
from genomic_data_service.rnaseq.domain.file import RnaSeqFile
from genomic_data_service.rnaseq.domain.expression import Expression
props = {
'href': '/files/ENCFF241WYH/@@download/ENCFF241WYH.tsv',
}
rna_file = RnaSeqFile(props, {})
expressions = list(rna_file._get_expressions())
assert len(expressions) == 59526
assert expressions[0] == Expression(
*[
'10904',
'10904',
'0.00',
'0.00'
]
)
def test_rnaseq_file_extract_file_properties(raw_files):
from genomic_data_service.rnaseq.domain.file import RnaSeqFile
rna_file = RnaSeqFile(raw_files[0], {})
rna_file._extract_file_properties()
assert rna_file._file_properties == {
'@id': '/files/ENCFF241WYH/',
'assay_title': 'polyA plus RNA-seq',
'assembly': 'GRCh38',
'biosample_ontology': {
'organ_slims': [
'musculature of body'
],
'term_name': 'muscle of trunk',
'synonyms': [
'torso muscle organ',
'trunk musculature',
'trunk muscle',
'muscle of trunk',
'muscle organ of torso',
'trunk muscle organ',
'muscle organ of trunk',
'body musculature'
],
'name': 'tissue_UBERON_0001774',
'term_id': 'UBERON:0001774',
'classification': 'tissue'
},
'dataset': '/experiments/ENCSR906HEV/',
'donors': [
'/human-donors/ENCDO676JUB/'
],
'genome_annotation': 'V29',
}
def test_rnaseq_file_extract_as_documents(raw_files, raw_expressions, mocker):
from genomic_data_service.rnaseq.domain.file import RnaSeqFile
mocker.patch(
'genomic_data_service.rnaseq.domain.file.get_expression_generator',
return_value=raw_expressions,
)
rna_file = RnaSeqFile(raw_files[0], {})
as_documents = list(rna_file.as_documents())
assert len(as_documents) == 4
assert as_documents[0] == {
'embedded': {
'expression': {
'gene_id': 'ENSG00000034677.12',
'transcript_ids': [
'ENST00000341084.6',
'ENST00000432381.2',
'ENST00000517584.5',
'ENST00000519342.1',
'ENST00000519449.5',
'ENST00000519527.5',
'ENST00000520071.1',
'ENST00000520903.1',
'ENST00000522182.1',
'ENST00000522369.5',
'ENST00000523167.1',
'ENST00000523255.5',
'ENST00000523481.5',
'ENST00000523644.1',
'ENST00000524233.1'
],
'tpm': 9.34,
'fpkm': 14.49
},
'file': {
'@id': '/files/ENCFF241WYH/',
'assay_title': 'polyA plus RNA-seq',
'assembly': 'GRCh38',
'biosample_ontology': {
'organ_slims': ['musculature of body'],
'term_name': 'muscle of trunk',
'synonyms': [
'torso muscle organ',
'trunk musculature',
'trunk muscle',
'muscle of trunk',
'muscle organ of torso',
'trunk muscle organ',
'muscle organ of trunk',
'body musculature'
],
'name': 'tissue_UBERON_0001774',
'term_id': 'UBERON:0001774',
'classification': 'tissue'
},
'dataset': '/experiments/ENCSR906HEV/',
'donors': [
'/human-donors/ENCDO676JUB/'
],
'genome_annotation': 'V29'
},
'dataset': {},
'gene': {},
'@id': '/expressions/ENCFF241WYH/ENSG00000034677.12/',
'@type': ['RNAExpression', 'Item'],
},
'_index': 'rna-expression',
'_type': 'rna-expression',
'principals_allowed': {
'view': ['system.Everyone']
},
'_id': '/expressions/ENCFF241WYH/ENSG00000034677.12/'
}
def test_rnaseq_file_extract_as_documents_with_repository(raw_files, raw_expressions, mocker, repositories):
from genomic_data_service.rnaseq.domain.file import RnaSeqFile
mocker.patch(
'genomic_data_service.rnaseq.domain.file.get_expression_generator',
return_value=raw_expressions,
)
rna_file = RnaSeqFile(raw_files[0], repositories)
as_documents = list(rna_file.as_documents())
assert len(as_documents) == 4
assert as_documents[0] == {
'embedded': {
'expression': {
'gene_id': 'ENSG00000034677.12',
'transcript_ids': [
'ENST00000341084.6',
'ENST00000432381.2',
'ENST00000517584.5',
'ENST00000519342.1',
'ENST00000519449.5',
'ENST00000519527.5',
'ENST00000520071.1',
'ENST00000520903.1',
'ENST00000522182.1',
'ENST00000522369.5',
'ENST00000523167.1',
'ENST00000523255.5',
'ENST00000523481.5',
'ENST00000523644.1',
'ENST00000524233.1'
],
'tpm': 9.34,
'fpkm': 14.49
},
'file': {
'@id': '/files/ENCFF241WYH/',
'assay_title': 'polyA plus RNA-seq',
'assembly': 'GRCh38',
'biosample_ontology': {
'organ_slims': [
'musculature of body'
],
'term_name': 'muscle of trunk',
'synonyms': [
'torso muscle organ',
'trunk musculature',
'trunk muscle',
'muscle of trunk',
'muscle organ of torso',
'trunk muscle organ',
'muscle organ of trunk',
'body musculature'
],
'name': 'tissue_UBERON_0001774',
'term_id': 'UBERON:0001774',
'classification': 'tissue'
},
'dataset': '/experiments/ENCSR906HEV/',
'donors': [
'/human-donors/ENCDO676JUB/'
],
'genome_annotation': 'V29'
},
'dataset': {
'@id': '/experiments/ENCSR906HEV/',
'biosample_summary': 'muscle of trunk tissue female embryo (113 days)',
'replicates': [
{
'library': {
'biosample': {
'age_units': 'day',
'sex': 'female',
'age': '113',
'donor': {
'organism': {
'scientific_name': 'Homo sapiens'
}
}
}
}
}
]
},
'gene': {
'geneid': '25897',
'symbol': 'RNF19A',
'name': 'ring finger protein 19A, RBR E3 ubiquitin protein ligase',
'synonyms': [
'DKFZp566B1346',
'RNF19',
'dorfin'
],
'@id': '/genes/25897/',
'title': 'RNF19A (Homo sapiens)'
},
'@id': '/expressions/ENCFF241WYH/ENSG00000034677.12/',
'@type': ['RNAExpression', 'Item'],
},
'_index': 'rna-expression',
'_type': 'rna-expression',
'principals_allowed': {
'view': ['system.Everyone']
},
'_id': '/expressions/ENCFF241WYH/ENSG00000034677.12/'
}
|
py | b416c180d701a85760d71ae12fdf7ab38cd780a6 | """
This module lets you practice one form of the ACCUMULATOR pattern,
namely, the "IN GRAPHICS" form which features:
-- DRAWING OBJECTS via ACCUMULATING positions and/or sizes,
as in: x = x + pixels
Additionally, it emphasizes that you must
** DO A CONCRETE EXAMPLE BY HAND **
before you can implement a solution to the problem in Python.
Authors: David Mutchler, Vibha Alangar, Matt Boutell, Dave Fisher, Mark Hays,
Aaron Wilkin, their colleagues, and Sam Hedrick.
""" # DONE: 1. PUT YOUR NAME IN THE ABOVE LINE.
import rosegraphics as rg
# -----------------------------------------------------------------------------
# Students: As you work each of these problems, ask yourself:
# 1. Do I need a loop?
# If so, HOW MANY LOOPS?
#
# 2. Where I need a loop, what needs to happen:
# -- BEFORE the loop?
# -- IN the loop?
# -- AFTER the loop?
# -----------------------------------------------------------------------------
def main():
""" Calls the TEST functions in this module. """
# run_test_draw_parallel_lines()
run_test_draw_lines()
def run_test_draw_parallel_lines():
""" Tests the draw_parallel_lines function. """
print()
print('--------------------------------------------------')
print('Testing the draw_parallel_lines function:')
print(' See the graphics windows that pop up.')
print('--------------------------------------------------')
# -------------------------------------------------------------------------
# TWO tests on ONE window.
# -------------------------------------------------------------------------
title = 'Tests 1 and 2 of DRAW_PARALLEL_LINES:'
title = title + ' 4 long lines, 7 short lines'
window1 = rg.RoseWindow(600, 350, title)
# Test 1:
left_most_point = rg.Point(400, 50)
draw_parallel_lines(7, left_most_point, 100, window1)
# Test 2:
left_most_point = rg.Point(50, 200)
draw_parallel_lines(4, left_most_point, 300, window1)
window1.close_on_mouse_click()
# -------------------------------------------------------------------------
# A third test on ANOTHER window.
# -------------------------------------------------------------------------
title = 'Test 3 of DRAW_PARALLEL_LINES: 12 very long lines!'
window2 = rg.RoseWindow(500, 400, title)
# Test 3:
left_most_point = rg.Point(20, 20)
draw_parallel_lines(12, left_most_point, 470, window2)
window2.close_on_mouse_click()
def draw_parallel_lines(n, point, length, window):
"""
What comes in: The four arguments are:
-- A positive integer n.
-- An rg.Point.
-- A positive integer length.
-- An rg.RoseWindow.
What goes out: Nothing (i.e., None).
Side effects:
See draw_parallel_lines.pdf in this project for pictures
that may help you better understand the following specification:
Draws n rg.Lines parallel to each other,
all on the given rg.RoseWindow, such that:
-- The first rg.Line has its left-most end at the given rg.Point.
-- Each rg.Line is a horizontal line
(i.e., parallel to the x-axis).
-- Each rg.Line has the given length.
-- Each rg.Line is 30 pixels below the previous rg.Line.
Must ** render ** but ** NOT close ** the window.
Type hints:
:type n: int
:type point: rg.Point
:type length: int
:type window: rg.RoseWindow
"""
# -------------------------------------------------------------------------
# DONE: 2. Implement and test this function.
# Tests have been written for you (above).
#
# CONSIDER using the ACCUMULATOR IN GRAPHICS pattern,
# as in draw_row_of_circles in m1e,
# instead of directly using the loop variable.
#
###########################################################################
# HINT: To figure out the code that computes the necessary
# endpoints for each line,
# ** FIRST DO A CONCRETE EXAMPLE BY HAND! **
###########################################################################
# -------------------------------------------------------------------------
    bPoint = rg.Point(point.x, point.y)
    ePoint = rg.Point(point.x + length, point.y)
    for _ in range(n):
        line = rg.Line(bPoint, ePoint)
        line.attach_to(window)
        # Accumulate: the next line starts 30 pixels below the current one.
        bPoint = rg.Point(bPoint.x, bPoint.y + 30)
        ePoint = rg.Point(ePoint.x, ePoint.y + 30)
window.render()
def run_test_draw_lines():
""" Tests the draw_lines function. """
print()
print('--------------------------------------------------')
print('Testing the draw_lines function:')
print(' See the graphics windows that pop up.')
print('--------------------------------------------------')
# TWO tests on ONE window.
title = 'Tests 1 & 2 of DRAW_LINES: 4 lines, 12 lines!'
window1 = rg.RoseWindow(350, 400, title)
draw_lines(4, rg.Point(20, 120), window1)
draw_lines(12, rg.Point(150, 230), window1)
window1.close_on_mouse_click()
# A third test on ANOTHER window.
window2 = rg.RoseWindow(350, 300, 'Test 3 of DRAW_LINES: 7 lines!')
draw_lines(7, rg.Point(50, 120), window2)
window2.close_on_mouse_click()
def draw_lines(n, point, window):
"""
What comes in: The three arguments are:
-- A integer n that is at least 2.
-- An rg.Point.
-- An rg.RoseWindow.
What goes out: Nothing (i.e., None).
Side effects:
See draw_lines.pdf in this project for pictures that
may help you better understand the following specification:
Draws n rg.Lines on the given rg.RoseWindow, such that:
-- The leftmost point of each of the rg.Lines
is the given rg.Point.
-- For the rightmost point of each of the lines:
-- Its x-coordinate is (pX + 100),
where pX is the x-coordinate of the given rg.Point.
-- The y-coordinates of the lines vary evenly
from (pY - 100) to (pY + 100),
where pY is the y-coordinate of the given rg.Point.
Must ** render ** but ** NOT close ** the window.
Type hints:
:type n: int
:type point: rg.Point
:type window: rg.RoseWindow
"""
# -------------------------------------------------------------------------
    # DONE: 3. Implement and test this function.
# Tests have been written for you (above).
#
# CONSIDER using the ACCUMULATOR IN GRAPHICS pattern,
# as in draw_row_of_circles in m1e,
# instead of directly using the loop variable.
#
###########################################################################
# HINT: To figure out the code that computes the necessary
# endpoints for each line,
# ** FIRST DO A CONCRETE EXAMPLE BY HAND! **
###########################################################################
# -------------------------------------------------------------------------
bPoint = rg.Point(point.x, point.y)
for k in range(n):
ePoint = rg.Point(point.x + 100, point.y - 100 + k * (200 / (n - 1)))
line = rg.Line(bPoint, ePoint)
line.attach_to(window)
window.render()
# -----------------------------------------------------------------------------
# Calls main to start the ball rolling.
# -----------------------------------------------------------------------------
main()
|
py | b416c316973a1b9485207cc20d86b29dd3d1dfd6 | #!/usr/bin/python
# Classification (U)
"""Program: elasticsearchdump_dump_db.py
Description: Unit testing of dump_db in elastic_class.ElasticSearchDump.
Usage:
test/unit/elastic_class/elasticsearchdump_dump_db.py
Arguments:
"""
# Libraries and Global Variables
# Standard
import sys
import os
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
# Third-party
import mock
# Local
sys.path.append(os.getcwd())
import elastic_class
import version
__version__ = version.__version__
class Repo(object):
"""Class: Repo
Description: Class representation of the snapshot class.
Methods:
get_repository
"""
def get_repository(self):
"""Method: get_repository
Description: Stub holder for snapshot.get_repository method.
Arguments:
"""
return {"reponame": {"type": "dbdump", "settings":
{"location": "/dir/path/dump"}}}
class Elasticsearch(object):
"""Class: ElasticSearch
Description: Class representation of the Elasticsearch class.
Methods:
__init__
info
"""
def __init__(self, host_list, port=9200):
"""Method: __init__
Description: Initialization instance of the class.
Arguments:
"""
self.hosts = host_list
self.port = port
self.info_status = {"cluster_name": "ClusterName",
"name": "servername"}
self.snapshot = Repo()
def info(self):
"""Method: info
Description: Stub holder for Elasticsearch.info method.
Arguments:
"""
return self.info_status
class UnitTest(unittest.TestCase):
"""Class: UnitTest
Description: Class which is a representation of a unit testing.
Methods:
setUp
test_no_repo_name
test_bad_db_name
test_default
"""
def setUp(self):
"""Function: setUp
Description: Initialization for unit testing.
Arguments:
"""
self.host_list = ["host1", "host2"]
self.host_str = "host1, host2"
self.repo = "reponame"
self.els = Elasticsearch(self.host_list)
self.dbs = "dbname"
self.dbs2 = ["dbname"]
self.nodes_data = {"serverid1": {"name": "hostname1", "settings":
{"path": {"data": ["/dir/data1"],
"logs": ["/dir/logs1"]}}},
"serverid2": {"name": "hostname2", "settings":
{"path": {"data": ["/dir/data2"],
"logs": ["/dir/logs2"]}}}}
self.health_data = {"status": "green", "cluster_name": "ClusterName"}
@mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
@mock.patch("elastic_class.create_snapshot", mock.Mock())
@mock.patch("elastic_class.get_cluster_nodes",
mock.Mock(return_value={"_nodes": {"total": 3}}))
@mock.patch("elastic_class.get_master_name",
mock.Mock(return_value="MasterName"))
@mock.patch("elastic_class.get_info",
mock.Mock(return_value={"name": "localservername"}))
@mock.patch("elastic_class.elastic_libs.get_latest_dump",
mock.Mock(side_effect=["dump2", "dump3"]))
@mock.patch("elastic_class.ElasticSearchDump._chk_status",
mock.Mock(return_value=(False, None, True)))
@mock.patch("elastic_class.get_cluster_health")
@mock.patch("elastic_class.get_nodes")
@mock.patch("elastic_class.get_dump_list")
@mock.patch("elastic_class.elasticsearch.Elasticsearch")
def test_no_repo_name(self, mock_es, mock_list, mock_nodes, mock_health):
"""Function: test_no_repo_name
Description: Test with no repo name set.
Arguments:
"""
mock_es.return_value = self.els
mock_list.side_effect = [["dump1", "dump2"],
["dump1", "dump2", "dump3"]]
mock_nodes.return_value = self.nodes_data
mock_health.return_value = self.health_data
els = elastic_class.ElasticSearchDump(self.host_list, repo=self.repo)
els.connect()
els.repo_name = None
self.assertEqual(
els.dump_db(self.dbs),
(True, "ERROR: Repository name not set."))
@mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
@mock.patch("elastic_class.create_snapshot", mock.Mock())
@mock.patch("elastic_class.get_cluster_nodes",
mock.Mock(return_value={"_nodes": {"total": 3}}))
@mock.patch("elastic_class.get_master_name",
mock.Mock(return_value="MasterName"))
@mock.patch("elastic_class.get_info",
mock.Mock(return_value={"name": "localservername"}))
@mock.patch("elastic_class.elastic_libs.get_latest_dump",
mock.Mock(side_effect=["dump2", "dump3"]))
@mock.patch("elastic_class.ElasticSearchDump._chk_status",
mock.Mock(return_value=(False, None, True)))
@mock.patch("elastic_class.get_cluster_health")
@mock.patch("elastic_class.get_nodes")
@mock.patch("elastic_class.get_dump_list")
@mock.patch("elastic_class.elasticsearch.Elasticsearch")
def test_bad_db_name(self, mock_es, mock_list, mock_nodes, mock_health):
"""Function: test_bad_db_name
Description: Test with bad database name.
Arguments:
"""
mock_es.return_value = self.els
mock_list.side_effect = [["dump1", "dump2"],
["dump1", "dump2", "dump3"]]
mock_nodes.return_value = self.nodes_data
mock_health.return_value = self.health_data
els = elastic_class.ElasticSearchDump(self.host_list, repo=self.repo)
els.connect()
self.assertEqual(
els.dump_db(self.dbs2),
(True, "ERROR: Database name(s) is not a string: ['dbname']"))
@mock.patch("elastic_class.is_active", mock.Mock(return_value=True))
@mock.patch("elastic_class.create_snapshot", mock.Mock())
@mock.patch("elastic_class.get_cluster_nodes",
mock.Mock(return_value={"_nodes": {"total": 3}}))
@mock.patch("elastic_class.get_master_name",
mock.Mock(return_value="MasterName"))
@mock.patch("elastic_class.get_info",
mock.Mock(return_value={"name": "localservername"}))
@mock.patch("elastic_class.elastic_libs.get_latest_dump",
mock.Mock(side_effect=["dump2", "dump3"]))
@mock.patch("elastic_class.ElasticSearchDump._chk_status",
mock.Mock(return_value=(False, None, True)))
@mock.patch("elastic_class.get_cluster_health")
@mock.patch("elastic_class.get_nodes")
@mock.patch("elastic_class.get_dump_list")
@mock.patch("elastic_class.elasticsearch.Elasticsearch")
def test_default(self, mock_es, mock_list, mock_nodes, mock_health):
"""Function: test_default
Description: Test with default settings.
Arguments:
"""
mock_es.return_value = self.els
mock_list.side_effect = [["dump1", "dump2"],
["dump1", "dump2", "dump3"]]
mock_nodes.return_value = self.nodes_data
mock_health.return_value = self.health_data
els = elastic_class.ElasticSearchDump(self.host_list, repo=self.repo)
els.connect()
self.assertEqual(els.dump_db(self.dbs), (False, None))
if __name__ == "__main__":
unittest.main()
|
py | b416c443fcac415087025aebf990bec5988caf58 | # Copyright (C) 2019-2021, TomTom (http://tomtom.com).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Modules for generating AsciiDoc output as part of the preprocessing."""
from .asciidoc import process_adoc, Context
from .errors import AsciiDocError
from .filters import InsertionFilter
__all__ = ["process_adoc", "AsciiDocError", "Context", "InsertionFilter"]
|
py | b416c4d69dfa21860993ef185ed13e80e43d947b | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
import platform
from azure.cli.core.commands.arm import resource_exists
from knack.log import get_logger
from knack.util import CLIError
logger = get_logger(__name__)
def get_folded_parameter_help_string(
display_name, allow_none=False, allow_new=False, default_none=False,
other_required_option=None):
""" Assembles a parameterized help string for folded parameters. """
quotes = '""' if platform.system() == 'Windows' else "''"
if default_none and not allow_none:
raise CLIError('Cannot use default_none=True and allow_none=False')
if not allow_new and not allow_none and not default_none:
help_text = 'Name or ID of an existing {}.'.format(display_name)
elif not allow_new and allow_none and not default_none:
help_text = 'Name or ID of an existing {}, or {} for none.'.format(display_name, quotes)
elif allow_new and not allow_none and not default_none:
help_text = 'Name or ID of the {}. Will create resource if it does not exist.'.format(
display_name)
elif allow_new and allow_none and not default_none:
help_text = 'Name or ID of the {}, or {} for none. Uses existing resource if available or will create a new ' \
'resource with defaults if omitted.'
help_text = help_text.format(display_name, quotes)
elif not allow_new and allow_none and default_none:
help_text = 'Name or ID of an existing {}, or none by default.'.format(display_name)
elif allow_new and allow_none and default_none:
help_text = 'Name or ID of a {}. Uses existing resource or creates new if specified, or none if omitted.'
help_text = help_text.format(display_name)
# add parent name option string (if applicable)
if other_required_option:
help_text = '{} If name specified, also specify {}'.format(help_text, other_required_option)
return help_text
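# Illustrative example (hypothetical parameter; output shown for non-Windows,
# where the "none" sentinel renders as ''):
#
#     >>> get_folded_parameter_help_string('virtual network', allow_none=True, allow_new=True)
#     "Name or ID of the virtual network, or '' for none. Uses existing
#     resource if available or will create a new resource with defaults
#     if omitted."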
def _validate_name_or_id(
cli_ctx, resource_group_name, property_value, property_type, parent_value, parent_type):
from azure.cli.core.commands.client_factory import get_subscription_id
from msrestazure.tools import parse_resource_id, is_valid_resource_id
has_parent = parent_type is not None
if is_valid_resource_id(property_value):
resource_id_parts = parse_resource_id(property_value)
value_supplied_was_id = True
elif has_parent:
resource_id_parts = dict(
name=parent_value,
resource_group=resource_group_name,
namespace=parent_type.split('/')[0],
type=parent_type.split('/')[1],
subscription=get_subscription_id(cli_ctx),
child_name_1=property_value,
child_type_1=property_type)
value_supplied_was_id = False
else:
resource_id_parts = dict(
name=property_value,
resource_group=resource_group_name,
namespace=property_type.split('/')[0],
type=property_type.split('/')[1],
subscription=get_subscription_id(cli_ctx))
value_supplied_was_id = False
return (resource_id_parts, value_supplied_was_id)
def get_folded_parameter_validator(
property_name, property_type, property_option,
parent_name=None, parent_type=None, parent_option=None,
allow_none=False, allow_new=False, default_none=False):
# Ensure that all parent parameters are specified if any are
parent_params = [parent_name, parent_type, parent_option]
has_parent = any(parent_params)
if has_parent and not all(parent_params):
raise CLIError('All parent parameters must be specified (name, type, option) if any are.')
if default_none and not allow_none:
raise CLIError('Cannot use default_none=True if allow_none=False')
# construct the validator
def validator(cmd, namespace):
from msrestazure.tools import resource_id
type_field_name = '{}_type'.format(property_name)
property_val = getattr(namespace, property_name, None)
parent_val = getattr(namespace, parent_name, None) if parent_name else None
# Check for the different scenarios (order matters)
# 1) provided value indicates None (pair of empty quotes)
if property_val in ('', '""', "''") or (property_val is None and default_none):
if not allow_none:
raise CLIError('{} cannot be None.'.format(property_option))
setattr(namespace, type_field_name, 'none')
setattr(namespace, property_name, None)
if parent_name and parent_val:
logger.warning('Ignoring: %s %s', parent_option, parent_val)
setattr(namespace, parent_name, None)
return # SUCCESS
# Create a resource ID we can check for existence.
(resource_id_parts, value_was_id) = _validate_name_or_id(
cmd.cli_ctx, namespace.resource_group_name, property_val, property_type, parent_val, parent_type)
# 2) resource exists
if resource_exists(cmd.cli_ctx, **resource_id_parts):
setattr(namespace, type_field_name, 'existingId')
setattr(namespace, property_name, resource_id(**resource_id_parts))
if parent_val:
if value_was_id:
logger.warning('Ignoring: %s %s', parent_option, parent_val)
setattr(namespace, parent_name, None)
return # SUCCESS
# if a parent name was required but not specified, raise a usage error
if has_parent and not value_was_id and not parent_val and not allow_new:
raise ValueError('incorrect usage: {0} ID | {0} NAME {1} NAME'.format(
property_option, parent_option))
# if non-existent ID was supplied, throw error depending on whether a new resource can
# be created.
if value_was_id:
usage_message = '{} NAME'.format(property_option) if not has_parent \
else '{} NAME [{} NAME]'.format(property_option, parent_option)
action_message = 'Specify ( {} ) to create a new resource.'.format(usage_message) if \
allow_new else 'Create the required resource and try again.'
raise CLIError('{} {} does not exist. {}'.format(
property_name, property_val, action_message))
# 3) try to create new resource
if allow_new:
setattr(namespace, type_field_name, 'new')
else:
raise CLIError(
'{} {} does not exist. Create the required resource and try again.'.format(
property_name, property_val))
return validator
|
py | b416c5dbda88e8c1c036f3c156d7e8d8bfaab27f | from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
db = SQLAlchemy()
migrate = Migrate()
class BoardGame(db.Model):
id = db.Column(db.BigInteger, primary_key=True)
    game_id = db.Column(db.BigInteger, nullable=False)
    name = db.Column(db.String(128), nullable=False)
minplayers = db.Column(db.Integer)
maxplayers = db.Column(db.Integer)
playingtime = db.Column(db.Integer)
description = db.Column(db.Text)
def parse_records(database_records):
"""
A helper method for converting a list of database record objects into a list of dictionaries, so they can be returned as JSON
Param: database_records (a list of db.Model instances)
Example: parse_records(User.query.all())
Returns: a list of dictionaries, each corresponding to a record, like...
[
{"id": 1, "title": "Book 1"},
{"id": 2, "title": "Book 2"},
{"id": 3, "title": "Book 3"},
]
"""
parsed_records = []
for record in database_records:
        parsed_record = dict(record.__dict__)  # copy so the ORM instance is not mutated
del parsed_record["_sa_instance_state"]
parsed_records.append(parsed_record)
    return parsed_records
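# Illustrative usage sketch (not part of the original module; assumes a Flask
# view with this db bound and flask.jsonify imported):
#
#   games = BoardGame.query.filter(BoardGame.maxplayers >= 4).all()
#   return jsonify(parse_records(games))
|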
py | b416c7d0e24bfa463b56bd553a9fe8b5868574ec | ###############################################################################
# Copyright (c) 2016 Salvatore Ventura <[email protected]>
#
# File: settings.py
#
# Author: Salvatore Ventura <[email protected]>
# Date: 07 Dec 2016
# Purpose: Global repository of common constants and values
#
# Revision: 1
# Comment: What's new in revision 1
#
###############################################################################
import logging
API_ROOT = 'https://api.unsplash.com'
LIB_NAME = 'pyunsplash'
LOG_LEVEL = logging.ERROR
|
py | b416c8ce20b1ce845c77707398923e214faf6691 | class Solution:
    def fib(self, n: int) -> int:
        if n <= 1:
            return n
        a, b = 0, 1
        for _ in range(2, n + 1):
            a, b = b, a + b  # keep the last two Fibonacci numbers
        return b % (1_000_000_000 + 7)
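# Quick check (illustrative): Solution().fib(10) == 55; results are reduced
# modulo 1_000_000_007, matching the return expression above.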
|
py | b416c8d035146edc68f0d7198f15aed0bc0093cd | _base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py'
# model settings
model = dict(
neck=[
dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
dict(
type='BFP',
in_channels=256,
num_levels=5,
refine_level=2,
refine_type='non_local')
],
roi_head=dict(
bbox_head=dict(
loss_bbox=dict(
_delete_=True,
type='BalancedL1Loss',
alpha=0.5,
gamma=1.5,
beta=1.0,
loss_weight=1.0))))
# model training and testing settings
train_cfg = dict(
rcnn=dict(
sampler=dict(
_delete_=True,
type='CombinedSampler',
num=512,
pos_fraction=0.25,
add_gt_as_proposals=True,
pos_sampler=dict(type='InstanceBalancedPosSampler'),
neg_sampler=dict(
type='IoUBalancedNegSampler',
floor_thr=-1,
floor_fraction=0,
num_bins=3))))
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
data = dict(
train=dict(proposal_file=data_root +
'libra_proposals/rpn_r50_fpn_1x_train2017.pkl'),
val=dict(proposal_file=data_root +
'libra_proposals/rpn_r50_fpn_1x_val2017.pkl'),
test=dict(proposal_file=data_root +
'libra_proposals/rpn_r50_fpn_1x_val2017.pkl'))
|
py | b416ca128f426d8bcb570120da603e8c30f06b80 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""service-management delete command."""
from googlecloudsdk.api_lib.service_management import base_classes
from googlecloudsdk.api_lib.service_management import services_util
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.core.console import console_io
from googlecloudsdk.third_party.apitools.base.py import exceptions as apitools_exceptions
class Delete(base.Command, base_classes.BaseServiceManagementCommand):
"""Deletes a service configuration given the service name."""
@staticmethod
def Args(parser):
"""Args is called by calliope to gather arguments for this command.
Args:
parser: An argparse parser that you can use to add arguments that go
on the command line after this command. Positional arguments are
allowed.
"""
parser.add_argument(
'service', help='The service to delete for this producer project.')
parser.add_argument(
'--force',
'-f',
action='store_true',
default=False,
help='Force the deletion of the service without warning prompt.')
base.ASYNC_FLAG.AddToParser(parser)
def Run(self, args):
"""Run 'service-management delete'.
Args:
args: argparse.Namespace, The arguments that this command was invoked
with.
Returns:
The response from the Delete API call (or None if cancelled).
Raises:
HttpException: An http error response was received while executing api
request.
"""
# If the user doesn't specify --force, prompt with a warning before
# continuing.
if not args.force:
continue_prompt_response = console_io.PromptContinue(
message='Are you sure? This will permanently delete the service '
'configuration and all of the associated consumer '
'information. This CANNOT be undone!',
prompt_string='Continue anyway',
default=True,
throw_if_unattended=True)
if not continue_prompt_response:
return
request = self.services_messages.ServicemanagementServicesDeleteRequest(
serviceName=args.service,
)
try:
result = self.services_client.services.Delete(request)
except apitools_exceptions.HttpError as error:
raise exceptions.HttpException(services_util.GetError(error))
    # 'async' is a reserved word in Python 3, so read the flag via getattr.
    return services_util.ProcessOperationResult(result, getattr(args, 'async'))
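# Illustrative invocation sketch (flag spellings assumed from the arguments
# defined above, not verified against a live CLI):
#
#   $ gcloud service-management delete my-service.example.com --force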
|
py | b416cabde25d9562f1f0bc77fa56b219248b3aaf | # -*- coding: utf-8 -*-
"""The app module, containing the app factory function."""
from flask import Flask, render_template
from myflaskapp import commands, public, user
from myflaskapp.extensions import bcrypt, cache, csrf_protect, db, debug_toolbar, login_manager, migrate, webpack
from myflaskapp.settings import ProdConfig
def create_app(config_object=ProdConfig):
"""An application factory, as explained here: http://flask.pocoo.org/docs/patterns/appfactories/.
:param config_object: The configuration object to use.
"""
app = Flask(__name__.split('.')[0])
app.config.from_object(config_object)
register_extensions(app)
register_blueprints(app)
register_errorhandlers(app)
register_shellcontext(app)
register_commands(app)
return app
def register_extensions(app):
"""Register Flask extensions."""
bcrypt.init_app(app)
cache.init_app(app)
db.init_app(app)
csrf_protect.init_app(app)
login_manager.init_app(app)
debug_toolbar.init_app(app)
migrate.init_app(app, db)
webpack.init_app(app)
return None
def register_blueprints(app):
"""Register Flask blueprints."""
app.register_blueprint(public.views.blueprint)
app.register_blueprint(user.views.blueprint)
return None
def register_errorhandlers(app):
"""Register error handlers."""
def render_error(error):
"""Render error template."""
        # If an HTTPException, pull the `code` attribute; default to 500
error_code = getattr(error, 'code', 500)
return render_template('{0}.html'.format(error_code)), error_code
for errcode in [401, 404, 500]:
app.errorhandler(errcode)(render_error)
return None
def register_shellcontext(app):
"""Register shell context objects."""
def shell_context():
"""Shell context objects."""
return {
'db': db,
'User': user.models.User}
app.shell_context_processor(shell_context)
def register_commands(app):
"""Register Click commands."""
app.cli.add_command(commands.test)
app.cli.add_command(commands.lint)
app.cli.add_command(commands.clean)
app.cli.add_command(commands.urls)
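# Illustrative usage sketch (not part of the original module):
#
#   app = create_app()   # ProdConfig by default
#   app.run()            # or hand `app` to a WSGI server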
|
py | b416caf959d0188c78420a791aa8bd6deec12ef9 | # SPDX-Copyright: Copyright (c) Capital One Services, LLC
# SPDX-License-Identifier: Apache-2.0
# Copyright 2020 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
"""
Pure Python implementation of CEL.
Test the main CLI.
Python >= 3.9 preserves order of arguments defined in :mod:`argparse`.
Python < 3.9 alphabetizes the arguments. This makes string comparisons
challenging in expected results.
"""
import argparse
import io
import sys
from unittest.mock import Mock, call, sentinel
from pytest import fixture, raises
import celpy
import celpy.__main__
from celpy import celtypes
@fixture
def mock_os_environ(monkeypatch):
monkeypatch.setitem(celpy.__main__.os.environ, "OS_ENV_VAR", "3.14")
def test_arg_type_value(mock_os_environ):
"""GIVEN arg values; WHEN parsing; THEN correct interpretation."""
assert celpy.__main__.arg_type_value("name:int=42") == (
"name",
celtypes.IntType,
42,
)
assert celpy.__main__.arg_type_value("OS_ENV_VAR") == (
"OS_ENV_VAR",
celtypes.StringType,
"3.14",
)
assert celpy.__main__.arg_type_value("OS_ENV_VAR:double") == (
"OS_ENV_VAR",
celtypes.DoubleType,
3.14,
)
with raises(argparse.ArgumentTypeError):
celpy.__main__.arg_type_value("name:type:value")
def test_get_options():
"""GIVEN verbose settings; WHEN parsing; THEN correct interpretation."""
options = celpy.__main__.get_options(["--arg", "name:int=42", "-n", "355./113."])
assert options.arg == [("name", celtypes.IntType, 42)]
assert options.null_input
assert options.expr == "355./113."
assert options.verbose == 0
options = celpy.__main__.get_options(["-v", "-n", '"hello world"'])
assert options.null_input
assert options.expr == '"hello world"'
assert options.verbose == 1
options = celpy.__main__.get_options(["-vv", ".doc.field * 42"])
assert not options.null_input
assert options.expr == ".doc.field * 42"
assert options.verbose == 2
def test_arg_type_bad(capsys):
"""GIVEN invalid arg values; WHEN parsing; THEN correct interpretation."""
with raises(SystemExit) as exc_info:
options = celpy.__main__.get_options(
["--arg", "name:nope=42", "-n", "355./113."]
)
assert exc_info.value.args == (2,)
out, err = capsys.readouterr()
assert err.splitlines() == [
"usage: celpy [-h] [-v] [-a ARG] [-n] [-s] [-i] [--json-package NAME]",
" [--json-document NAME] [-b] [-f FORMAT]",
" [expr]",
"celpy: error: argument -a/--arg: arg name:nope=42 type name not in ['int', "
"'uint', 'double', 'bool', 'string', 'bytes', 'list', 'map', 'null_type', "
"'single_duration', 'single_timestamp', 'int64_value', 'uint64_value', "
"'double_value', 'bool_value', 'string_value', 'bytes_value', 'number_value', "
"'null_value']",
]
def test_arg_value_bad(capsys):
"""GIVEN invalid arg values; WHEN parsing; THEN correct interpretation."""
with raises(SystemExit) as exc_info:
options = celpy.__main__.get_options(
["--arg", "name:int=nope", "-n", "355./113."]
)
assert exc_info.value.args == (2,)
out, err = capsys.readouterr()
assert err.splitlines() == [
"usage: celpy [-h] [-v] [-a ARG] [-n] [-s] [-i] [--json-package NAME]",
" [--json-document NAME] [-b] [-f FORMAT]",
" [expr]",
"celpy: error: argument -a/--arg: arg name:int=nope value invalid for the supplied type",
]
def test_arg_combo_bad(capsys):
"""GIVEN invalid arg combinations; WHEN parsing; THEN correct interpretation."""
error_prefix = [
"usage: celpy [-h] [-v] [-a ARG] [-n] [-s] [-i] [--json-package NAME]",
" [--json-document NAME] [-b] [-f FORMAT]",
" [expr]",
]
with raises(SystemExit) as exc_info:
options = celpy.__main__.get_options(
["-i", "-n", "355./113."]
)
assert exc_info.value.args == (2,)
out, err = capsys.readouterr()
assert err.splitlines() == error_prefix + [
"celpy: error: Interactive mode and an expression provided",
]
with raises(SystemExit) as exc_info:
options = celpy.__main__.get_options(
["-n"]
)
assert exc_info.value.args == (2,)
out, err = capsys.readouterr()
assert err.splitlines() == error_prefix + [
"celpy: error: No expression provided",
]
with raises(SystemExit) as exc_info:
options = celpy.__main__.get_options(
["-n", "--json-document=_", "--json-package=_"]
)
assert exc_info.value.args == (2,)
out, err = capsys.readouterr()
assert err.splitlines() == error_prefix + [
"celpy: error: Either use --json-package or --json-document, not both",
]
@fixture
def mock_cel_environment(monkeypatch):
mock_runner = Mock(evaluate=Mock(return_value=str(sentinel.OUTPUT)))
mock_env = Mock(
compile=Mock(return_value=sentinel.AST), program=Mock(return_value=mock_runner)
)
mock_env_class = Mock(return_value=mock_env)
monkeypatch.setattr(celpy.__main__, "Environment", mock_env_class)
return mock_env_class
def test_main_0(mock_cel_environment, caplog, capsys):
"""GIVEN null-input AND expression; WHEN eval; THEN correct internal object use."""
argv = ["--null-input", '"Hello world! I\'m " + name + "."']
status = celpy.__main__.main(argv)
assert status == 0
assert mock_cel_environment.mock_calls == [call(package=None, annotations=None)]
env = mock_cel_environment.return_value
assert env.compile.mock_calls == [call('"Hello world! I\'m " + name + "."')]
assert env.program.mock_calls == [call(sentinel.AST)]
prgm = env.program.return_value
assert prgm.evaluate.mock_calls == [call({})]
assert caplog.messages == []
out, err = capsys.readouterr()
assert out == '"sentinel.OUTPUT"\n'
assert err == ""
def test_main_1(mock_cel_environment, caplog, capsys):
"""GIVEN null-input AND arg AND expression; WHEN eval; THEN correct internal object use."""
argv = [
"--arg",
"name:string=CEL",
"--null-input",
'"Hello world! I\'m " + name + "."',
]
status = celpy.__main__.main(argv)
assert status == 0
assert mock_cel_environment.mock_calls == [
call(package=None, annotations={"name": celtypes.StringType})
]
env = mock_cel_environment.return_value
assert env.compile.mock_calls == [call('"Hello world! I\'m " + name + "."')]
assert env.program.mock_calls == [call(sentinel.AST)]
prgm = env.program.return_value
assert prgm.evaluate.mock_calls == [call({"name": "CEL"})]
assert caplog.messages == []
out, err = capsys.readouterr()
assert out == '"sentinel.OUTPUT"\n'
assert err == ""
def test_main_pipe(mock_cel_environment, caplog, capsys):
"""GIVEN JSON AND expression; WHEN eval; THEN correct internal object use."""
argv = ['"Hello world! I\'m " + name + "."']
sys.stdin = io.StringIO('{"name": "CEL"}\n')
status = celpy.__main__.main(argv)
sys.stdin = sys.__stdin__
assert status == 0
assert mock_cel_environment.mock_calls == [call(package="jq", annotations=None)]
env = mock_cel_environment.return_value
assert env.compile.mock_calls == [call('"Hello world! I\'m " + name + "."')]
assert env.program.mock_calls == [call(sentinel.AST)]
prgm = env.program.return_value
assert prgm.evaluate.mock_calls == [
call(
{
"jq": celtypes.MapType(
{celtypes.StringType("name"): celtypes.StringType("CEL")}
)
}
)
]
assert caplog.messages == []
out, err = capsys.readouterr()
assert out == '"sentinel.OUTPUT"\n'
assert err == ""
def test_main_0_non_boolean(mock_cel_environment, caplog, capsys):
"""
GIVEN null-input AND boolean option and AND non-bool expr
WHEN eval
THEN correct internal object use.
"""
argv = ["-bn", '"Hello world! I\'m " + name + "."']
status = celpy.__main__.main(argv)
assert status == 2
assert mock_cel_environment.mock_calls == [call(package=None, annotations=None)]
env = mock_cel_environment.return_value
assert env.compile.mock_calls == [call('"Hello world! I\'m " + name + "."')]
assert env.program.mock_calls == [call(sentinel.AST)]
prgm = env.program.return_value
assert prgm.evaluate.mock_calls == [call({})]
assert caplog.messages == [
"Expected celtypes.BoolType, got <class 'str'> = 'sentinel.OUTPUT'"
]
out, err = capsys.readouterr()
assert out == ""
assert err == ""
@fixture
def mock_cel_environment_false(monkeypatch):
mock_runner = Mock(evaluate=Mock(return_value=celtypes.BoolType(False)))
mock_env = Mock(
compile=Mock(return_value=sentinel.AST), program=Mock(return_value=mock_runner)
)
mock_env_class = Mock(return_value=mock_env)
monkeypatch.setattr(celpy.__main__, "Environment", mock_env_class)
return mock_env_class
def test_main_0_boolean(mock_cel_environment_false, caplog, capsys):
"""
GIVEN null-input AND boolean option AND false expr
WHEN eval
THEN correct internal object use.
"""
argv = ["-bn", "2 == 1"]
status = celpy.__main__.main(argv)
assert status == 1
assert mock_cel_environment_false.mock_calls == [
call(package=None, annotations=None)
]
env = mock_cel_environment_false.return_value
assert env.compile.mock_calls == [call("2 == 1")]
assert env.program.mock_calls == [call(sentinel.AST)]
prgm = env.program.return_value
assert prgm.evaluate.mock_calls == [call({})]
assert caplog.messages == []
out, err = capsys.readouterr()
assert out == ""
assert err == ""
@fixture
def mock_cel_environment_integer(monkeypatch):
mock_runner = Mock(evaluate=Mock(return_value=celtypes.IntType(3735928559)))
mock_env = Mock(
compile=Mock(return_value=sentinel.AST), program=Mock(return_value=mock_runner)
)
mock_env_class = Mock(return_value=mock_env)
monkeypatch.setattr(celpy.__main__, "Environment", mock_env_class)
return mock_env_class
def test_main_slurp_int_format(mock_cel_environment_integer, caplog, capsys):
"""
GIVEN JSON AND slurp option AND formatted output AND int expr
WHEN eval
THEN correct internal object use.
"""
argv = ["-s", "-f", "#8x", "339629869*11"]
sys.stdin = io.StringIO('{"name": "CEL"}\n')
status = celpy.__main__.main(argv)
sys.stdin = sys.__stdin__
assert status == 0
assert mock_cel_environment_integer.mock_calls == [
call(package='jq', annotations=None)
]
env = mock_cel_environment_integer.return_value
assert env.compile.mock_calls == [call("339629869*11")]
assert env.program.mock_calls == [call(sentinel.AST)]
prgm = env.program.return_value
assert prgm.evaluate.mock_calls == [
call({'jq': celtypes.MapType({celtypes.StringType('name'): celtypes.StringType('CEL')})})
]
assert caplog.messages == []
out, err = capsys.readouterr()
assert out == "0xdeadbeef\n"
assert err == ""
@fixture
def mock_cel_environment_bool(monkeypatch):
mock_runner = Mock(evaluate=Mock(return_value=celtypes.BoolType(False)))
mock_env = Mock(
compile=Mock(return_value=sentinel.AST), program=Mock(return_value=mock_runner)
)
mock_env_class = Mock(return_value=mock_env)
monkeypatch.setattr(celpy.__main__, "Environment", mock_env_class)
return mock_env_class
def test_main_slurp_bool_status(mock_cel_environment_bool, caplog, capsys):
"""
GIVEN JSON AND slurp option AND formatted output AND int expr
WHEN eval
THEN correct internal object use.
"""
argv = ["-s", "-b", '.name == "not CEL"']
sys.stdin = io.StringIO('{"name": "CEL"}\n')
status = celpy.__main__.main(argv)
sys.stdin = sys.__stdin__
assert status == 1
assert mock_cel_environment_bool.mock_calls == [
call(package='jq', annotations=None)
]
env = mock_cel_environment_bool.return_value
assert env.compile.mock_calls == [call('.name == "not CEL"')]
assert env.program.mock_calls == [call(sentinel.AST)]
prgm = env.program.return_value
assert prgm.evaluate.mock_calls == [
call({'jq': celtypes.MapType({celtypes.StringType('name'): celtypes.StringType('CEL')})})
]
assert caplog.messages == []
out, err = capsys.readouterr()
assert out == "false\n"
assert err == ""
def test_main_0_int_format(mock_cel_environment_integer, caplog, capsys):
"""
GIVEN slurp option AND formatted output AND int expr
WHEN eval
THEN correct internal object use.
"""
argv = ["-n", "-f", "#8x", "339629869*11"]
status = celpy.__main__.main(argv)
assert status == 0
assert mock_cel_environment_integer.mock_calls == [
call(package=None, annotations=None)
]
env = mock_cel_environment_integer.return_value
assert env.compile.mock_calls == [call("339629869*11")]
assert env.program.mock_calls == [call(sentinel.AST)]
prgm = env.program.return_value
assert prgm.evaluate.mock_calls == [call({})]
assert caplog.messages == []
out, err = capsys.readouterr()
assert out == "0xdeadbeef\n"
assert err == ""
def test_main_verbose(mock_cel_environment, caplog, capsys):
"""GIVEN verbose AND expression; WHEN eval; THEN correct log output."""
argv = ["-v", "[2, 4, 5].map(x, x/2)"]
status = celpy.__main__.main(argv)
assert status == 0
assert mock_cel_environment.mock_calls == [call(annotations=None, package="jq")]
assert caplog.messages == ["Expr: '[2, 4, 5].map(x, x/2)'"]
out, err = capsys.readouterr()
assert out == ""
assert err == ""
def test_main_very_verbose(mock_cel_environment, caplog, capsys):
"""GIVEN very verbose AND expression; WHEN eval; THEN correct log output."""
argv = ["-vv", "[2, 4, 5].map(x, x/2)"]
status = celpy.__main__.main(argv)
assert status == 0
assert mock_cel_environment.mock_calls == [call(annotations=None, package="jq")]
expected_namespace = argparse.Namespace(
verbose=2, arg=None, null_input=False, slurp=False, interactive=False,
package='jq', document=None,
boolean=False, format=None,
expr='[2, 4, 5].map(x, x/2)'
)
assert caplog.messages == [
str(expected_namespace),
"Expr: '[2, 4, 5].map(x, x/2)'",
]
out, err = capsys.readouterr()
assert out == ""
assert err == ""
@fixture
def mock_cel_environment_syntax_error(monkeypatch):
mock_runner = Mock(evaluate=Mock(return_value=str(sentinel.OUTPUT)))
mock_env = Mock(
compile=Mock(side_effect=celpy.CELParseError((sentinel.arg0, sentinel.arg1))),
cel_parser=Mock(error_text=Mock(return_value=sentinel.Formatted_Error)),
)
mock_env_class = Mock(return_value=mock_env)
monkeypatch.setattr(celpy.__main__, "Environment", mock_env_class)
return mock_env_class
def test_main_parse_error(mock_cel_environment_syntax_error, caplog, capsys):
"""GIVEN syntax error; WHEN eval; THEN correct stderr output."""
argv = ["-n", "[nope++]"]
status = celpy.__main__.main(argv)
assert status == 1
assert mock_cel_environment_syntax_error.mock_calls == [
call(package=None, annotations=None)
]
expected_namespace = argparse.Namespace(
verbose=0, arg=None, null_input=True, slurp=False, interactive=False,
package='jq', document=None,
boolean=False, format=None,
expr='[nope++]'
)
assert caplog.messages == [
str(expected_namespace),
"Expr: '[nope++]'",
]
out, err = capsys.readouterr()
assert out == ""
assert err == "sentinel.Formatted_Error\n"
@fixture
def mock_cel_environment_eval_error(monkeypatch):
mock_runner = Mock(
evaluate=Mock(side_effect=celpy.CELEvalError((sentinel.arg0, sentinel.arg1)))
)
mock_env = Mock(
compile=Mock(return_value=sentinel.AST),
program=Mock(return_value=mock_runner),
cel_parser=Mock(error_text=Mock(return_value=sentinel.Formatted_Error)),
)
mock_env_class = Mock(return_value=mock_env)
monkeypatch.setattr(celpy.__main__, "Environment", mock_env_class)
return mock_env_class
def test_main_0_eval_error(mock_cel_environment_eval_error, caplog, capsys):
"""GIVEN null input AND bad expression; WHEN eval; THEN correct stderr output."""
argv = ["-n", "2 / 0"]
status = celpy.__main__.main(argv)
assert status == 2
assert mock_cel_environment_eval_error.mock_calls == [
call(package=None, annotations=None)
]
expected_namespace = argparse.Namespace(
verbose=0, arg=None, null_input=True, slurp=False, interactive=False,
package='jq', document=None,
boolean=False, format=None,
expr='2 / 0'
)
assert caplog.messages == [
str(expected_namespace),
"Expr: '2 / 0'",
]
out, err = capsys.readouterr()
assert out == ""
assert err == "sentinel.Formatted_Error\n"
def test_main_pipe_eval_error(mock_cel_environment_eval_error, caplog, capsys):
"""GIVEN piped input AND bad expression; WHEN eval; THEN correct stderr output."""
argv = [".json.field / 0"]
sys.stdin = io.StringIO('{"name": "CEL"}\n')
status = celpy.__main__.main(argv)
sys.stdin = sys.__stdin__
assert status == 0
assert mock_cel_environment_eval_error.mock_calls == [
call(package="jq", annotations=None)
]
expected_namespace = argparse.Namespace(
verbose=0, arg=None, null_input=False, slurp=False, interactive=False,
package='jq', document=None,
boolean=False, format=None,
expr='.json.field / 0'
)
assert caplog.messages == [
str(expected_namespace),
"Expr: '.json.field / 0'",
"Encountered (sentinel.arg0, sentinel.arg1) on document '{\"name\": \"CEL\"}\\n'",
]
out, err = capsys.readouterr()
assert out == "null\n"
assert err == ""
def test_main_pipe_json_error(mock_cel_environment_eval_error, caplog, capsys):
"""GIVEN piped input AND bad expression; WHEN eval; THEN correct stderr output."""
argv = [".json.field / 0"]
sys.stdin = io.StringIO('nope, not json\n')
status = celpy.__main__.main(argv)
sys.stdin = sys.__stdin__
assert status == 3
assert mock_cel_environment_eval_error.mock_calls == [
call(package="jq", annotations=None)
]
expected_namespace = argparse.Namespace(
verbose=0, arg=None, null_input=False, slurp=False, interactive=False,
package='jq', document=None,
boolean=False, format=None,
expr='.json.field / 0'
)
assert caplog.messages == [
str(expected_namespace),
"Expr: '.json.field / 0'",
"Expecting value: line 1 column 1 (char 0) on document 'nope, not json\\n'",
]
out, err = capsys.readouterr()
assert out == ""
assert err == ""
def test_main_repl(monkeypatch, capsys):
mock_repl = Mock()
mock_repl_class = Mock(return_value=mock_repl)
monkeypatch.setattr(celpy.__main__, 'CEL_REPL', mock_repl_class)
argv = ["-i"]
status = celpy.__main__.main(argv)
assert status == 0
assert mock_repl_class.mock_calls == [
call()
]
assert mock_repl.cmdloop.mock_calls == [
call()
]
def test_repl_class_good_interaction(capsys):
"""
If any print() is added for debugging, this test is likely to break.
"""
c = celpy.__main__.CEL_REPL()
c.preloop()
assert c.state == {}
r_0 = c.onecmd("set pi 355./113.")
assert not r_0
r_1 = c.onecmd("show")
assert not r_1
r_2 = c.onecmd("pi * 2.")
assert not r_2
r_2 = c.onecmd("quit")
assert r_2
out, err = capsys.readouterr()
lines = out.splitlines()
assert lines[0].startswith("3.14159")
assert lines[1].startswith("{'pi': DoubleType(3.14159")
assert lines[2].startswith("6.28318")
assert c.state == {"pi": celpy.celtypes.DoubleType(355./113.)}
def test_repl_class_bad_interaction(capsys):
c = celpy.__main__.CEL_REPL()
c.preloop()
c.onecmd("set a pi ++ nope | not & proper \\ CEL")
c.onecmd("this! isn't! valid!!")
out, err = capsys.readouterr()
lines = err.splitlines()
assert (
lines[0] ==
"ERROR: <input>:1:5 pi ++ nope | not & proper \ CEL"
)
assert (
lines[4] ==
" | ....^"
)
assert c.state == {}
|
py | b416cb1aa8d350d50876de3d7b6d654e4fd18e56 | import datetime
from django.core.management.base import BaseCommand
from django.db import connection
from ktapp import models
MINIMUM_YEAR = 1920
class Command(BaseCommand):
help = 'Update profile segments'
def handle(self, *args, **options):
cursor = connection.cursor()
today = datetime.date.today()
this_year = today.year
self.stdout.write('Updating segments...')
# self.update_global_segment(cursor)
# for keyword in models.Keyword.objects.filter(keyword_type=models.Keyword.KEYWORD_TYPE_GENRE):
# self.update_keyword_segment(cursor, 'genre', keyword.id)
# self.update_segment_all(cursor, 'genre')
# for keyword in models.Keyword.objects.filter(keyword_type=models.Keyword.KEYWORD_TYPE_COUNTRY):
# self.update_keyword_segment(cursor, 'country', keyword.id)
# self.update_segment_all(cursor, 'country')
# for year in range(MINIMUM_YEAR - 10, this_year, 10):
# self.update_year_segment(cursor, year)
# self.update_segment_all(cursor, 'year')
# self.stdout.write('Updated segments.')
self.stdout.write('Updating usersegments...')
# cursor.execute('''
# TRUNCATE ktapp_userprofilesegment
# ''')
# self.update_global_usersegment(cursor)
# for keyword in models.Keyword.objects.filter(keyword_type=models.Keyword.KEYWORD_TYPE_GENRE):
# self.update_keyword_usersegment(cursor, 'genre', keyword.id)
# self.update_usersegment_all(cursor, 'genre')
# for keyword in models.Keyword.objects.filter(keyword_type=models.Keyword.KEYWORD_TYPE_COUNTRY):
# self.update_keyword_usersegment(cursor, 'country', keyword.id)
# self.update_usersegment_all(cursor, 'country')
# for year in range(MINIMUM_YEAR - 10, this_year, 10):
# self.update_year_usersegment(cursor, year)
# self.update_usersegment_all(cursor, 'year')
self.stdout.write('Updated usersegments.')
self.stdout.write('Updating scores...')
# cursor.execute('''
# UPDATE ktapp_userprofilesegment ups
# INNER JOIN ktapp_profilesegment ps
# SET ups.score = ROUND(100.0 * ups.ratio_of_films / ps.ratio_of_films - 100.0)
# WHERE
# ps.id = ups.segment_id
# ''')
self.stdout.write('Updated scores.')
def update_global_segment(self, cursor):
self.stdout.write('Global segment...')
segment, _ = models.ProfileSegment.objects.get_or_create(dimension='', segment=0)
cursor.execute('''
SELECT ROUND(1.0 / SUM(POW(1.0 * f.number_of_ratings / sf.sum_of_number_of_ratings, 2))) AS effective_number
FROM ktapp_film f
INNER JOIN (
SELECT SUM(number_of_ratings) AS sum_of_number_of_ratings FROM ktapp_film
) sf
''')
segment.effective_number_of_films = int(cursor.fetchone()[0])
segment.ratio_of_films = 10000
segment.save()
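    # The "effective number" above is an inverse Simpson index over rating
    # shares: with p_i = n_i / N, effective_n = 1 / sum(p_i ** 2). A minimal
    # Python sketch of the same computation (editor's illustration):
    #
    #   shares = [n / float(total) for n in counts]
    #   effective_n = 1.0 / sum(p * p for p in shares)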
def update_global_usersegment(self, cursor):
self.stdout.write('Global usersegment...')
segment = models.ProfileSegment.objects.get(dimension='', segment=0)
cursor.execute('''
DELETE FROM ktapp_userprofilesegment
WHERE segment_id = {segment_id}
'''.format(segment_id=segment.id))
cursor.execute('''
INSERT INTO ktapp_userprofilesegment (user_id, segment_id, number_of_votes, relative_number_of_votes, ratio_of_films, score)
SELECT id, {segment_id}, number_of_ratings, ROUND(10000.0 * number_of_ratings / {effective_number_of_films}), 10000, 0
FROM ktapp_ktuser
'''.format(
segment_id=segment.id,
effective_number_of_films=segment.effective_number_of_films,
))
def update_keyword_segment(self, cursor, dimension, keyword_id):
self.stdout.write('Keyword segment %s:%s...' % (dimension, keyword_id))
if dimension == 'genre':
dim_field = 'number_of_genres'
elif dimension == 'country':
dim_field = 'number_of_countries'
else:
return
cursor.execute('''
SELECT COALESCE(ROUND(1.0 / SUM(POW(1.0 * f.weighted_number_of_ratings / sf.sum_of_weighted_number_of_ratings, 2))), 0) AS effective_number
FROM (SELECT 1.0 * f.number_of_ratings / f.{dim_field} AS weighted_number_of_ratings FROM ktapp_film f INNER JOIN ktapp_filmkeywordrelationship fk ON fk.film_id = f.id AND fk.keyword_id = {keyword_id}) f
INNER JOIN (
SELECT SUM(1.0 * f.number_of_ratings / f.{dim_field}) AS sum_of_weighted_number_of_ratings FROM (SELECT f.number_of_ratings, f.{dim_field} FROM ktapp_film f INNER JOIN ktapp_filmkeywordrelationship fk ON fk.film_id = f.id AND fk.keyword_id = {keyword_id}) f
) sf
'''.format(
keyword_id=keyword_id,
dim_field=dim_field,
))
effective_number_of_films = int(cursor.fetchone()[0])
if effective_number_of_films >= 20:
segment, _ = models.ProfileSegment.objects.get_or_create(dimension=dimension, segment=keyword_id)
segment.effective_number_of_films = effective_number_of_films
segment.save()
return segment
return None
def update_keyword_usersegment(self, cursor, dimension, keyword_id):
try:
segment = models.ProfileSegment.objects.get(dimension=dimension, segment=keyword_id)
except models.ProfileSegment.DoesNotExist:
return
self.stdout.write('Keyword usersegment %s:%s...' % (dimension, keyword_id))
if dimension == 'genre':
dim_field = 'number_of_genres'
elif dimension == 'country':
dim_field = 'number_of_countries'
else:
return
cursor.execute('''
DELETE FROM ktapp_userprofilesegment
WHERE segment_id = {segment_id}
'''.format(segment_id=segment.id))
cursor.execute('''
INSERT INTO ktapp_userprofilesegment (user_id, segment_id, number_of_votes, relative_number_of_votes, ratio_of_films, score)
SELECT id, {segment_id}, ROUND(weighted_number_of_ratings), ROUND(10000.0 * weighted_number_of_ratings / {effective_number_of_films}), 10000, 0
FROM (
SELECT u.id, SUM(1.0 / f.{dim_field}) AS weighted_number_of_ratings
FROM ktapp_ktuser u
INNER JOIN ktapp_vote v ON v.user_id = u.id
INNER JOIN ktapp_filmkeywordrelationship fk ON fk.film_id = v.film_id AND fk.keyword_id = {keyword_id}
INNER JOIN ktapp_film f ON f.id = v.film_id
GROUP BY u.id
) uu
'''.format(
segment_id=segment.id,
keyword_id=keyword_id,
effective_number_of_films=segment.effective_number_of_films,
dim_field=dim_field,
))
def update_year_segment(self, cursor, year):
self.stdout.write('Year segment %s...' % year)
cursor.execute('''
SELECT ROUND(1.0 / SUM(POW(1.0 * f.number_of_ratings / sf.sum_of_number_of_ratings, 2))) AS effective_number
FROM (SELECT number_of_ratings FROM ktapp_film WHERE year BETWEEN {min_year} AND {max_year}) f
INNER JOIN (
SELECT SUM(number_of_ratings) AS sum_of_number_of_ratings FROM ktapp_film WHERE year BETWEEN {min_year} AND {max_year}
) sf
'''.format(
min_year=1800 if year < MINIMUM_YEAR else year,
max_year=MINIMUM_YEAR - 1 if year < MINIMUM_YEAR else year + 9,
))
effective_number_of_films = int(cursor.fetchone()[0])
if effective_number_of_films >= 20:
segment, _ = models.ProfileSegment.objects.get_or_create(dimension='year', segment=year)
segment.effective_number_of_films = effective_number_of_films
segment.save()
return segment
return None
def update_year_usersegment(self, cursor, year):
try:
segment = models.ProfileSegment.objects.get(dimension='year', segment=year)
except models.ProfileSegment.DoesNotExist:
return
self.stdout.write('Year usersegment %s...' % year)
cursor.execute('''
DELETE FROM ktapp_userprofilesegment
WHERE segment_id = {segment_id}
'''.format(segment_id=segment.id))
cursor.execute('''
INSERT INTO ktapp_userprofilesegment (user_id, segment_id, number_of_votes, relative_number_of_votes, ratio_of_films, score)
SELECT id, {segment_id}, ROUND(weighted_number_of_ratings), ROUND(10000.0 * weighted_number_of_ratings / {effective_number_of_films}), 10000, 0
FROM (
SELECT u.id, COUNT(1) AS weighted_number_of_ratings
FROM ktapp_ktuser u
INNER JOIN ktapp_vote v ON v.user_id = u.id
INNER JOIN ktapp_film f ON f.id = v.film_id AND f.year BETWEEN {min_year} AND {max_year}
GROUP BY u.id
) uu
'''.format(
segment_id=segment.id,
min_year=1800 if year < MINIMUM_YEAR else year,
max_year=MINIMUM_YEAR - 1 if year < MINIMUM_YEAR else year + 9,
effective_number_of_films=segment.effective_number_of_films,
))
def update_segment_all(self, cursor, dimension):
self.stdout.write('Segment %s:all...' % dimension)
cursor.execute('''
UPDATE ktapp_profilesegment ps
INNER JOIN (
SELECT ps.id, ps.effective_number_of_films
FROM ktapp_profilesegment ps
WHERE ps.dimension = '{dimension}'
) detailed_ps
INNER JOIN (
SELECT SUM(ps.effective_number_of_films) AS effective_number_of_films
FROM ktapp_profilesegment ps
WHERE ps.dimension = '{dimension}'
) sum_ps
SET ps.ratio_of_films = ROUND(10000.0 * detailed_ps.effective_number_of_films / sum_ps.effective_number_of_films)
WHERE
detailed_ps.id = ps.id
'''.format(dimension=dimension))
def update_usersegment_all(self, cursor, dimension):
self.stdout.write('Usersegment %s:all...' % dimension)
cursor.execute('''
UPDATE ktapp_userprofilesegment ups
INNER JOIN (
SELECT ups.user_id, ups.segment_id, ups.number_of_votes
FROM ktapp_userprofilesegment ups
INNER JOIN ktapp_profilesegment ps
ON ps.id = ups.segment_id AND ps.dimension = '{dimension}'
) detailed_ups
INNER JOIN (
SELECT ups.user_id, SUM(ups.number_of_votes) AS number_of_votes
FROM ktapp_userprofilesegment ups
INNER JOIN ktapp_profilesegment ps
ON ps.id = ups.segment_id AND ps.dimension = '{dimension}'
GROUP BY ups.user_id
) sum_ups
SET ups.ratio_of_films = ROUND(10000.0 * detailed_ups.number_of_votes / sum_ups.number_of_votes)
WHERE
detailed_ups.user_id = ups.user_id
AND detailed_ups.segment_id = ups.segment_id
AND sum_ups.user_id = ups.user_id
AND sum_ups.number_of_votes > 0
'''.format(dimension=dimension))
|
py | b416cd5ccb2d2e3b01e22bc3d8c6b28b937773da | # Generated by Django 2.2.2 on 2020-05-12 04:32
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
|
py | b416cde65d4eade70d4aa48e9a0e8dcc776d486c | from simple_api.adapters.graphql.graphql import GraphQLAdapter
from simple_api.adapters.utils import generate
from simple_api.django_object.django_object import DjangoObject
from .models import TestModelPrimitiveFields
from tests.graphql.graphql_test_utils import build_patterns
class TestModelObjectAllFields(DjangoObject):
model = TestModelPrimitiveFields
class_for_related = False
detail_action = None
update_action = None
delete_action = None
list_action = None
class TestModelObjectOnlyFields(DjangoObject):
model = TestModelPrimitiveFields
only_fields = ("int_field", "float_field")
detail_action = None
update_action = None
delete_action = None
list_action = None
class TestModelObjectExcludeFields(DjangoObject):
model = TestModelPrimitiveFields
class_for_related = False
exclude_fields = ("string_char_field", "string_text_field")
detail_action = None
update_action = None
delete_action = None
list_action = None
schema = generate(GraphQLAdapter)
patterns = build_patterns(schema)
|
py | b416cdf1b9eabba1298ee5c8a458a5b638ae341c | from torchvision import models
import torch.nn as nn
from PIL import Image
import numpy as np
import torch
import itertools
from backboned_unet import Unet # https://github.com/mkisantal/backboned-unet
fixed_random_noise_vector = [0.33963535, -0.70369039, 0.62590457, 0.59152784, 0.4051563,
0.26512166, 0.25203669, -0.39983498, 0.66386131, -0.94438161]
class GeneratorResidualModule(nn.Module):
""" Residual module based on torchvision.models.resnet.BasicBlock"""
expansion = 1
def __init__(self, inplanes, planes, stride=1):
super(GeneratorResidualModule, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
# self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out += residual
out = self.relu(out)
return out
class PixelDAGenerator(nn.Module):
""" Generator based on Pixel-Level Domain Adaptation paper (arXiv:1612.05424v2). """
# TODO: consider adding random input vector
def __init__(self, num_channels, latent_dim=10, im_size=224):
super(PixelDAGenerator, self).__init__()
self.fixed_noise = torch.tensor(fixed_random_noise_vector)
self.noise_in = nn.Linear(latent_dim, im_size**2, bias=False)
self.noise_bn = nn.BatchNorm1d(im_size**2)
self.conv1 = nn.Conv2d(num_channels+1, 64, kernel_size=3, stride=1, padding=1, bias=False)
# self.bn1 = nn.BatchNorm2d(64) # not included according to TF implementation
self.relu = nn.ReLU(inplace=True)
self.res_block1 = GeneratorResidualModule(64, 64)
self.res_block2 = GeneratorResidualModule(64, 64)
self.res_block3 = GeneratorResidualModule(64, 64)
self.final_conv = nn.Conv2d(64, 3, kernel_size=3, stride=1, padding=1, bias=False)
self.tanh = nn.Tanh()
def forward(self, x, z=None):
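        # z is a latent noise vector (PixelDA): project it to an im_size*im_size
        # plane, batch-normalize, and concatenate it to x as an extra channel.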
if z is None:
z = self.fixed_noise.repeat(x.shape[0], 1)
z = self.noise_in(z.cuda())
z = self.noise_bn(z)
x = torch.cat([x, z.view([x.shape[0], 1, x.shape[2], x.shape[3]])], 1)
x = self.conv1(x)
x = self.relu(x)
x = self.res_block1(x)
x = self.res_block2(x)
x = self.res_block3(x)
x = self.final_conv(x)
out = self.tanh(x)
return out
class DiscriminatorModule(nn.Module):
""" Conv-BN-LeakyRelu module for PixelDADiscriminator, and for PatchGAN. """
def __init__(self, inplanes, kernel_size=3, stride=None, padding=1):
super(DiscriminatorModule, self).__init__()
planes = 64 if inplanes == 3 else 2*inplanes
if stride is None:
stride = 1 if inplanes == 3 else 2
self.conv = nn.Conv2d(inplanes, planes, kernel_size=kernel_size,
stride=stride, padding=padding, bias=False)
self.bn = nn.BatchNorm2d(planes)
self.lrelu = nn.LeakyReLU(.2, inplace=True)
def forward(self, x):
x = self.conv(x)
x = self.bn(x)
x = self.lrelu(x)
return x
class PixelDADiscriminator(nn.Module):
""" Discriminator based on Pixel-Level Domain Adaptation paper (arXiv:1612.05424v2). """
def __init__(self):
super(PixelDADiscriminator, self).__init__()
self.block1 = DiscriminatorModule(3)
self.block2 = DiscriminatorModule(64)
self.block3 = DiscriminatorModule(128)
self.block4 = DiscriminatorModule(256)
self.block5 = DiscriminatorModule(512)
self.block6 = DiscriminatorModule(1024)
self.block7 = DiscriminatorModule(2048)
self.fc = nn.Linear(4096*4*4, 1)
self.sigmoid = nn.Sigmoid()
def forward(self, x, return_logit=True):
x = self.block1(x)
x = self.block2(x)
x = self.block3(x)
x = self.block4(x)
x = self.block5(x)
x = self.block6(x)
x = self.block7(x)
x = self.fc(x.view(x.size(0), -1))
out = self.sigmoid(x)
if return_logit:
return out, x
else:
return out
class PatchGanDiscriminator(nn.Module):
def __init__(self):
super(PatchGanDiscriminator, self).__init__()
# self.block1 = DiscriminatorModule(3, kernel_size=4, stride=2)
self.conv1 = nn.Conv2d(3, 64, kernel_size=4, stride=2, padding=1, bias=True)
self.lrelu1 = nn.LeakyReLU(negative_slope=.2, inplace=True)
self.block2 = DiscriminatorModule(64, kernel_size=4, stride=2)
self.block3 = DiscriminatorModule(128, kernel_size=4, stride=2)
self.block4 = DiscriminatorModule(256, kernel_size=4, stride=1)
self.last_conv = nn.Conv2d(512, 1, kernel_size=4, stride=1, padding=1, bias=True)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
x = self.conv1(x)
x = self.lrelu1(x)
x = self.block2(x)
x = self.block3(x)
x = self.block4(x)
x = self.last_conv(x)
out = self.sigmoid(x)
return out, x
class PixelGanDiscriminator(nn.Module):
def __init__(self):
super(PixelGanDiscriminator, self).__init__()
self.conv1 = nn.Conv2d(3, 64, kernel_size=1, stride=1, padding=0, bias=True)
self.lrelu1 = nn.LeakyReLU(negative_slope=.2, inplace=True)
self.block2 = DiscriminatorModule(64, kernel_size=1, stride=1, padding=0)
self.last_conv = nn.Conv2d(128, 1, kernel_size=1, stride=1, padding=0, bias=True)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
x = self.conv1(x)
x = self.lrelu1(x)
x = self.block2(x)
x = self.last_conv(x)
out = self.sigmoid(x)
return out, x
class MultiChannelNet(nn.Module):
""" Class for experiments with non-RGB inputs for pre-trained networks."""
def __init__(self,
num_channels=3,
num_classes=10,
input_mode=None,
segmentation=True,
backbone_kwargs=None):
super(MultiChannelNet, self).__init__()
self.input_transform_module = None
# self.rgb_net = models.resnet50(pretrained=True)
        if backbone_kwargs is None:
            self.rgb_net = Unet(classes=num_classes)
        else:
            self.rgb_net = Unet(**backbone_kwargs)
self.segmentation_mode = segmentation
if input_mode == 'replace_conv1':
conv1 = nn.Conv2d(num_channels, 64, kernel_size=7, stride=2, padding=3, bias=False)
if segmentation:
self.rgb_net.backbone.conv1 = conv1
nn.init.kaiming_normal_(self.rgb_net.backbone.conv1.weight, mode='fan_out', nonlinearity='relu')
else:
self.rgb_net.conv1 = conv1
nn.init.kaiming_normal_(self.rgb_net.conv1.weight, mode='fan_out', nonlinearity='relu')
if input_mode == 'domain_adapter':
# defining network for input transformation (architecture based on PixelDA)
self.input_transform_module = PixelDAGenerator(num_channels)
self.conv1_replaced = input_mode == 'replace_conv1'
# replace output layer
if not segmentation:
fc_in_features = self.rgb_net.fc.in_features
self.rgb_net.fc = nn.Linear(fc_in_features, num_classes)
def forward(self, x, z=None):
""" Running inference on HSI with generator + classifier. """
if self.input_transform_module is not None:
three_channel_image = self.input_transform_module(x, z)
else:
three_channel_image = x
output = self.rgb_net(three_channel_image)
return output, three_channel_image
def classify(self, x):
""" Running only classification on 3 channel input"""
x = self.rgb_net(x)
return x
def set_finetuning(self):
""" Setting all model parameters trainable. """
for subnetwork in [self.rgb_net, self.input_transform_module]:
if subnetwork is not None:
for param in subnetwork.parameters():
param.requires_grad = True
return
def set_feature_extracting(self):
""" Freeze rgb net, replaced layers, generator and discriminator trainable. """
if self.segmentation_mode:
# only encoder part of U-net should be frozen
self.rgb_net.freeze_encoder()
if self.conv1_replaced:
for param in self.rgb_net.backbone.conv1.parameters():
param.requires_grad = True
else:
for param in self.rgb_net.parameters():
param.requires_grad = False
for param in self.rgb_net.fc.parameters():
param.requires_grad = True
if self.conv1_replaced:
for param in self.rgb_net.conv1.parameters():
param.requires_grad = True
for subnetwork in [self.input_transform_module]:
if subnetwork is not None:
for param in subnetwork.parameters():
param.requires_grad = True
return
def get_transformed_input(self, x, pil=False):
""" For inspecting the image we are feeding to the pre-trained network. """
if self.input_transform_module is not None:
x = self.input_transform_module(x)
if not pil:
return x
else:
            x -= x.min()  # shift so the minimum is zero before scaling
x *= 255.0 / x.max()
img = np.uint8(x.cpu().numpy()).transpose()
return Image.fromarray(img)
def get_params_for_opt(self, training_phase='generator'):
""" Parameter iterators for initializing pytorch optimizers. """
# TODO: remove this, use get_pretrained_parameters and get_random_initialized_parameters instead.
if training_phase == 'generator':
if self.input_transform_module is not None:
return itertools.chain(self.input_transform_module.parameters(), self.rgb_net.parameters())
else:
return self.rgb_net.parameters()
if training_phase == 'discriminator':
return self.domain_discriminator.parameters()
def get_pretrained_parameters(self):
""" Returning iterator for pretrained parametes, to be used with PyTorch optimizer"""
if self.segmentation_mode:
return self.rgb_net.get_pretrained_parameters()
else:
raise NotImplementedError('Only segmentation for now.')
def get_random_initialized_parameters(self):
""" Returning iterator for new, randomly initialized parametes, to be used with PyTorch optimizer"""
if self.segmentation_mode:
iterators = [self.rgb_net.get_random_initialized_parameters()]
if self.input_transform_module is not None:
iterators.append(self.input_transform_module.parameters())
return itertools.chain(*iterators)
else:
raise NotImplementedError('Only segmentation for now.')
if __name__ == '__main__':
net = MultiChannelNet(num_channels=13,
num_classes=10,
input_mode='replace_conv1')
hsi = torch.ones([1, 13, 224, 224])
rgb = torch.ones([1, 3, 224, 224])
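    # Illustrative forward pass (kept commented, matching the demo lines
    # below): with input_mode='replace_conv1' the 13-channel tensor is fed
    # straight to the adapted backbone.
    # out, transformed = net(hsi)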
# print(net)
# net.set_feature_extracting()
# net.set_finetuning()
# input = torch.ones([1, 3, 224, 224])
#
# net2 = PixelDADiscriminator()
# print(net2)
#
# net2(input)
print('done.')
|
py | b416cdfc2723c29f26d1c8b3d52a3008cc91762e | # Copyright 2015 The go-python Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
## py2/py3 compat
from __future__ import print_function
import sys
_PY3 = sys.version_info[0] == 3
if _PY3:
xrange = range
import seqs
### test docs
print("doc(seqs): %s" % repr(seqs.__doc__).lstrip('u'))
# note: arrays not settable from python -- use slices instead
# print("arr = seqs.Array(xrange(2))")
# arr = seqs.Array(xrange(2))
# print("arr = %s" % (arr,))
#
print("s = seqs.Slice()")
s = seqs.Slice()
print("s = %s" % (s,))
print("s = seqs.Slice([1,2])")
s = seqs.Slice([1,2])
print("s = %s" % (s,))
print("s = seqs.Slice(range(10))")
s = seqs.Slice(range(10))
print("s = %s" % (s,))
print("s = seqs.Slice(xrange(10))")
s = seqs.Slice(xrange(10))
print("s = %s" % (s,))
print("s = seqs.Slice()")
s = seqs.Slice()
print("s = %s" % (s,))
print("s += [1,2]")
s += [1,2]
print("s = %s" % (s,))
print("s += [10,20]")
s += [10,20]
print("s = %s" % (s,))
print("OK")
|
py | b416ce0c8d0395149a73f8f804da3331210b0c31 | # Generated by Django 3.1.7 on 2022-02-28 13:47
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='config',
name='textoRodape',
field=models.CharField(blank=True, max_length=1024, null=True, verbose_name='Texto Rodapé'),
),
]
|
py | b416cfd6bebdcba60caca541bd7e5099ef7ba18d | from __future__ import unicode_literals
import base64
import hashlib
import hmac
import json
import time
from django import template
from django.conf import settings
from django.contrib.sites.models import Site
register = template.Library()
# Set the disqus_developer variable to 0/1. Default is 0
@register.simple_tag(takes_context=True)
def set_disqus_developer(context, disqus_developer):
context['disqus_developer'] = disqus_developer
return ""
# Set the disqus_identifier variable to some unique value. Defaults to page's URL
@register.simple_tag(takes_context=True)
def set_disqus_identifier(context, *args):
context['disqus_identifier'] = "".join(args)
return ""
# Set the disqus_url variable to some value. Defaults to page's location
@register.simple_tag(takes_context=True)
def set_disqus_url(context, *args):
context['disqus_url'] = "".join(args)
return ""
# Set the disqus_title variable to some value. Defaults to page's title or URL
@register.simple_tag(takes_context=True)
def set_disqus_title(context, disqus_title):
context['disqus_title'] = disqus_title
return ""
# Set the disqus_category_id variable to some value. No default. See
# http://help.disqus.com/customer/portal/articles/472098-javascript-configuration-variables#disqus_category_id
@register.simple_tag(takes_context=True)
def set_disqus_category_id(context, disqus_category_id):
context['disqus_category_id'] = disqus_category_id
return ""
def get_config(context):
"""
Return the formatted javascript for any disqus config variables.
"""
conf_vars = ['disqus_developer',
'disqus_identifier',
'disqus_url',
'disqus_title',
'disqus_category_id'
]
js = '\tvar {} = "{}";'
output = [js.format(item, context[item]) for item in conf_vars \
if item in context]
return '\n'.join(output)
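# For example, with disqus_identifier and disqus_title in the context,
# get_config() renders lines like (illustrative):
#
#   var disqus_identifier = "article-42";
#   var disqus_title = "My title";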
@register.inclusion_tag('disqus/disqus_dev.html', takes_context=True)
def disqus_dev(context):
"""
Return the HTML/js code to enable DISQUS comments on a local
development server if settings.DEBUG is True.
"""
if settings.DEBUG:
disqus_url = '//{}{}'.format(
Site.objects.get_current().domain,
context['request'].path
)
return {'disqus_url': disqus_url}
return {}
@register.inclusion_tag('disqus/disqus_sso.html', takes_context=True)
def disqus_sso(context):
"""
Return the HTML/js code to enable DISQUS SSO - so logged in users on
your site can be logged in to disqus seemlessly.
"""
DISQUS_SECRET_KEY = getattr(settings, 'DISQUS_SECRET_KEY', None)
if DISQUS_SECRET_KEY is None:
return "<p>You need to set DISQUS_SECRET_KEY before you can use SSO</p>"
DISQUS_PUBLIC_KEY = getattr(settings, 'DISQUS_PUBLIC_KEY', None)
if DISQUS_PUBLIC_KEY is None:
return "<p>You need to set DISQUS_PUBLIC_KEY before you can use SSO</p>"
user = context['user']
if user.is_anonymous():
return ""
# create a JSON packet of our data attributes
data = json.dumps({
'id': user.id,
'username': user.username,
'email': user.email,
})
# encode the data to base64
message = base64.b64encode(data.encode('utf-8'))
# generate a timestamp for signing the message
timestamp = int(time.time())
key = DISQUS_SECRET_KEY.encode('utf-8')
msg = ('%s %s' % (message, timestamp)).encode('utf-8')
digestmod = hashlib.sha1
# generate our hmac signature
sig = hmac.HMAC(key, msg, digestmod).hexdigest()
return dict(
message=message,
timestamp=timestamp,
sig=sig,
pub_key=DISQUS_PUBLIC_KEY,
)
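# The companion template typically joins these as
# this.page.remote_auth_s3 = "<message> <sig> <timestamp>" (illustrative; the
# Disqus SSO template itself is not shown here).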
@register.inclusion_tag('disqus/num_replies.html', takes_context=True)
def disqus_num_replies(context, shortname=''):
"""
Return the HTML/js code which transforms links that end with an
#disqus_thread anchor into the threads comment count.
"""
shortname = getattr(settings, 'DISQUS_WEBSITE_SHORTNAME', shortname)
return {
'shortname': shortname,
'config': get_config(context),
}
@register.inclusion_tag('disqus/recent_comments.html', takes_context=True)
def disqus_recent_comments(context, shortname='', num_items=5, excerpt_length=200, hide_avatars=0, avatar_size=32):
"""
Return the HTML/js code which shows recent comments.
"""
shortname = getattr(settings, 'DISQUS_WEBSITE_SHORTNAME', shortname)
return {
'shortname': shortname,
'num_items': num_items,
'hide_avatars': hide_avatars,
'avatar_size': avatar_size,
'excerpt_length': excerpt_length,
'config': get_config(context),
}
@register.inclusion_tag('disqus/show_comments.html', takes_context=True)
def disqus_show_comments(context, shortname=''):
"""
Return the HTML code to display DISQUS comments.
"""
shortname = getattr(settings, 'DISQUS_WEBSITE_SHORTNAME', shortname)
return {
'shortname': shortname,
'config': get_config(context),
}
|
py | b416cfe7a8e12817d311623631b595fe29b882b3 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.17
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V2beta2MetricSpec(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'external': 'V2beta2ExternalMetricSource',
'object': 'V2beta2ObjectMetricSource',
'pods': 'V2beta2PodsMetricSource',
'resource': 'V2beta2ResourceMetricSource',
'type': 'str'
}
attribute_map = {
'external': 'external',
'object': 'object',
'pods': 'pods',
'resource': 'resource',
'type': 'type'
}
def __init__(self, external=None, object=None, pods=None, resource=None, type=None, local_vars_configuration=None): # noqa: E501
"""V2beta2MetricSpec - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._external = None
self._object = None
self._pods = None
self._resource = None
self._type = None
self.discriminator = None
if external is not None:
self.external = external
if object is not None:
self.object = object
if pods is not None:
self.pods = pods
if resource is not None:
self.resource = resource
self.type = type
@property
def external(self):
"""Gets the external of this V2beta2MetricSpec. # noqa: E501
:return: The external of this V2beta2MetricSpec. # noqa: E501
:rtype: V2beta2ExternalMetricSource
"""
return self._external
@external.setter
def external(self, external):
"""Sets the external of this V2beta2MetricSpec.
:param external: The external of this V2beta2MetricSpec. # noqa: E501
:type: V2beta2ExternalMetricSource
"""
self._external = external
@property
def object(self):
"""Gets the object of this V2beta2MetricSpec. # noqa: E501
:return: The object of this V2beta2MetricSpec. # noqa: E501
:rtype: V2beta2ObjectMetricSource
"""
return self._object
@object.setter
def object(self, object):
"""Sets the object of this V2beta2MetricSpec.
:param object: The object of this V2beta2MetricSpec. # noqa: E501
:type: V2beta2ObjectMetricSource
"""
self._object = object
@property
def pods(self):
"""Gets the pods of this V2beta2MetricSpec. # noqa: E501
:return: The pods of this V2beta2MetricSpec. # noqa: E501
:rtype: V2beta2PodsMetricSource
"""
return self._pods
@pods.setter
def pods(self, pods):
"""Sets the pods of this V2beta2MetricSpec.
:param pods: The pods of this V2beta2MetricSpec. # noqa: E501
:type: V2beta2PodsMetricSource
"""
self._pods = pods
@property
def resource(self):
"""Gets the resource of this V2beta2MetricSpec. # noqa: E501
:return: The resource of this V2beta2MetricSpec. # noqa: E501
:rtype: V2beta2ResourceMetricSource
"""
return self._resource
@resource.setter
def resource(self, resource):
"""Sets the resource of this V2beta2MetricSpec.
:param resource: The resource of this V2beta2MetricSpec. # noqa: E501
:type: V2beta2ResourceMetricSource
"""
self._resource = resource
@property
def type(self):
"""Gets the type of this V2beta2MetricSpec. # noqa: E501
type is the type of metric source. It should be one of \"Object\", \"Pods\" or \"Resource\", each mapping to a matching field in the object. # noqa: E501
:return: The type of this V2beta2MetricSpec. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this V2beta2MetricSpec.
type is the type of metric source. It should be one of \"Object\", \"Pods\" or \"Resource\", each mapping to a matching field in the object. # noqa: E501
:param type: The type of this V2beta2MetricSpec. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
self._type = type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V2beta2MetricSpec):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V2beta2MetricSpec):
return True
return self.to_dict() != other.to_dict()
|
py | b416d0ba48ab4287004e53a84fd4702902628fc5 | from.constants_min import*
class MidiIn:
def __init__(self,device,callback=None,debug=False,softthru=False):
if not hasattr(device,'any'):
raise TypeError("device instance must have a 'any' method.")
if not hasattr(device,'read'):
raise TypeError("device instance must have a 'read' method.")
if softthru and not hasattr(device,'write'):
raise TypeError("device instance must have a 'write' method if " "soft thru is enabled.")
self.device=device
self.callback=callback
self.debug=debug
self.softthru=softthru
self._msgbuf=None
self._status=None
self._ignore_active_sense=False
self._ignore_clock=False
self._ignore_sysex=False
def __repr__(self):
return '<MidiIn: device={} callback={}>'.format(self.device,'yes' if callable(self.callback)else 'no')
def poll(self):
msgs=self._read()
if msgs and self.callback:
for msg in msgs:
self.callback(msg)
def ignore_types(self,active_sensing=False,clock=False,sysex=False):
self._ignore_active_sense=active_sensing
self._ignore_clock=clock
self._ignore_sysex=sysex
def _error(self,msg,*args):
if self.debug:
import sys
print(msg%args,file=sys.stderr)
def _read(self):
msgs=[]
while self.device.any():
data=self.device.read(1)[0]
if self.softthru:
self.device.write(bytes([data]))
if data&0x80:
if TIMING_CLOCK<=data<=SYSTEM_RESET:
     if data==ACTIVE_SENSING and self._ignore_active_sense:
continue
elif data==TIMING_CLOCK and self._ignore_clock:
continue
elif data!=0xFD:
msgs.append(bytearray([data]))
else:
self._error("Read undefined system real-time status " "byte 0x%0X.",data)
elif data==SYSTEM_EXCLUSIVE:
self._status=SYSTEM_EXCLUSIVE
if self._ignore_sysex:
self._msgbuf=None
else:
self._msgbuf=bytearray([data])
elif data==END_OF_EXCLUSIVE:
if self._msgbuf and not self._ignore_sysex:
self._msgbuf.append(data)
msgs.append(self._msgbuf)
self._msgbuf=None
self._status=None
elif MIDI_TIME_CODE<=data<=TUNING_REQUEST:
self._status=None
self._msgbuf=None
if data==TUNING_REQUEST:
msgs.append(bytearray([data]))
elif data<=SONG_SELECT:
self._msgbuf=bytearray([data])
else:
self._error("Read undefined system common status byte " "0x%0X.",data)
else:
self._status=data
self._msgbuf=bytearray([data])
else:
if self._status==SYSTEM_EXCLUSIVE and self._ignore_sysex:
continue
if self._status and not self._msgbuf:
self._msgbuf=bytearray([self._status])
if not self._msgbuf:
self._error("Read unexpected data byte 0x%0X."%data)
continue
self._msgbuf.append(data)
if(self._status!=SYSTEM_EXCLUSIVE and(len(self._msgbuf)==3 or self._msgbuf[0]&0xF0 in(PROGRAM_CHANGE,CHANNEL_PRESSURE,MTC,SPP))):
msgs.append(self._msgbuf)
self._msgbuf=None
return msgs
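# Minimal usage sketch (UART id and wiring are illustrative; assumes a
# MicroPython board with the UART connected to a MIDI-in circuit):
#     from machine import UART
#     uart = UART(1, 31250)  # MIDI runs at 31250 baud
#     midi_in = MidiIn(uart, callback=print)
#     while True:
#         midi_in.poll()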
|
py | b416d159de93c6a3523874f5a234e817365f9e53 | import pysgrid
import numpy as np
node_lon = np.array(([1, 3, 5], [1, 3, 5], [1, 3, 5]))
node_lat = np.array(([1, 1, 1], [3, 3, 3], [5, 5, 5]))
edge2_lon = np.array(([0, 2, 4, 6], [0, 2, 4, 6], [0, 2, 4, 6]))
edge2_lat = np.array(([1, 1, 1, 1], [3, 3, 3, 3], [5, 5, 5, 5]))
edge1_lon = np.array(([1, 3, 5], [1, 3, 5], [1, 3, 5], [1, 3, 5]))
edge1_lat = np.array(([0, 0, 0], [2, 2, 2], [4, 4, 4], [6, 6, 6]))
center_lon = np.array(([0, 2, 4, 6], [0, 2, 4, 6], [0, 2, 4, 6], [0, 2, 4, 6]))
center_lat = np.array(([0, 0, 0, 0], [2, 2, 2, 2], [4, 4, 4, 4], [6, 6, 6, 6]))
sgrid = pysgrid.SGrid(node_lon=node_lon,
node_lat=node_lat,
edge1_lon=edge1_lon,
edge1_lat=edge1_lat,
edge2_lon=edge2_lon,
edge2_lat=edge2_lat,
center_lon=center_lon,
center_lat=center_lat)
c_var = np.array(([0, 0, 0, 0], [0, 1, 2, 0], [0, 2, 1, 0], [0, 0, 0, 0]))
e2_var = np.array(([1, 0, 0, 1], [0, 1, 2, 0], [0, 0, 0, 0]))
e1_var = np.array(([1, 1, 0], [0, 1, 0], [0, 2, 0], [1, 1, 0]))
n_var = np.array(([0, 1, 0], [1, 0, 1], [0, 1, 0]))
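# The four synthetic variables above live on the staggered locations of an
# Arakawa C-grid (ROMS naming): cell centers (rho), cell edges (u and v) and
# corner nodes (psi), matching the subplot titles below.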
ptsx, ptsy = np.mgrid[0:6:600j, 0:6:600j]
pts = np.stack((ptsx, ptsy), axis=-1)
interp_c = sgrid.interpolate_var_to_points(pts, c_var).reshape(600, 600)
interp_e1 = sgrid.interpolate_var_to_points(pts, e1_var).reshape(600, 600).T
interp_e2 = sgrid.interpolate_var_to_points(pts, e2_var).reshape(600, 600).T
interp_n = sgrid.interpolate_var_to_points(pts, n_var).reshape(600, 600)
import matplotlib.pyplot as plt
plt.subplot(221)
plt.imshow(interp_c, extent=(0, 6, 0, 6), origin='lower')
plt.vlines(center_lon, center_lat[0], center_lat[-1])
plt.hlines(center_lon, center_lat[0], center_lat[-1])
plt.title('rho grid interpolation')
plt.subplot(222)
plt.imshow(interp_e1, extent=(0, 6, 0, 6), origin='lower')
plt.vlines(edge2_lon, center_lat[0], center_lat[-1])
plt.hlines(center_lon, edge1_lat[0], edge1_lat[-1])
plt.title('u grid interpolation')
plt.subplot(223)
plt.imshow(interp_e2, extent=(0, 6, 0, 6), origin='lower')
plt.vlines(center_lon, node_lat[0], node_lat[-1])
plt.hlines(edge2_lon, center_lat[0], center_lat[-1])
plt.title('v grid interpolation')
plt.subplot(224)
plt.imshow(interp_n, extent=(0, 6, 0, 6), origin='lower')
plt.vlines(node_lon, node_lat[0], node_lat[-1])
plt.hlines(node_lon, node_lat[0], node_lat[-1])
plt.title('psi grid interpolation')
plt.show()
|
py | b416d21e1ee759432a42cfdcaaf484d8fd1565f4 | # Copyright (c) 2021, Zenqi
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from . import loguru
from . import lemondb
__all__ = ('loguru', 'lemondb')
|
py | b416d5ae2d4bb0cc93be9107293facaf2154fa0a | import inspect
from typing import List, Optional
import pandas as pd
from phc.easy.auth import Auth
from phc.easy.frame import Frame
from phc.easy.abstract.genomic_variant import GenomicVariant
from phc.easy.omics.options.genomic_test import (
GenomicTestStatus,
GenomicTestType,
)
from phc.easy.omics.options.common import GenomicVariantInclude
from phc.easy.omics.options.genomic_expression import GenomicExpressionOptions
from phc.easy.abstract.paging_api_item import PagingApiItem
class GenomicExpression(GenomicVariant):
@staticmethod
def resource_path():
return "genomics/expressions"
@staticmethod
def params_class():
return GenomicExpressionOptions
@staticmethod
def transform_results(data_frame: pd.DataFrame, params={}, **expand_args):
def expand_id(id_column: pd.Series):
return pd.concat(
[
id_column,
id_column.str.split(":", expand=True).rename(
columns={0: "variant_set_id"}
)["variant_set_id"],
],
axis=1,
)
args = {
**expand_args,
"custom_columns": [
*expand_args.get("custom_columns", []),
("id", expand_id),
],
}
return Frame.expand(data_frame, **args)
@classmethod
def get_data_frame(
cls,
# Query parameters
variant_set_ids: List[str] = [],
include: List[GenomicVariantInclude] = [],
gene: List[str] = [],
expression: Optional[str] = None,
outlier_std_dev: str = None,
in_ckb: Optional[bool] = None,
order_by: Optional[str] = None,
# Execution parameters,
all_results: bool = False,
auth_args: Auth = Auth.shared(),
max_pages: Optional[int] = None,
page_size: Optional[int] = None,
log: bool = False,
**kw_args,
):
"""Execute a request for genomic expression
## Parameters
Query: `phc.easy.omics.options.genomic_expression.GenomicExpressionOptions`
Execution: `phc.easy.query.Query.execute_paging_api`
Expansion: `phc.easy.frame.Frame.expand`
"""
args = cls._get_current_args(inspect.currentframe(), locals())
return super().get_data_frame(
test_type=GenomicTestType.EXPRESSION, **{**kw_args, **args}
)
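# Usage sketch (the ids below are placeholders, not real variant sets):
#     df = GenomicExpression.get_data_frame(
#         variant_set_ids=["<variant-set-id>"], gene=["BRCA2"], all_results=True)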
|
py | b416d97e2505a9035b423245fcaac3d3e19b5b04 | # Leo colorizer control file for haskell mode.
# This file is in the public domain.
# Properties for haskell mode.
properties = {
"commentEnd": "-}",
"commentStart": "{-",
"indentSize": "8",
"lineComment": "--",
"tabSize": "8",
}
# Attributes dict for haskell_main ruleset.
haskell_main_attributes_dict = {
"default": "null",
"digit_re": "",
"escape": "\\",
"highlight_digits": "true",
"ignore_case": "false",
"no_word_sep": "",
}
# Dictionary of attributes dictionaries for haskell mode.
attributesDictDict = {
"haskell_main": haskell_main_attributes_dict,
}
# Keywords dict for haskell_main ruleset.
haskell_main_keywords_dict = {
":": "literal2",
"Addr": "keyword3",
"Bool": "keyword3",
"Bounded": "keyword3",
"Char": "keyword3",
"Double": "keyword3",
"EQ": "literal2",
"Either": "keyword3",
"Enum": "keyword3",
"Eq": "keyword3",
"False": "literal2",
"FilePath": "keyword3",
"Float": "keyword3",
"Floating": "keyword3",
"Fractional": "keyword3",
"Functor": "keyword3",
"GT": "literal2",
"IO": "keyword3",
"IOError": "keyword3",
"IOResult": "keyword3",
"Int": "keyword3",
"Integer": "keyword3",
"Integral": "keyword3",
"Ix": "keyword3",
"Just": "literal2",
"LT": "literal2",
"Left": "literal2",
"Maybe": "keyword3",
"Monad": "keyword3",
"Nothing": "literal2",
"Num": "keyword3",
"Ord": "keyword3",
"Ordering": "keyword3",
"Ratio": "keyword3",
"Rational": "keyword3",
"Read": "keyword3",
"ReadS": "keyword3",
"Real": "keyword3",
"RealFloat": "keyword3",
"RealFrac": "keyword3",
"Right": "literal2",
"Show": "keyword3",
"ShowS": "keyword3",
"String": "keyword3",
"True": "literal2",
"_": "keyword1",
"as": "keyword1",
"case": "keyword1",
"class": "keyword1",
"data": "keyword1",
"default": "keyword1",
"deriving": "keyword1",
"div": "operator",
"do": "keyword1",
"elem": "operator",
"else": "keyword1",
"hiding": "keyword1",
"if": "keyword1",
"import": "keyword1",
"in": "keyword1",
"infix": "keyword1",
"infixl": "keyword1",
"infixr": "keyword1",
"instance": "keyword1",
"let": "keyword1",
"mod": "operator",
"module": "keyword1",
"newtype": "keyword1",
"notElem": "operator",
"of": "keyword1",
"qualified": "keyword1",
"quot": "operator",
"rem": "operator",
"seq": "operator",
"then": "keyword1",
"type": "keyword1",
"where": "keyword1",
}
# Dictionary of keywords dictionaries for haskell mode.
keywordsDictDict = {
"haskell_main": haskell_main_keywords_dict,
}
# Rules for haskell_main ruleset.
def haskell_rule0(colorer, s, i):
return colorer.match_span(s, i, kind="comment2", begin="{-#", end="#-}",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def haskell_rule1(colorer, s, i):
return colorer.match_span(s, i, kind="comment1", begin="{-", end="-}",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False, no_word_break=False)
def haskell_rule2(colorer, s, i):
return colorer.match_eol_span(s, i, kind="comment1", seq="--",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="", exclude_match=False)
def haskell_rule3(colorer, s, i):
return colorer.match_span(s, i, kind="literal1", begin="\"", end="\"",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=True, no_word_break=False)
def haskell_rule4(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="' '",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule5(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'!'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule6(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'\"'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule7(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'$'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule8(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'%'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule9(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'/'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule10(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'('",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule11(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="')'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule12(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'['",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule13(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="']'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule14(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'+'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule15(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'-'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule16(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'*'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule17(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'='",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule18(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'/'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule19(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'^'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule20(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'.'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule21(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="','",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule22(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="':'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule23(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="';'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule24(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'<'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule25(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'>'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule26(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'|'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule27(colorer, s, i):
return colorer.match_seq(s, i, kind="literal1", seq="'@'",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule28(colorer, s, i):
return colorer.match_span(s, i, kind="literal1", begin="'", end="'",
at_line_start=False, at_whitespace_end=False, at_word_start=False,
delegate="",exclude_match=False,
no_escape=False, no_line_break=False,
### no_word_break=True)
no_word_break=False)
def haskell_rule29(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="..",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule30(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="&&",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule31(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="::",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule32(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="<",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule33(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq=">",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule34(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="+",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule35(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="-",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule36(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="*",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule37(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="/",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule38(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="%",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule39(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="^",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule40(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="=",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule41(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="|",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule42(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="@",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule43(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="~",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule44(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="!",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule45(colorer, s, i):
return colorer.match_seq(s, i, kind="operator", seq="$",
at_line_start=False, at_whitespace_end=False, at_word_start=False, delegate="")
def haskell_rule46(colorer, s, i):
return colorer.match_keywords(s, i)
# Rules dict for haskell_main ruleset.
rulesDict1 = {
"!": [haskell_rule44,],
"\"": [haskell_rule3,],
"$": [haskell_rule45,],
"%": [haskell_rule38,],
"&": [haskell_rule30,],
"'": [haskell_rule4,haskell_rule5,haskell_rule6,haskell_rule7,haskell_rule8,haskell_rule9,haskell_rule10,
haskell_rule11,haskell_rule12,haskell_rule13,haskell_rule14,haskell_rule15,haskell_rule16,haskell_rule17,
haskell_rule18,haskell_rule19,haskell_rule20,haskell_rule21,haskell_rule22,haskell_rule23,haskell_rule24,
haskell_rule25,haskell_rule26,haskell_rule27,haskell_rule28,],
"*": [haskell_rule36,],
"+": [haskell_rule34,],
"-": [haskell_rule2,haskell_rule35,],
".": [haskell_rule29,],
"/": [haskell_rule37,],
# Bizarre.
"0": [haskell_rule46,],
"1": [haskell_rule46,],
"2": [haskell_rule46,],
"3": [haskell_rule46,],
"4": [haskell_rule46,],
"5": [haskell_rule46,],
"6": [haskell_rule46,],
"7": [haskell_rule46,],
"8": [haskell_rule46,],
"9": [haskell_rule46,],
# ":": [haskell_rule31,haskell_rule46,],
":": [haskell_rule31,],
"<": [haskell_rule32,],
"=": [haskell_rule40,],
">": [haskell_rule33,],
# "@": [haskell_rule42,haskell_rule46,],
"@": [haskell_rule42,],
"A": [haskell_rule46,],
"B": [haskell_rule46,],
"C": [haskell_rule46,],
"D": [haskell_rule46,],
"E": [haskell_rule46,],
"F": [haskell_rule46,],
"G": [haskell_rule46,],
"H": [haskell_rule46,],
"I": [haskell_rule46,],
"J": [haskell_rule46,],
"K": [haskell_rule46,],
"L": [haskell_rule46,],
"M": [haskell_rule46,],
"N": [haskell_rule46,],
"O": [haskell_rule46,],
"P": [haskell_rule46,],
"Q": [haskell_rule46,],
"R": [haskell_rule46,],
"S": [haskell_rule46,],
"T": [haskell_rule46,],
"U": [haskell_rule46,],
"V": [haskell_rule46,],
"W": [haskell_rule46,],
"X": [haskell_rule46,],
"Y": [haskell_rule46,],
"Z": [haskell_rule46,],
"^": [haskell_rule39,],
"_": [haskell_rule46,],
"a": [haskell_rule46,],
"b": [haskell_rule46,],
"c": [haskell_rule46,],
"d": [haskell_rule46,],
"e": [haskell_rule46,],
"f": [haskell_rule46,],
"g": [haskell_rule46,],
"h": [haskell_rule46,],
"i": [haskell_rule46,],
"j": [haskell_rule46,],
"k": [haskell_rule46,],
"l": [haskell_rule46,],
"m": [haskell_rule46,],
"n": [haskell_rule46,],
"o": [haskell_rule46,],
"p": [haskell_rule46,],
"q": [haskell_rule46,],
"r": [haskell_rule46,],
"s": [haskell_rule46,],
"t": [haskell_rule46,],
"u": [haskell_rule46,],
"v": [haskell_rule46,],
"w": [haskell_rule46,],
"x": [haskell_rule46,],
"y": [haskell_rule46,],
"z": [haskell_rule46,],
"{": [haskell_rule0,haskell_rule1,],
"|": [haskell_rule41,],
"~": [haskell_rule43,],
}
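# Each key in rulesDict1 is the character under the colorizer's cursor; the
# mapped rules are tried in order until one matches at that position.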
# x.rulesDictDict for haskell mode.
rulesDictDict = {
"haskell_main": rulesDict1,
}
# Import dict for haskell mode.
importDict = {}
|
py | b416d9d70a497ae2193bd1b2f8e819a191a42253 | from .WrappedRadioWindow import WrappedRadioWindow
from .WrappedDownloadWindow import WrappedDownloadWindow
from .WrappedFinishWindow import WrappedFinishWindow
#from..StartingApp.WrappedStartingWindow import WrappedStartingWindow
# logging decorator
import sys
sys.path.append("...")
from logs.logger import debug
class BioequivalenceApp():
def __init__(self, menu_window):
self.settings = {}
self.menu_window = menu_window
self.radio_window = WrappedRadioWindow()
self.down_window = WrappedDownloadWindow()
self.finish_window = WrappedFinishWindow()
self.__build_connections(
[self.menu_window, self.radio_window, self.down_window, self.finish_window])
def __build_connections(self, ordered_windows):
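        # Wire the windows into a circular, doubly linked list: every window
        # points to its parent (previous screen) and child (next screen).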
ordered_windows[0].child = ordered_windows[1]
ordered_windows[0].parent = ordered_windows[-1]
ordered_windows[-1].child = ordered_windows[0]
ordered_windows[-1].parent = ordered_windows[-2]
for i in range(1, len(ordered_windows) - 1):
ordered_windows[i].child = ordered_windows[i + 1]
ordered_windows[i].parent = ordered_windows[i - 1]
@debug
def start(self):
self.radio_window.show()
|
py | b416dad86b819907ea82d42a7fbaf94918adb236 | """Subject routes."""
from datetime import datetime, timedelta
import bottle
from pymongo.database import Database
from database.datamodels import default_subject_attributes, latest_datamodel
from database.measurements import measurements_by_metric
from database.reports import insert_new_report, latest_reports, metrics_of_subject
from model.actions import copy_subject, move_item
from model.data import ReportData, SubjectData
from routes.plugins.auth_plugin import EDIT_REPORT_PERMISSION
from server_utilities.functions import report_date_time, uuid
from server_utilities.type import MetricId, ReportId, SubjectId
@bottle.post("/api/v3/subject/new/<report_uuid>", permissions_required=[EDIT_REPORT_PERMISSION])
def post_new_subject(report_uuid: ReportId, database: Database):
"""Create a new subject."""
data_model = latest_datamodel(database)
reports = latest_reports(database)
data = ReportData(data_model, reports, report_uuid)
data.report["subjects"][(subject_uuid := uuid())] = default_subject_attributes(database)
delta_description = f"{{user}} created a new subject in report '{data.report_name}'."
uuids = [report_uuid, subject_uuid]
result = insert_new_report(database, delta_description, (data.report, uuids))
result["new_subject_uuid"] = subject_uuid
return result
@bottle.post("/api/v3/subject/<subject_uuid>/copy/<report_uuid>", permissions_required=[EDIT_REPORT_PERMISSION])
def post_subject_copy(subject_uuid: SubjectId, report_uuid: ReportId, database: Database):
"""Add a copy of the subject to the report (new in v3)."""
data_model = latest_datamodel(database)
reports = latest_reports(database)
source = SubjectData(data_model, reports, subject_uuid)
target = ReportData(data_model, reports, report_uuid)
target.report["subjects"][(subject_copy_uuid := uuid())] = copy_subject(source.subject, source.datamodel)
delta_description = (
f"{{user}} copied the subject '{source.subject_name}' from report "
f"'{source.report_name}' to report '{target.report_name}'."
)
uuids = [target.report_uuid, subject_copy_uuid]
result = insert_new_report(database, delta_description, (target.report, uuids))
result["new_subject_uuid"] = subject_copy_uuid
return result
@bottle.post("/api/v3/subject/<subject_uuid>/move/<target_report_uuid>", permissions_required=[EDIT_REPORT_PERMISSION])
def post_move_subject(subject_uuid: SubjectId, target_report_uuid: ReportId, database: Database):
"""Move the subject to another report."""
data_model = latest_datamodel(database)
reports = latest_reports(database)
source = SubjectData(data_model, reports, subject_uuid)
target = ReportData(data_model, reports, target_report_uuid)
target.report["subjects"][subject_uuid] = source.subject
del source.report["subjects"][subject_uuid]
delta_description = (
f"{{user}} moved the subject '{source.subject_name}' from report "
f"'{source.report_name}' to report '{target.report_name}'."
)
source_uuids = [source.report_uuid, subject_uuid]
target_uuids = [target_report_uuid, subject_uuid]
return insert_new_report(database, delta_description, (source.report, source_uuids), (target.report, target_uuids))
@bottle.delete("/api/v3/subject/<subject_uuid>", permissions_required=[EDIT_REPORT_PERMISSION])
def delete_subject(subject_uuid: SubjectId, database: Database):
"""Delete the subject."""
data_model = latest_datamodel(database)
reports = latest_reports(database)
data = SubjectData(data_model, reports, subject_uuid)
del data.report["subjects"][subject_uuid]
delta_description = f"{{user}} deleted the subject '{data.subject_name}' from report '{data.report_name}'."
uuids = [data.report_uuid, subject_uuid]
return insert_new_report(database, delta_description, (data.report, uuids))
@bottle.post(
"/api/v3/subject/<subject_uuid>/attribute/<subject_attribute>", permissions_required=[EDIT_REPORT_PERMISSION]
)
def post_subject_attribute(subject_uuid: SubjectId, subject_attribute: str, database: Database):
"""Set the subject attribute."""
data_model = latest_datamodel(database)
reports = latest_reports(database)
data = SubjectData(data_model, reports, subject_uuid)
value = dict(bottle.request.json)[subject_attribute]
old_value = data.subject.get(subject_attribute) or ""
if subject_attribute == "position":
old_value, value = move_item(data, value, "subject")
else:
data.subject[subject_attribute] = value
if old_value == value:
return dict(ok=True) # Nothing to do
delta_description = (
f"{{user}} changed the {subject_attribute} of subject "
f"'{data.subject_name}' in report '{data.report_name}' from '{old_value}' to '{value}'."
)
uuids = [data.report_uuid, subject_uuid]
return insert_new_report(database, delta_description, (data.report, uuids))
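# For example, POSTing {"name": "Security"} to
# /api/v3/subject/<subject_uuid>/attribute/name renames the subject and
# records the change in the report's delta history.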
@bottle.get("/api/v3/subject/<subject_uuid>/measurements", authentication_required=False)
def get_subject_measurements(subject_uuid: SubjectId, database: Database):
"""Return all measurements for the subjects within the last 28 weeks."""
metric_uuids: list[MetricId] = metrics_of_subject(database, subject_uuid)
report_timestamp = datetime.fromisoformat(report_date_time()) if report_date_time() != "" else datetime.now()
min_datetime = report_timestamp - timedelta(weeks=28)
min_iso_timestamp = min_datetime.isoformat()
return dict(
measurements=list(
measurements_by_metric(
database, *metric_uuids, min_iso_timestamp=min_iso_timestamp, max_iso_timestamp=report_date_time()
)
)
)
|
py | b416db3ed471ba68dd666d89686012e433e327ee | #!/usr/bin/env python3
# PYTHON METADATA BELLOW
'''
File name: lab-1.py
Author: Alexander Rymdeko-Harvey
Date created: 12/12/2018
Date last modified: 12/12/2018
Python Version: 3.6.5
License:
Copyright (c) 2018 ⭕Alexander Rymdeko-Harvey & Obscurity Labs LLC.
This file is part of Python Fundementals (3.6)
(see https://github.com/obscuritylabs).
License: 3-clause BSD, see https://opensource.org/licenses/BSD-3-Clause
'''
# END OF METADATA
print("="*24+" LAB-RESULTS "+"="*24)
# IMPORTS BELOW
# Statements we use to open different modules that people have built.
# This allows code to be reusable, which is a key factor in good
# programming and performance.
import os
# END OF IMPORTS
# START OF LAB CODE BLOCK
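# A minimal solution for this lab (it is what the harness below looks for):
#     def hello_world():
#         print("Hello, world!")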
# END OF LAB CODE BLOCK
# ----- DO NOT EDIT BELOW THIS LINE -----
from colorama import init
from termcolor import colored
import os
init(autoreset=True)
# print test harness
print("="*20+" TEST-HARNESS-RESULTS "+"="*20)
# check if program name is defined
def test_hello():
try:
hello_world()
print("hello world is present: %s" % (colored('PASS', 'green')))
except Exception as e:
print("hello world is NOT present: %s" % (colored('FAIL', 'red')))
if __name__ == "__main__":
test_hello()
|
py | b416dbd912e8e8a5069e3656f718ac2429bb1e63 | # SPDX-License-Identifier: Apache-2.0
#
# http://nexb.com and https://github.com/nexB/scancode.io
# The ScanCode.io software is licensed under the Apache License version 2.0.
# Data generated with ScanCode.io is provided as-is without warranties.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# Data Generated with ScanCode.io is provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode.io should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
#
# ScanCode.io is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode.io for support and download.
import csv
import json
from django.core.serializers.json import DjangoJSONEncoder
from scancodeio import SCAN_NOTICE
from scancodeio import __version__ as scancodeio_version
from scanpipe.api.serializers import CodebaseResourceSerializer
from scanpipe.api.serializers import DiscoveredPackageSerializer
from scanpipe.api.serializers import RunSerializer
def queryset_to_csv(project, queryset, fieldnames):
"""
Create a csv file from the provided `queryset`.
The fields to include as columns and their order are controlled by the
`fieldnames` list.
The output file is created in the `project` output/ directory.
"""
model_name = queryset.model._meta.model_name
output_file = project.get_output_file_path(f"{model_name}", "csv")
with open(output_file, "w") as csvfile:
writer = csv.DictWriter(csvfile, fieldnames)
writer.writeheader()
for record in queryset.iterator():
record_dict = {field: getattr(record, field) for field in fieldnames}
writer.writerow(record_dict)
return output_file
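# For instance, queryset_to_csv(project, project.codebaseresources.all(),
# ["path", "status"]) writes codebaseresource.csv with those two columns
# (field names here are illustrative).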
def to_csv(project):
"""
Generate results output for the provided `project` as csv format.
Since the csv format does not support multiple tabs, one file is created
per object type.
The output files are created in the `project` output directory.
"""
data_sources = [
(project.discoveredpackages.all(), DiscoveredPackageSerializer),
(project.codebaseresources.without_symlinks(), CodebaseResourceSerializer),
]
for queryset, serializer in data_sources:
fieldnames = list(serializer().get_fields().keys())
queryset_to_csv(project, queryset, fieldnames)
class JSONResultsGenerator:
"""
Return the `project` JSON results as a Python generator.
Use this class to stream the results from the database to the client browser
without having to load everything in memory first.
Note that the Django Serializer class can output to a stream but cannot be
sent directly to a StreamingHttpResponse.
The results would have to be streamed to a file first, then iterated by the
StreamingHttpResponse, which do not work great in a HTTP request context as
the request can timeout while the file is generated.
"""
def __init__(self, project):
self.project = project
def __iter__(self):
yield "{\n"
yield from self.serialize(label="headers", generator=self.get_headers)
yield from self.serialize(label="packages", generator=self.get_packages)
yield from self.serialize(label="files", generator=self.get_files, latest=True)
yield "}"
def serialize(self, label, generator, latest=False):
yield f'"{label}": [\n'
prefix = ",\n"
first = True
for entry in generator(self.project):
if first:
first = False
else:
entry = prefix + entry
yield entry
yield "]\n" if latest else "],\n"
@staticmethod
def encode(data):
return json.dumps(data, indent=2, cls=DjangoJSONEncoder)
def get_headers(self, project):
runs = project.runs.all()
runs = RunSerializer(runs, many=True, exclude_fields=("url", "project"))
headers = {
"tool_name": "scanpipe",
"tool_version": scancodeio_version,
"notice": SCAN_NOTICE,
"uuid": project.uuid,
"created_date": project.created_date,
"input_files": project.input_files,
"runs": runs.data,
"extra_data": project.extra_data,
}
yield self.encode(headers)
def get_packages(self, project):
packages = project.discoveredpackages.all()
for obj in packages.iterator():
yield self.encode(DiscoveredPackageSerializer(obj).data)
def get_files(self, project):
resources = project.codebaseresources.without_symlinks()
for obj in resources.iterator():
yield self.encode(CodebaseResourceSerializer(obj).data)
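# The generator above can be streamed straight to the client, e.g.:
#     StreamingHttpResponse(JSONResultsGenerator(project),
#                           content_type="application/json")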
def to_json(project):
"""
Generate results output for the provided `project` as JSON format.
The output file is created in the `project` output/ directory.
"""
results_generator = JSONResultsGenerator(project)
output_file = project.get_output_file_path("results", "json")
with output_file.open("w") as file:
for chunk in results_generator:
file.write(chunk)
return output_file
|
py | b416dcb9efd5c967492fa2401c2ed3ca1372667b | from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
class TestDuplicates(object):
config = """
tasks:
duplicates_accept:
mock:
- {title: 'entry 1', url: 'http://foo.bar', another_field: 'bla'}
- {title: 'entry 2', url: 'http://foo.baz', another_field: 'bla'}
duplicates:
field: another_field
action: accept
duplicates_reject:
mock:
- {title: 'entry 1', url: 'http://foo.bar', another_field: 'bla'}
- {title: 'entry 2', url: 'http://foo.baz', another_field: 'bla'}
duplicates:
field: another_field
action: reject
duplicates_missing_field:
mock:
- {title: 'entry 1', url: 'http://foo.bar', another_field: 'bla'}
- {title: 'entry 2', url: 'http://foo.baz', another_field: 'bla'}
duplicates:
field: foo
action: reject
"""
def test_duplicates_accept(self, execute_task):
task = execute_task('duplicates_accept')
assert len(task.accepted) == 2
assert len(task.rejected) == 0
def test_duplicates_reject(self, execute_task):
task = execute_task('duplicates_reject')
assert len(task.accepted) == 0
assert len(task.rejected) == 1
def test_duplicates_missing_field(self, execute_task):
task = execute_task('duplicates_missing_field')
assert len(task.accepted) == 0
assert len(task.rejected) == 0
|
py | b416dfc0324baf4ef7a563b84e1243e5e4c67c0a | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# To register app to PyPI:
# python setup.py register -r pypi
#
# NOTE: To update PyPI, tag the current release:
#
# First increment cache_tools/__init__.py
# Then:
# > git tag x.y.z -m "Version bump for PyPI"
# > git push --tags origin master
# Then:
# > python setup.py sdist upload
#
from setuptools import setup, find_packages
from cmsplugin_sections import __version__
INSTALL_REQUIRES = [
'Django>=1.7.0',
'django-cms>=3.0.6',
]
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Communications',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Programming Language :: Python :: 2.7',
]
setup(
name='cmsplugin-sections',
version=__version__,
    description='Plugins to aid creating “single page scroller” websites.',
author='Martin Koistinen',
author_email='[email protected]',
url='https://github.com/mkoistinen/cmsplugin-sections',
packages=find_packages(),
install_requires=INSTALL_REQUIRES,
license='LICENSE.txt',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
long_description=open('README.md').read(),
include_package_data=True,
zip_safe=False,
download_url='https://github.com/mkoistinen/cmsplugin-sections/tarball/0.0.1',
) |
py | b416e0cfb112fd81ee0e17249477b9561b9a2a23 | """AVM FRITZ!Box connectivity sensor."""
from __future__ import annotations
from dataclasses import dataclass
import logging
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .common import AvmWrapper, FritzBoxBaseEntity
from .const import DOMAIN, MeshRoles
_LOGGER = logging.getLogger(__name__)
@dataclass
class FritzBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes Fritz sensor entity."""
exclude_mesh_role: MeshRoles = MeshRoles.SLAVE
SENSOR_TYPES: tuple[FritzBinarySensorEntityDescription, ...] = (
FritzBinarySensorEntityDescription(
key="is_connected",
name="Connection",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
),
FritzBinarySensorEntityDescription(
key="is_linked",
name="Link",
device_class=BinarySensorDeviceClass.PLUG,
entity_category=EntityCategory.DIAGNOSTIC,
),
FritzBinarySensorEntityDescription(
key="firmware_update",
name="Firmware Update",
device_class=BinarySensorDeviceClass.UPDATE,
entity_category=EntityCategory.DIAGNOSTIC,
exclude_mesh_role=MeshRoles.NONE,
),
)
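# A description is only instantiated below when the box's mesh role differs
# from its exclude_mesh_role; e.g. the firmware sensor is skipped on boxes
# without a mesh role (MeshRoles.NONE).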
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up entry."""
_LOGGER.debug("Setting up FRITZ!Box binary sensors")
avm_wrapper: AvmWrapper = hass.data[DOMAIN][entry.entry_id]
entities = [
FritzBoxBinarySensor(avm_wrapper, entry.title, description)
for description in SENSOR_TYPES
if (description.exclude_mesh_role != avm_wrapper.mesh_role)
]
async_add_entities(entities, True)
class FritzBoxBinarySensor(FritzBoxBaseEntity, BinarySensorEntity):
"""Define FRITZ!Box connectivity class."""
def __init__(
self,
avm_wrapper: AvmWrapper,
device_friendly_name: str,
description: BinarySensorEntityDescription,
) -> None:
"""Init FRITZ!Box connectivity class."""
self.entity_description = description
self._attr_name = f"{device_friendly_name} {description.name}"
self._attr_unique_id = f"{avm_wrapper.unique_id}-{description.key}"
super().__init__(avm_wrapper, device_friendly_name)
def update(self) -> None:
"""Update data."""
_LOGGER.debug("Updating FRITZ!Box binary sensors")
if self.entity_description.key == "firmware_update":
self._attr_is_on = self._avm_wrapper.update_available
self._attr_extra_state_attributes = {
"installed_version": self._avm_wrapper.current_firmware,
"latest_available_version": self._avm_wrapper.latest_firmware,
}
if self.entity_description.key == "is_connected":
self._attr_is_on = bool(self._avm_wrapper.fritz_status.is_connected)
elif self.entity_description.key == "is_linked":
self._attr_is_on = bool(self._avm_wrapper.fritz_status.is_linked)
|
py | b416e2582ac280cb13b445ce4a6774286f7d8688 | # Copyright (c) 2015, Julian Straub <[email protected]>
# Licensed under the MIT license. See the license file LICENSE.
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
import cv2
import scipy.io
import subprocess as subp
import os, re, time
import argparse
import mayavi.mlab as mlab
from vpCluster.rgbd.rgbdframe import RgbdFrame
from vpCluster.manifold.sphere import Sphere
from js.utils.config import Config2String
from js.utils.plot.pyplot import SaveFigureAsImage
parser = argparse.ArgumentParser(description = 'DpMM modeling and viewer')
parser.add_argument('-s','--start', type=int, default=0, help='start image Nr')
parser.add_argument('-e','--end', type=int, default=0, help='end image Nr')
parser.add_argument('-K0', type=int, default=1, help='initial number of MFs')
parser.add_argument('-b','--base', default='DpNiwSphereFull', help='base distribution/algorithm')
parser.add_argument('-nyu', action='store_true', help='switch to process the NYU dataset')
args = parser.parse_args()
cfg=dict()
cfg['path'] = '/home/jstraub/workspace/research/vpCluster/data/nyu2/'
cfg['path'] = '/home/jstraub/workspace/research/vpCluster/data/'
cfg['path'] = '~/workspace/research/vpCluster/data/'
cfg['outputPath'] = '/data/vision/scratch/fisher/jstraub/dpMM/'
cfg['path'] = '/data/vision/scratch/fisher/jstraub/dpMM/nyu2/'
cfg['path'] = '../data/'
#cfg['base'] = 'DpNiwSphereFull';
#cfg['base'] = 'spkm';
#cfg['base'] = 'DpNiw';
cfg['base'] = args.base;
cfg['K'] = args.K0
cfg['T'] = 600
cfg['delta'] = 18.
cfg['nu'] = 3 + 10000.0
seed = 214522
reRun = True
algo = 'guy' #'sobel'#'guy'
#mode = ['multi']
mode = ['multiFromFile']
mode = ['multi']
mode = ['single','disp']
if args.nyu:
mode = ['multiFromFile']
if 'single' in mode:
name = '2013-09-27.10:33:47' #
name = '2013-10-01.19:25:00' # my room
name = 'living_room_0000'
name = 'study_room_0004_uint16'
name = 'study_room_0005_uint16'
name = 'home_office_0001_uint16'
name = '2boxes_1'
name = 'kitchen_0004'
name = 'office_0008_uint16'
name = '3boxes_moreTilted_0' #segments really well - has far distance!!! [k=4]
name = 'table_1'
name = 'kitchen_0016_252'
name = 'MIT_hallway_0'
name = 'MIT_hallway_1'
names = [name]
elif 'multi' in mode:
names = []
for root,dirs,files in os.walk(cfg['path']):
for f in files:
ff = re.split('_',f)
if ff[-1] == 'd.png':
names.append('_'.join(ff[0:-1]))
print 'adding {}'.format(names[-1])
## names = ['home_office_0001_358','3boxes_moreTilted_0','couches_0','MIT_hallway_1','stairs_5','office_0008_17','stairs_5','MIT_hallway_0','kitchen_0007_132']
names = ['kitchen_0015_252', 'living_room_0058_1301', 'bedroom_0085_1084', 'kitchen_0033_819', 'conference_room_0002_342', 'kitchen_0048_879']
elif 'multiFromFile' in mode:
cfg['evalStart'] = args.start
cfg['evalEnd'] = args.end
indexPath = '/data/vision/fisher/data1/nyu_depth_v2/index.txt'
cfg['path'] = '/data/vision/fisher/data1/nyu_depth_v2/extracted/'
cfg['outputPath'] = '/data/vision/scratch/fisher/jstraub/dpMM/nyu2/'
names =[]
with open(indexPath) as f:
allNames = f.read().splitlines() #readlines()
for i in range(len(allNames)):
if cfg['evalStart'] <= i and i <cfg['evalEnd']:
names.append(allNames[i])
print '@{}: {}'.format(len(names)-1,names[-1])
print names
else:
print 'no files in list'
exit(1)
if 'disp' in mode:
figm0 = mlab.figure(bgcolor=(1,1,1))
figm1 = mlab.figure(bgcolor=(1,1,1))
fig0 = plt.figure()
rndInds = np.random.permutation(len(names))
for ind in rndInds:
name = names[ind]
outName = cfg['outputPath']+name+'_'+Config2String(cfg).toString()
if 'multiFromFile' in mode and os.path.isfile(outName):
print 'skipping '+outName+' since it is already existing'
continue;
print 'processing '+cfg['path']+name
rgbd = RgbdFrame(460.0) # correct: 540
rgbd.load(cfg['path']+name)
if 'disp' in mode:
rgbd.showRgbd(fig=fig0)
plt.show()
# rgbd.showPc()
# mlab.show(stop=True)
rgbd.getPc()
print np.max(rgbd.d)
nAll = rgbd.getNormals(algo=algo)
n = nAll[rgbd.mask,:].T
print n.shape
D = n.shape[0]
N = n.shape[1]
dataPath = cfg['path']+name+'_normals.csv'
np.savetxt(dataPath,n)
alpha = np.ones(cfg['K'])*1. #*100.;
if cfg['base'] == 'NiwSphereUnifNoise':
alpha[cfg['K']-1] = 1.0;
nu = cfg['nu']#+N/10.
if cfg['base'] == 'DpNiw':
kappa = 1.0
thetaa = np.zeros(D)
Delta = nu * (cfg['delta']*np.pi)/180.0 * np.eye(D)
params = np.array([nu,kappa])
params = np.r_[params,thetaa.ravel(),Delta.ravel()]
else:
Delta = nu* (cfg['delta']*np.pi)/180.0 * np.eye(D-1)
params = np.array([nu])
params = np.r_[params,Delta.ravel()]
#args = ['../build/dpSubclusterSphereGMM',
args = ['../build/dpmmSampler',
'-N {}'.format(N),
'-D {}'.format(D),
'-K {}'.format(cfg['K']),
'-T {}'.format(cfg['T']),
'--alpha '+' '.join([str(a) for a in alpha]),
'--base '+cfg['base'],
'--seed {}'.format(seed),
'-i {}'.format(dataPath),
'-o {}'.format(outName+'.lbl'),
'--params '+' '.join([str(p) for p in params])]
if reRun:
print ' '.join(args)
print ' --------------------- '
time.sleep(1);
err = subp.call(' '.join(args),shell=True)
if err:
print 'error when executing'
if not 'multiFromFile' in mode:
raw_input()
continue
print outName+'.lbl'
z = np.loadtxt(outName+'.lbl',delimiter=' ').astype(int)
logLike = np.loadtxt(outName+'.lbl_jointLikelihood.csv',delimiter=' ')
T = z.shape[0]
for t in range(T):
print t,logLike[t],np.bincount(z[t,:])
for t in range(T):
print t,logLike[t],np.bincount(z[t,:]/2)
if cfg['base'] in ['DpNiw','DpNiwSphereFull']:
zz = z[-1,:]/2
t = T-1
K = (np.max(z[-1,:])+1)/2
means = np.loadtxt(outName+'.lbl_means.csv')
for k in range(K):
for j in range(k+1,K):
muk = means[t*3:(t+1)*3,3*k+2]
muj = means[t*3:(t+1)*3,3*j+2]
print np.arccos(muk.dot(muj))*180./np.pi
if np.arccos(muk.dot(muj))*180./np.pi < 5:
zz[zz==j] = k
elif cfg['base'] in ['spkm', 'kmeans']:
zz = z[-1,:]
t = T-1
K = (np.max(z[-1,:])+1)
figL = plt.figure()
I = np.zeros(rgbd.mask.shape)
I[rgbd.mask] = zz + 1
plt.imshow(I,cmap=cm.spectral,figure = figL)
# plt.imshow(I,cmap=cm.hsv,figure = figL)
SaveFigureAsImage(outName+'lbls.png',figL)
if 'disp' in mode:
figL.show()
# plt.show()
figm2 = rgbd.showWeightedNormals(algo=algo)
fig = rgbd.showAxialSigma()
fig = rgbd.showLateralSigma(theta=30.0)
#fig = rgbd.bilateralDepthFiltering(theta=30.0)
figm0 = rgbd.showPc(showNormals=True,algo=algo)
figm1 = rgbd.showNormals()
figm2 = rgbd.showNormals(as2D=True); figm2.show()
M = Sphere(2)
M.plotFanzy(figm1,1.0)
mlab.show(stop=True)
elif 'multiFromFile' in mode and 'disp' in mode:
figm0 = rgbd.showPc(figm=figm0,showNormals=True,algo=algo)
figm1 = rgbd.showNormals(figm=figm1)
M = Sphere(2)
M.plotFanzy(figm1,1.0)
mlab.show(stop=True)
mlab.clf(figm0)
mlab.clf(figm1)
plt.close(figL)
|
py | b416e25ecb93e3ae159e0ff650d143019edc6586 | from users.models import Profile
from django.test import TestCase
from rest_framework.test import APIClient,APITestCase
from .serializers import ProfileSerializer
from rest_framework.views import status
from django.urls import reverse
# Create your tests here.
class BaseViewTest(APITestCase):
client = APIClient()
@staticmethod
def create_profile(
user="",
bio="",
image="",
):
if user != "" and bio != "" and image != "" :
Profile.objects.create.objects(user=user,bio=bio,image=image)
def setUp(self):
        self.create_profile('Mabel', 'Keep life simple',
                            'cloudlink.field',
                            )
class GetProjectTest(BaseViewTest):
def test_get_project(self):
"""
        This test ensures that all profiles added in the setUp method
        exist when we make a GET request to the projects endpoint
"""
response = self.client.get(
reverse('Projects', kwargs={'version': 'v1'}))
# fetch the data from db
expected = Profile.objects.all()
serialized = ProfileSerializer(expected, many=True)
self.assertEqual(response.data, serialized.data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
py | b416e262a7e2400700325cc5ca63db9331e00516 | # TODO: Refactor this to get information from existing database
POPULAR_BEER_STYLES = [
# Sorted by COUNT DESC
'IPA',
'Pale Ale',
'Lager',
'Stout',
'Belgian Ale',
'Golden Ale',
'Sour Ale',
'Gose',
'Session IPA',
'Hazy Ale',
'Wheat Ale',
'Imperial IPA',
'Pilsner',
'Ale',
'Fruit Beer',
'Porter',
'Mead',
'Tripel',
'New England IPA',
'Amber Ale',
'Wheat',
'Hazy IPA',
'Quadrupel',
'Double IPA',
'NEIPA',
'India Pale Ale',
'Dark Ale',
'Imperial Stout',
'Session Ale',
]
POPULAR_BEER_BRANDS = [
# Sorted by COUNT DESC
'Lost Coast',
'Garage Project',
'Holgate',
'Kona',
'Hawkers',
'Eagle Bay',
'Nail',
'Brewlander',
'Deschutes',
'Colonial',
'Avery',
'Sierra Nevada',
'Little Creatures',
'Great Divide',
'Kaiju',
'Green Flash',
'Coronado',
'Chouffe',
'Collective Arts Brewing',
'Deschutes Brewery',
'Trouble Brewing',
'Mikkeller',
'New Belgium',
'Founders',
'Stone',
'Heart of Darkness',
'Lagunitas',
'Lindemans',
'Golden Road',
'Rodenbach',
'Rocky Ridge',
'Alpine',
'The Bruery',
'Blasta Brewing',
'Thirsty',
'4 Pines',
'Heretic',
'Schneider Weisse',
'Deeds Brewing',
'Rogue',
'Pirate Life',
'BrewDog',
'Duvel',
'Anderson Valley',
'Brothers',
'Playground Brewery',
'Wayward',
'Off Day Beer Company',
'Otherside',
'Vedett',
'Lion City Meadery',
'Mother Earth',
'Stockade',
'Thornbridge',
'Mike Hess',
'Vocation Brewery',
'Pizza Port',
'Brewerkz',
'Huyghe',
'Rochefort',
'Magic Rock Brewing',
'Bosteels',
'Young Master',
'Wychwood',
'Behemoth Brewing',
'Feral Brewing Co',
'Westmalle',
'1925',
'St Bernardus',
'Maui',
'Salt Beer Factory',
'Gage Roads',
'Atomic',
'Lion Brewery',
'Chillhops Brewing Co.',
'Ska',
'Van Honsebrouck',
'Van Steenberge',
'Erdinger',
'La Trappe',
'Adroit Theory Brewing',
'Delirium',
'Gorilla',
'Levante',
'Gosnells',
'Devil\'s Peak',
'Sweetwater Brewery',
'Leffe',
'Thatchers Cider',
'ParrotDog',
'Chimay',
'Paulaner',
'Archipelago',
'3 Fonteinen',
'Co-conspirators Brewing Co',
'Deep Creek',
'Affligem',
'Dogfish Head',
'Duncan\'s Brewing Company',
'Brouwerij',
'Barossa Valley Brewing',
'Hoegaarden',
'Epic Brewing',
'Two Tribes',
'Omnipollo',
'Rye & Pint',
'North Coast Brewing',
'Hitachino Nest',
'Drake\'s',
'Rogue Ales',
'Gweilo',
'Veltins',
'Liberty Brewing',
'Boon Oude Geuze',
'Moon Dog Brewery',
'Orval',
'Pabst',
'Brooklyn',
'Fourpure Brewing Co',
'Dupont',
'Estrella',
'Samuel Adams',
'Coedo',
]
POPULAR_BEERS = [
'Kona Big Wave Hawaiian Golden Ale',
'Green Flash Saturhaze Session Hazy IPA',
'Kona Longboard Hawaiian Lager',
'Lost Coast Peanut Butter Chocolate Milk Stout',
'Green Flash Tropical DNA Hazy IPA',
'New Belgium Voodoo Ranger Juicy Haze IPA',
'Lost Coast Hazy IPA',
'Duvel Tripel Hop Belgian IPA',
'Rodenbach Classic Belgian Sour Flemish Ale',
'Rogue Batsquatch Hazy IPA',
'Eagle Bay Australian India Pale Lager',
'Chouffe Blanche Belgian Wheat Ale',
'Coronado Pineapple Farm Hazy IPA',
'Hawkers Australian Hazy Pale Ale',
'Colonial Australian Southwest Sour Ale',
'Brewlander Euphoria Double Dry Hopped Hazy IPA',
'Sierra Nevada Hazy Little Thing IPA',
'Levante x Team Secret A.F.K. Gamer\'s Hazy IPA',
'Little Creatures Dog Days Australian Session IPA',
'Stone Tangerine Express Hazy IPA',
'Golden Road Hazy Los Angeles IPA',
'Great Divide Hercules God style Double IPA',
'Kaiju Aftermath Ultra Hopped Double IPA',
'Mike Hess Hop Cloud Hazy IPA',
'New Belgium Voodoo Ranger IPA',
'Kona Hanalei Hawaiian Session IPA with Passion Fruit Orange Guava',
'New Belgium Fat Tire Amber Ale',
'Nail Super VPA Imperial IPA with Lemon Lime Rind',
'Mike Hess Habitus Double IPA',
'Dogfish Head 90 Minute Imperial IPA',
'Coronado Never Better Double IPA',
'Sierra Nevada Fantastic Haze Hazy Imperial IPA',
'Deschutes Fresh Haze Citrus Hazy IPA',
'Eagle Bay Australian Pale Ale',
'Lost Coast Revenant IPA',
'Colonial Australian Pale Ale',
'Founders All Day Session IPA',
'Pizza Port Ponto Session IPA',
'Lost Coast Sharkinator Session Wheat IPA',
'Lost Coast Tangerine Wheat Ale',
'Eagle Bay Australian Lager',
'Great Divide Car Camper Hazy Pale Ale',
'Avery Hazyish Hazy IPA',
'Colonial Australian IPA',
'Brewlander Courage Double IPA',
'Lagunitas Daytime Lo Cal Low Carb Session IPA',
'Stone FML Fear. Movie. Lions. Hazy Double IPA',
'Rodenbach Grand Cru Oak Aged Belgian Sour Flemish Ale',
'Great Divide Fastpack Lo Cal Low Carb Hazy IPA',
'Lagunitas Hazy Wonder Hazy IPA',
'Green Flash Soul Style IPA',
'Vedett Extra Pilsner Belgian Lager',
'Vedett Extra Ordinary Belgian IPA',
'Founders Centennial IPA',
'Wychwood Hobgoblin Gold English Golden Ale',
'Wychwood Hobgoblin Ruby English Amber Ale',
]
RESULTS_NOT_FOUND_GIFS = [
'https://media.giphy.com/media/VHrSW9jIYrlat3PwIB/giphy.gif', # Morty
'https://media.giphy.com/media/1USKMDPjuH4ovL7J5h/giphy.gif', # Rick
'https://media.giphy.com/media/TGKwNUQ11E9LnACDLe/giphy.gif', # Morty
'https://media.giphy.com/media/WTiHD79xbtd53CgHpB/giphy.gif', # Mr. Meeseeks
'https://media.giphy.com/media/QvH2Em7M2kb04VEOG2/giphy.gif', # Jerry
'https://media.giphy.com/media/M9gBSOsxQW3od0GwfW/giphy.gif', # Summer
]
GA_JS_FILE = "https://www.googletagmanager.com/gtag/js?id=G-TLTRLY136L"
GA_JS_CODE = """
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'G-YW0GRZJ8MT');
"""
|
py | b416e2e05b81f527a04236c66118380957139ebe | import networkx.algorithms.assortativity.tests.base_test
import networkx.algorithms.assortativity.tests.test_connectivity
import networkx.algorithms.assortativity.tests.test_correlation
import networkx.algorithms.assortativity.tests.test_mixing
import networkx.algorithms.assortativity.tests.test_neighbor_degree
import networkx.algorithms.assortativity.tests.test_pairs
import pytest
from graphscope.nx.utils.compat import import_as_graphscope_nx
# N.B. base_test must be imported first
import_as_graphscope_nx(networkx.algorithms.assortativity.tests.base_test,
decorators=pytest.mark.usefixtures("graphscope_session"))
import_as_graphscope_nx(networkx.algorithms.assortativity.tests.test_connectivity,
decorators=pytest.mark.usefixtures("graphscope_session"))
import_as_graphscope_nx(networkx.algorithms.assortativity.tests.test_neighbor_degree,
decorators=pytest.mark.usefixtures("graphscope_session"))
import_as_graphscope_nx(networkx.algorithms.assortativity.tests.test_correlation,
decorators=pytest.mark.usefixtures("graphscope_session"))
import_as_graphscope_nx(networkx.algorithms.assortativity.tests.test_mixing,
decorators=pytest.mark.usefixtures("graphscope_session"))
import_as_graphscope_nx(networkx.algorithms.assortativity.tests.test_pairs,
decorators=pytest.mark.usefixtures("graphscope_session"))
|
py | b416e2ed685ffe20673313f7dd0c563dc12d35c6 | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
maxEventsToPrint = cms.untracked.int32(1),
pythiaPylistVerbosity = cms.untracked.int32(1),
filterEfficiency = cms.untracked.double(1.0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
comEnergy = cms.double(13000.),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'SoftQCD:nonDiffractive = on',
'SoftQCD:singleDiffractive = on',
'SoftQCD:doubleDiffractive = on',
),
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters',
)
)
)
|
py | b416e351f140c68c6edd25bca2938209d9b83443 | from invenio_records import Record
class AllNrRecord(Record):
pass
|
py | b416e388b8981ab2938a32913ff65ea272167e54 | from django.db import models
from django.contrib.auth.models import (
AbstractBaseUser,
BaseUserManager,
PermissionsMixin,
)
class UserManager(BaseUserManager):
def create_user(self, email, password=None, **extra_fields):
"""Creates and saves a new user"""
if not email:
raise ValueError("Users must have an email addresss")
user = self.model(email=self.normalize_email(email), **extra_fields)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, password):
"""Create and saves a new superuser"""
user = self.create_user(email, password)
user.is_staff = True
user.is_superuser = True
user.save(using=self._db)
return user
class User(AbstractBaseUser, PermissionsMixin):
"""Custom user modle that supports using email instead of username"""
email = models.EmailField(max_length=255, unique=True)
name = models.CharField(max_length=255)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
objects = UserManager()
USERNAME_FIELD = "email"
|
py | b416e52d80f13eeee53207f2350cb96cb46c6008 | from django.db import models
from ..funcionarios.models import Funcionario
class Documento(models.Model):
descricao = models.CharField(max_length=100)
pertence = models.ForeignKey(
Funcionario, on_delete=models.PROTECT)
class Meta:
verbose_name = 'Documento'
verbose_name_plural = 'Documentos'
ordering = ['descricao']
def __str__(self):
return self.descricao |
py | b416e55dca25e9f273d73e87b9b547a9d8ead558 | # -*- coding: utf-8 -*-
#
# Copyright 2019 Klimaat
from __future__ import division
import datetime
import numpy as np
import matplotlib.pyplot as plt
def join_date(y=1970, m=1, d=1, hh=0, mm=0, ss=0):
"""
Join date/time components into datetime64 object
"""
y = (np.asarray(y) - 1970).astype("<M8[Y]")
m = (np.asarray(m) - 1).astype("<m8[M]")
d = (np.asarray(d) - 1).astype("<m8[D]")
hh = np.asarray(hh).astype("<m8[h]")
mm = np.asarray(mm).astype("<m8[m]")
ss = np.asarray(ss).astype("<m8[s]")
return y + m + d + hh + mm + ss
def split_date(dates):
"""
Split datetime64 dates into year, month, day components.
"""
y = dates.astype("<M8[Y]").astype(int) + 1970
m = dates.astype("<M8[M]").astype(int) % 12 + 1
d = (dates - dates.astype("<M8[M]")).astype("<m8[D]").astype(int) + 1
return y, m, d
def split_time(dates):
"""
Split datetime64 dates into hour, minute, second components.
"""
hh = (dates - dates.astype("<M8[D]")).astype("<m8[h]").astype(int)
mm = (dates - dates.astype("<M8[h]")).astype("<m8[m]").astype(int)
ss = (dates - dates.astype("<M8[m]")).astype("<m8[s]").astype(int)
return hh, mm, ss
def day_of_year(dates, snap=True):
"""
Calculate the day of the year: 1-365/366 when snap=True, else fractional days since Jan 1 00:00 (0-based)
"""
dt = np.asarray(dates) - dates.astype("<M8[Y]")
if snap:
# Provide value at noon (integer)
# Jan 1st anytime = 1
return dt.astype("<m8[D]").astype(int) + 1
else:
# Provide value including fractional part (float)
# Jan 1st at 00:00 = 0, Jan 1st at noon = 0.5
return dt.astype("<m8[s]").astype(int) / 86400
def julian_day(dates):
"""
Julian day calculator
"""
# Get Julian Day number
y, m, d = split_date(dates)
a = (14 - m) // 12
y += 4800 - a
m += 12 * a - 3
jd = d + ((153 * m + 2) // 5) + 365 * y + y // 4 - y // 100 + y // 400 - 32045
# Get fractional day (noon=0)
hh, mm, ss = split_time(dates)
fd = (hh - 12) / 24 + mm / 1440 + ss / 86400
return jd, fd
def orbit_ashrae(utc):
"""
Calculate solar parameters based on ASHRAE methodology.
Ref. ASHRAE HOF 2017, Chap 14
"""
# Day of year
n = day_of_year(utc, snap=True)
# Declination (eqn. 10, radians)
decl = np.radians(23.45 * np.sin(2 * np.pi * (n + 284) / 365))
# Equation of time (eqns 5 & 6, min)
gamma = 2 * np.pi * (n - 1) / 365
eqnOfTime = 2.2918 * (
0.0075
+ 0.1868 * np.cos(gamma)
- 3.2077 * np.sin(gamma)
- 1.4615 * np.cos(2 * gamma)
- 4.089 * np.sin(2 * gamma)
)
# Convert from minutes to radians
eqnOfTime *= np.pi / (60 * 12)
# Solar constant correction
solFactor = 1 + 0.033 * np.cos(np.radians(360 * (n - 3) / 365))
return np.sin(decl), np.cos(decl), eqnOfTime, solFactor
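# Sanity check on the ASHRAE declination above (comments only): at n = 81
# (~March equinox) n + 284 = 365, so sin(2*pi*365/365) = 0 and the declination
# is ~0 deg; at n = 172 (~June solstice) it peaks near +23.45 deg.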
def orbit_energyplus(utc):
"""
Calculate solar coefficients based on EnergyPlus
Ref. WeatherManager.cc, function CalculateDailySolarCoeffs
"""
# Day of year
n = day_of_year(utc, snap=True)
# Day Angle
D = 2 * np.pi * n / 366.0
sinD = np.sin(D)
cosD = np.cos(D)
# Calculate declination sines & cosines
sinDec = (
0.00561800
+ 0.0657911 * sinD
- 0.392779 * cosD
+ 0.00064440 * (sinD * cosD * 2.0)
- 0.00618495 * (cosD ** 2 - sinD ** 2)
- 0.00010101 * (sinD * (cosD ** 2 - sinD ** 2) + cosD * (sinD * cosD * 2.0))
- 0.00007951 * (cosD * (cosD ** 2 - sinD ** 2) - sinD * (sinD * cosD * 2.0))
- 0.00011691 * (2.0 * (sinD * cosD * 2.0) * (cosD ** 2 - sinD ** 2))
+ 0.00002096 * ((cosD ** 2 - sinD ** 2) ** 2 - (sinD * cosD * 2.0) ** 2)
)
cosDec = np.sqrt(1 - sinDec ** 2)
# Equation of time (hours)
eqnOfTime = (
0.00021971
- 0.122649 * sinD
+ 0.00762856 * cosD
- 0.156308 * (sinD * cosD * 2.0)
- 0.0530028 * (cosD ** 2 - sinD ** 2)
- 0.00388702 * (sinD * (cosD ** 2 - sinD ** 2) + cosD * (sinD * cosD * 2.0))
- 0.00123978 * (cosD * (cosD ** 2 - sinD ** 2) - sinD * (sinD * cosD * 2.0))
- 0.00270502 * (2.0 * (sinD * cosD * 2.0) * (cosD ** 2 - sinD ** 2))
- 0.00167992 * ((cosD ** 2 - sinD ** 2) ** 2 - (sinD * cosD * 2.0) ** 2)
)
# Convert to radians
eqnOfTime = np.pi * eqnOfTime / 12
# Solar constant correction factor
solFactor = 1.000047 + 0.000352615 * sinD + 0.0334454 * cosD
return sinDec, cosDec, eqnOfTime, solFactor
def orbit_cfsr(utc):
"""
Calculate solar coefficients based on CFSR methodology
Ref. radiation_astronomy.f, subroutine solar
"""
# Get julian day and fractional part of day
jd, fjd = julian_day(utc)
# Julian day of the epoch, which is January 0, 1900 at 12 hours UTC
jdor = 2415020
# Days per year
cyear = 365.25
# Days between epoch and perihelion passage of 1900
tpp = 1.55
# Days between perihelion passage and March equinox of 1900
svt6 = 78.035
# Julian centuries after epoch
t1 = (jd - jdor) / 36525.0
# Length of anomalistic and tropical years (minus 365 days)
ayear = 0.25964134e0 + 0.304e-5 * t1
tyear = 0.24219879e0 - 0.614e-5 * t1
# Orbit eccentricity and earth's inclination (deg)
ec = 0.01675104e0 - (0.418e-4 + 0.126e-6 * t1) * t1
angin = 23.452294e0 - (0.0130125e0 + 0.164e-5 * t1) * t1
ador = jdor
jdoe = np.asarray(ador + (svt6 * cyear) / (ayear - tyear), dtype=int)
# deleqn is updated svt6 for current date
deleqn = (jdoe - jd) * (ayear - tyear) / cyear
ayear = ayear + 365
sni = np.sin(np.radians(angin))
tini = 1 / np.tan(np.radians(angin))
er = np.sqrt((1 + ec) / (1 - ec))
# mean anomaly
qq = deleqn * 2 * np.pi / ayear
def solve_kepler(e, M, E=1, eps=1.3e-6):
"""
Solve Kepler equation for eccentric anomaly E by Newton's method
based on eccentricity e and mean anomaly M
"""
for i in range(10):
dE = -(E - e * np.sin(E) - M) / (1 - e * np.cos(E))
E += dE
dEmax = np.max(np.abs(dE))
if dEmax < eps:
break
else:
print("Warning: Exceeding 10 iterations in Kepler solver:", dEmax)
return E
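# NB: with the small eccentricity used here (e ~ 0.017) the Newton iteration in
# solve_kepler typically converges in a few steps from the default guess E = 1;
# the stopping test is on max |dE| across the array.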
# Eccentric anomaly at equinox
e1 = solve_kepler(ec, qq)
# True anomaly at equinox
eq = 2.0 * np.arctan(er * np.tan(0.5 * e1))
# Date is days since last perihelion passage
dat = jd - jdor - tpp + fjd
date = dat % ayear
# Mean anomaly
em = 2 * np.pi * date / ayear
# Eccentric anomaly
e1 = solve_kepler(ec, em)
# True anomaly
w1 = 2.0 * np.arctan(er * np.tan(0.5 * e1))
# Earth-Sun radius relative to mean radius
r1 = 1.0 - ec * np.cos(e1)
# Sine of declination angle
# NB. ecliptic longitude = w1 - eq
sdec = sni * np.sin(w1 - eq)
# Cosine of declination angle
cdec = np.sqrt(1.0 - sdec * sdec)
# Sun declination (radians)
dlt = np.arcsin(sdec)
# Sun right ascension (radians)
alp = np.arcsin(np.tan(dlt) * tini)
alp = np.where(np.cos(w1 - eq) < 0, np.pi - alp, alp)
alp = np.where(alp < 0, alp + 2 * np.pi, alp)
# Equation of time (radians)
sun = 2 * np.pi * (date - deleqn) / ayear
sun = np.where(sun < 0.0, sun + 2 * np.pi, sun)
slag = sun - alp - 0.03255
# Solar constant correction factor (inversely with radius squared)
solFactor = 1 / (r1 ** 2)
return sdec, cdec, slag, solFactor
def orbit_noaa(utc):
"""
Orbit as per NOAA Solar Calculation spreadsheet
https://www.esrl.noaa.gov/gmd/grad/solcalc/calcdetails.html
Similar to CFSR but faster
"""
# Julian day (including fractional part)
jd, fjd = julian_day(utc)
jd = jd + fjd
# Julian century
jc = (jd - 2451545) / 36525
# Geometric mean longitude (deg)
gml = (280.46646 + jc * (36000.76983 + jc * 0.0003032)) % 360
# Geometric mean anomaly Sun (deg)
gma = 357.52911 + jc * (35999.05029 - 0.0001537 * jc)
# Eccentricity of Earth's orbit
ecc = 0.016708634 - jc * (0.000042037 + 0.0000001267 * jc)
# Sun equation of centre (deg)
ctr = (
np.sin(np.radians(gma)) * (1.914602 - jc * (0.004817 + 0.000014 * jc))
+ np.sin(np.radians(2 * gma)) * (0.019993 - 0.000101 * jc)
+ np.sin(np.radians(3 * gma)) * 0.000289
)
# Sun true longitude (deg)
stl = gml + ctr
# Sun true anomaly (deg)
sta = gma + ctr
# Sun radius vector (AUs)
rad = (1.000001018 * (1 - ecc * ecc)) / (1 + ecc * np.cos(np.radians(sta)))
# Sun apparent longitude (deg)
sal = stl - 0.00569 - 0.00478 * np.sin(np.radians(125.04 - 1934.136 * jc))
# Mean obliquity ecliptic (deg)
moe = (
23
+ (26 + ((21.448 - jc * (46.815 + jc * (0.00059 - jc * 0.001813)))) / 60) / 60
)
# Obliquity correction (deg)
obl = moe + 0.00256 * np.cos(np.radians(125.04 - 1934.136 * jc))
# Sun right ascension (deg)
sra = np.degrees(
np.arctan2(
np.cos(np.radians(obl)) * np.sin(np.radians(sal)), np.cos(np.radians(sal)),
)
)
# Sun declination
sinDec = np.sin(np.radians(obl)) * np.sin(np.radians(sal))
cosDec = np.sqrt(1.0 - sinDec * sinDec)
# Var y
vary = np.tan(np.radians(obl / 2)) * np.tan(np.radians(obl / 2))
# Equation of time (minutes)
eqnOfTime = 4 * np.degrees(
vary * np.sin(2 * np.radians(gml))
- 2 * ecc * np.sin(np.radians(gma))
+ 4 * ecc * vary * np.sin(np.radians(gma)) * np.cos(2 * np.radians(gml))
- 0.5 * vary * vary * np.sin(4 * np.radians(gml))
- 1.25 * ecc * ecc * np.sin(2 * np.radians(gma))
)
# Convert from minutes to radians
eqnOfTime *= np.pi / (60 * 12)
# Solar constant correction factor (inversely with radius squared)
solFactor = 1 / (rad ** 2)
return sinDec, cosDec, eqnOfTime, solFactor
def orbit_merra2(utc):
"""
Orbit as per MERRA2 code
"""
# MERRA-2 solar repeats on a four-year leap-year cycle
yearlen = 365.25
days_per_cycle = 1461
if orbit_merra2.orbit is None:
# Constants from MAPL_Generic.F90
ecc = 0.0167
obliquity = np.radians(23.45)
perihelion = np.radians(102.0)
equinox = 80
omg = (2.0 * np.pi / yearlen) / np.sqrt(1 - ecc ** 2) ** 3
sob = np.sin(obliquity)
# TH: Orbit anomaly
# ZS: Sine of declination
# ZC: Cosine of declination
# PP: Inverse of square of earth-sun distance
# Integration starting at vernal equinox
def calc_omega(th):
return omg * (1.0 - ecc * np.cos(th - perihelion)) ** 2
orbit = np.recarray(
(days_per_cycle,),
dtype=[("th", float), ("zs", float), ("zc", float), ("pp", float)],
)
def update_orbit(th):
zs = np.sin(th) * sob
zc = np.sqrt(1.0 - zs ** 2)
pp = ((1.0 - ecc * np.cos(th - perihelion)) / (1.0 - ecc ** 2)) ** 2
orbit[kp] = th, zs, zc, pp
# Starting point
th = 0
kp = equinox
update_orbit(th)
# Runge-Kutta
for k in range(days_per_cycle - 1):
t1 = calc_omega(th)
t2 = calc_omega(th + 0.5 * t1)
t3 = calc_omega(th + 0.5 * t2)
t4 = calc_omega(th + t3)
kp = (kp + 1) % days_per_cycle
th += (t1 + 2 * (t2 + t3) + t4) / 6.0
update_orbit(th)
# Cache it
orbit_merra2.orbit = orbit
else:
orbit = orbit_merra2.orbit
# Map into orbit
year, month, day = split_date(utc)
doy = day_of_year(utc, snap=True)
iyear = (year - 1) % 4
iday = iyear * int(yearlen) + doy - 1
# Declination
sinDec = orbit["zs"][iday]
cosDec = orbit["zc"][iday]
# MERRA uses *solar* instead of *clock* time; no equation of time
eqnOfTime = np.zeros_like(sinDec)
# Inverse square of earth-sun distance ratio to mean distance
solFactor = orbit["pp"][iday]
return sinDec, cosDec, eqnOfTime, solFactor
# For caching MERRA-2 orbit
orbit_merra2.orbit = None
def orbit(utc, method=None):
if method is None:
method = "ASHRAE"
if callable(method):
func = method
method = "Custom"
else:
method = method.upper()
if method.startswith("A"):
func = orbit_ashrae
elif method.startswith("C"):
func = orbit_cfsr
elif method.startswith("E"):
func = orbit_energyplus
elif method.startswith("M"):
func = orbit_merra2
elif method.startswith("N"):
func = orbit_noaa
else:
raise NotImplementedError(method)
return func(utc)
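# Minimal usage sketch for the dispatcher above (comments only; assumes a numpy
# datetime64 input, as used throughout this module):
#   utc = np.datetime64("2018-06-21T12:00")
#   sinDec, cosDec, eqnOfTime, solFactor = orbit(utc, method="ashrae")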
def total_solar_irradiance_ashrae(utc):
"""
Return ASHRAE constant solar irradiance value (W/m²)
"""
return 1367.0 * (np.ones_like(utc).astype(float))
def total_solar_irradiance_cfsr(utc):
"""
Calculate CFSR total solar irradiance (W/m²) based on year and month
NB. Interpolates from yearly data
"""
#
year, month, _ = split_date(utc)
# TSI datum
TSI_datum = 1360.0
# van den Dool data (1979-2006); assumed valid in July of that year
# fmt: off
dTSI = np.array([
6.70, 6.70, 6.80, 6.60, 6.20, 6.00, 5.70, 5.70, 5.80, 6.20, 6.50,
6.50, 6.50, 6.40, 6.00, 5.80, 5.70, 5.70, 5.90, 6.40, 6.70, 6.70,
6.80, 6.70, 6.30, 6.10, 5.90, 5.70
])
# fmt: on
n = len(dTSI)
# Index into dTSI (float)
i = np.asarray(year).astype(int) - 1979 + (np.asarray(month) - 7) / 12
# Extend backward and/or forward assuming 11-year sunspot cycle
while np.any(i < 0):
i[i < 0] += 11
while np.any(i > n - 1):
i[i > n - 1] -= 11
# Add base
return TSI_datum + np.interp(i, np.arange(n), dTSI)
def total_solar_irradiance_merra2(utc):
"""
Calculate MERRA-2 total solar irradiance (W/m²) based on year and month
"""
year, month, _ = split_date(utc)
# CMIP5 data (1980-2008), monthly
# http://solarisheppa.geomar.de/solarisheppa/sites/default/files/data/CMIP5/TSI_WLS_mon_1882_2008.txt
# fmt: off
TSI = np.array([
[1366.8707, 1366.6385, 1367.0020, 1366.3137, 1366.4717, 1366.7686,
1366.7025, 1366.6991, 1366.6078, 1366.5760, 1366.1366, 1366.9659],
[1367.0270, 1366.5762, 1366.7291, 1367.0487, 1366.7421, 1366.5843,
1365.8833, 1367.0589, 1366.7669, 1366.4607, 1366.7618, 1366.6833],
[1367.0527, 1365.9164, 1365.9046, 1366.4697, 1366.4086, 1365.5996,
1366.1626, 1366.2002, 1366.5021, 1366.6118, 1366.4150, 1366.2152],
[1366.4198, 1366.2211, 1366.2509, 1366.2035, 1366.1029, 1366.1212,
1366.2853, 1366.4204, 1366.2336, 1366.0589, 1366.1071, 1366.0605],
[1365.4259, 1365.6620, 1366.1702, 1365.5668, 1365.7794, 1366.0970,
1366.1162, 1365.9801, 1365.8692, 1365.7895, 1365.6831, 1365.7649],
[1365.6116, 1365.7119, 1365.6604, 1365.5154, 1365.6400, 1365.6998,
1365.6543, 1365.7532, 1365.6687, 1365.5303, 1365.6323, 1365.6828],
[1365.6780, 1365.5509, 1365.6831, 1365.6565, 1365.7309, 1365.6649,
1365.6543, 1365.6022, 1365.6068, 1365.6499, 1365.7130, 1365.6751],
[1365.6707, 1365.6624, 1365.6726, 1365.6419, 1365.7595, 1365.8341,
1365.8257, 1365.7894, 1365.8603, 1365.8542, 1365.9870, 1366.0384],
[1366.0580, 1366.1113, 1365.9553, 1366.0675, 1366.3042, 1366.0166,
1365.8303, 1366.1485, 1366.4650, 1366.1152, 1366.2991, 1366.2632],
[1366.5443, 1366.6023, 1366.3792, 1366.5935, 1366.7821, 1366.3332,
1367.0719, 1366.5117, 1366.2650, 1366.9587, 1366.8282, 1366.8817],
[1366.8792, 1366.6387, 1366.6480, 1366.8708, 1366.5344, 1366.7742,
1366.4636, 1366.1724, 1366.8062, 1366.6181, 1365.8552, 1366.3904],
[1366.0560, 1366.3106, 1366.5274, 1367.0611, 1366.4294, 1366.4347,
1366.6702, 1366.4596, 1366.8890, 1366.1511, 1366.6261, 1365.9471],
[1366.5259, 1366.4305, 1366.7496, 1366.5985, 1366.4207, 1366.3006,
1366.0603, 1366.0338, 1366.1649, 1365.9236, 1366.1362, 1366.2879],
[1366.3059, 1365.9018, 1366.2124, 1366.1830, 1366.1459, 1366.1432,
1366.0951, 1366.0493, 1365.8926, 1365.7306, 1365.7609, 1365.9120],
[1365.7409, 1365.9919, 1366.0338, 1365.8676, 1365.7668, 1365.7674,
1365.7641, 1365.7805, 1365.6507, 1365.7192, 1365.8328, 1365.7086],
[1365.8283, 1365.8175, 1365.7226, 1365.6256, 1365.6620, 1365.7283,
1365.6993, 1365.7184, 1365.6976, 1365.6064, 1365.6769, 1365.6436],
[1365.6443, 1365.6287, 1365.5849, 1365.6109, 1365.6276, 1365.6290,
1365.6002, 1365.6662, 1365.6821, 1365.6348, 1365.4741, 1365.7028],
[1365.6989, 1365.6747, 1365.7008, 1365.7047, 1365.7390, 1365.7301,
1365.7250, 1365.7857, 1365.6768, 1365.9331, 1365.8454, 1365.8881],
[1365.9627, 1365.9199, 1365.8269, 1366.0931, 1365.9647, 1366.0578,
1366.2478, 1366.0894, 1366.0800, 1366.3429, 1366.2589, 1366.3730],
[1366.4806, 1366.2429, 1366.3572, 1366.2549, 1366.3835, 1366.3984,
1366.4362, 1366.4766, 1366.5841, 1366.2329, 1366.3558, 1366.3730],
[1366.7211, 1366.6320, 1366.4819, 1366.6498, 1366.3611, 1366.4507,
1366.5754, 1366.9738, 1366.5276, 1366.9746, 1366.9062, 1366.9492],
[1366.7811, 1366.8458, 1366.4121, 1366.4659, 1366.5200, 1366.5092,
1366.7203, 1366.4475, 1366.3010, 1366.8140, 1366.5200, 1366.8910],
[1367.3162, 1367.1783, 1367.0065, 1366.6454, 1366.6470, 1366.6873,
1366.1716, 1366.3053, 1366.4584, 1366.5261, 1366.4495, 1366.7773],
[1366.6034, 1366.5458, 1366.1968, 1366.2227, 1366.1753, 1366.0914,
1366.2437, 1366.2744, 1366.3611, 1365.5612, 1366.1956, 1366.2899],
[1366.1038, 1366.0890, 1366.1272, 1366.1742, 1366.0297, 1366.0179,
1365.7578, 1365.9036, 1366.0957, 1366.1166, 1366.0057, 1366.1552],
[1365.7864, 1365.9349, 1365.8956, 1365.8800, 1365.8463, 1365.8059,
1365.8595, 1365.9275, 1365.7988, 1365.8860, 1365.7792, 1365.8549],
[1365.8986, 1365.8728, 1365.7850, 1365.8058, 1365.9230, 1365.8340,
1365.8212, 1365.7067, 1365.8419, 1365.8270, 1365.7039, 1365.7087],
[1365.7173, 1365.7145, 1365.7544, 1365.7228, 1365.6932, 1365.7616,
1365.7506, 1365.7566, 1365.7159, 1365.7388, 1365.6680, 1365.6927],
[1365.7163, 1365.7366, 1365.6726, 1365.7146, 1365.7175, 1365.6730,
1365.6720, 1365.6570, 1365.6647, 1365.6759, 1365.7065, 1365.6926]
])
# fmt: on
n = TSI.shape[0]
# Index year
i = np.asarray(year).astype(int) - 1980
# Extend backward assuming 11-year sunspot cycle and forward assuming
# 13-year
while np.any(i < 0):
i[i < 0] += 11
while np.any(i > n - 1):
i[i > n - 1] -= 13
# Index month
j = np.asarray(month).astype(int) - 1
# Return index scaled by TIM correction (Total Irradiance Monitor)
return 0.9965 * TSI[i, j]
def total_solar_irradiance(utc, method=None):
"""
Calculate the total solar irradiance (W/m²) for given year.
Year can be fractional.
"""
if method is None:
method = "ASHRAE"
if callable(method):
func = method
else:
method = method.upper()
if method.startswith("A"):
func = total_solar_irradiance_ashrae
elif method.startswith("C"):
func = total_solar_irradiance_cfsr
elif method.startswith("E"):
func = total_solar_irradiance_ashrae
elif method.startswith("M"):
func = total_solar_irradiance_merra2
elif method.startswith("N"):
func = total_solar_irradiance_ashrae
else:
raise NotImplementedError(method)
return func(utc)
def hour_angle(lon, utc, eot):
"""
Calculate local hour angle (radians) given longitude (degrees),
date UTC (datetime64), and equation of time (radians)
Hour angle is displacement of sun east or west
"""
# Local solar hour angle (radians, noon = 0)
hh, mm, ss = split_time(utc)
H = 2 * np.pi * ((hh - 12) / 24 + mm / 1440 + ss / 86400)
# Correct based on equation of time
H += eot
# Move to longitude location
H += np.radians(lon)
# Return centered in -pi to pi
return ((H + np.pi) % (2 * np.pi)) - np.pi
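# Worked example (comments only): at lon = 0 with eot = 0, 12:00 UTC gives
# H = 0 (solar noon) and 18:00 UTC gives H = +pi/2, i.e. mid-afternoon sun.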
def sunset_hour_angle(sinLat, cosLat, sinDec, cosDec):
"""
Calculate local sunset hour angle (radians) given sines and cosines
of latitude and declination.
"""
return np.arccos(np.clip(-sinDec / cosDec * sinLat / cosLat, -1, 1))
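# Sanity check (comments only): at the equator sinLat = 0, so the arccos
# argument is 0 and the sunset hour angle is pi/2 -- a 12-hour day for any
# declination, as expected.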
def position(lat, lon, utc, method="ASHRAE"):
"""
Calculate solar position (x, y, z) in sky given (lat, lon) and UTC time
"""
# Calculate solar coefficients
sinDec, cosDec, eqnOfTime, solFactor = orbit(utc, method=method)
# Calculate hour angle
H = hour_angle(lon, utc, eqnOfTime)
sinH = np.sin(H)
cosH = np.cos(H)
# Sun position
sinLat = np.sin(np.radians(lat))
cosLat = np.cos(np.radians(lat))
return (
cosDec * sinH,
sinDec * cosLat - cosDec * sinLat * cosH,
sinDec * sinLat + cosDec * cosLat * cosH,
)
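# Usage sketch (comments only): the returned (x, y, z) is a unit sun vector;
# combine it with the conversion helpers further below, e.g.
#   x, y, z = position(33.64, -84.43, np.datetime64("2018-06-21T17:00"))
#   alt, azi = to_altitude(z), to_azimuth(x, y)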
def clear_sky_irradiance(z, tb, td, E0):
"""
Calculate the ASHRAE clear sky beam normal and diffuse horizontal
irradiance at elevation z, given pseudo-optical coefficients tb and td,
and extra-terrestrial radiation E0
"""
# Calculate air mass
m = air_mass(z)
# Calculate air mass exponents
B1, B2, B3, B4 = 1.454, -0.406, -0.268, 0.021  # B3 = -0.268 per ASHRAE, consistent with fit_taus
D1, D2, D3, D4 = 0.507, 0.205, -0.080, -0.190
ab = B1 + B2 * tb + B3 * td + B4 * tb * td
ad = D1 + D2 * tb + D3 * td + D4 * tb * td
# Beam and diffuse irradiance
return E0 * np.exp(-tb * m ** ab), E0 * np.exp(-td * m ** ad)
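# Usage sketch with illustrative (assumed) pseudo-optical depths:
#   Eb, Ed = clear_sky_irradiance(z=0.8, tb=0.35, td=2.4, E0=1380.0)
# Eb is beam normal and Ed diffuse horizontal irradiance, both in W/m^2.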
def elevation(lat, lon, utc, method="ASHRAE", interval=None, h=None):
"""
Calculate the elevation z and extraterrestrial radiation E0 at
(lat, lon) and UTC time.
Result is either "instantaneous" (default) or the average over
an "hourly" or "daily" interval.
If hour angle h (rad) is supplied (e.g. solar noon = 0), instantaneous
elevations will be based on h, otherwise h is calculated from UTC.
"""
# Calculate solar coefficients at UTC
sinDec, cosDec, eqnOfTime, solFactor = orbit(utc, method=method)
# Calculate extraterrestrial radiance at UTC
E0 = solFactor * total_solar_irradiance(utc, method=method)
# Latitudinal sines
sinLat = np.sin(np.radians(lat))
cosLat = np.cos(np.radians(lat))
def int_elevation(h):
"""
Instant elevation at hour angle h
"""
return np.maximum(sinDec * sinLat + cosDec * cosLat * np.cos(h), 0)
def avg_elevation(h1, h2):
"""
Integrated elevation between h1 and h2
"""
return np.maximum(
sinLat * sinDec * (h2 - h1) + cosLat * cosDec * (np.sin(h2) - np.sin(h1)), 0
)
# Default interval is instantaneous
if interval is None:
interval = "instant"
interval = interval.lower()[0]
# Determine elevation
if interval == "i":
"""
Instantaneous
"""
# Instantaneous hour angle
if h is None:
h = hour_angle(lon, utc, eqnOfTime)
# Instantaneous elevation
z = int_elevation(h)
elif interval == "m":
"""
Instantaneous mid-point of previous hour, i.e. approximate average
"""
# Instantaneous hour angle at 30 minutes prior
h = hour_angle(lon, utc - np.timedelta64(30, "m"), eqnOfTime)
# Instantaneous elevation
z = int_elevation(h)
elif interval == "h":
"""
Hourly
"""
# Sunset hour angle
h0 = sunset_hour_angle(sinLat, cosLat, sinDec, cosDec)
# One hour (radians)
dh = np.pi / 12
# Start and end hour angles
h = hour_angle(lon, utc, eqnOfTime)
a = (h - dh + np.pi) % (2 * np.pi) - np.pi
b = a + dh
# Default elevation is zero
z = np.zeros_like(h)
# Conditions
a1 = a < -h0
a2 = (a >= -h0) & (a < h0)
# b1 = (b < -h0)
b2 = (b >= -h0) & (b < h0)
b3 = b >= h0
# Dawn
np.copyto(z, avg_elevation(-h0, b), where=a1 & b2)
# Comes up very briefly between a & b
np.copyto(z, avg_elevation(-h0, h0), where=a1 & b3)
# Sun's up
np.copyto(z, avg_elevation(a, b), where=a2 & b2)
# Dusk
np.copyto(z, avg_elevation(a, h0), where=a2 & b3)
# Scale by interval
z /= dh
elif interval == "d":
"""
Daily
"""
# Sunset hour angle
h = sunset_hour_angle(sinLat, cosLat, sinDec, cosDec)
# Average daily elevation
z = avg_elevation(-h, h)
# Scale by 24-hour interval
z /= 2 * np.pi
else:
raise ValueError(
"Interval must be one of 'instant', 'midpoint', " "'hourly', or 'daily'"
)
return z, E0
def air_mass(z):
"""
Calculate air mass based on Kasten & Young 1989
"""
beta = np.degrees(np.arcsin(z))
return 1 / (z + 0.50572 * (6.07995 + beta) ** -1.6364)
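# Worked example (comments only): at the zenith (z = 1, beta = 90 deg) the
# Kasten & Young fit gives m ~ 1.0; at the horizon (z = 0) it stays finite at
# m ~ 38 instead of diverging like the plain 1/cos(Z) model.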
def erbs(Kt, **kwargs):
"""
Calculate diffuse fraction as a function of clearness index kt
via Erbs relation
"""
Kt = np.asarray(Kt)
Kd = 0.9511 - 0.1604 * Kt + 4.388 * Kt ** 2 - 16.638 * Kt ** 3 + 12.336 * Kt ** 4
np.copyto(Kd, 1.0 - 0.09 * Kt, where=Kt <= 0.22)
np.copyto(Kd, 0.165, where=Kt > 0.80)
return Kd
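# Usage sketch (assumes Kt is the clearness index E_gh/E0h in 0..1):
#   Kd = erbs(np.array([0.1, 0.5, 0.9]))  # diffuse fraction per sample
# e.g. Kt = 0.1 falls in the Kt <= 0.22 branch, giving Kd = 1 - 0.09*0.1 = 0.991.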
def orgill_hollands(Kt, **kwargs):
"""
Calculate diffuse fraction as a function of clearness index kt
via Orgill Hollands relation
"""
Kt = np.asarray(Kt)
Kd = 1.557 - 1.84 * Kt
np.copyto(Kd, 1.0 - 0.249 * Kt, where=Kt <= 0.35)
np.copyto(Kd, 0.177, where=Kt > 0.75)
return Kd
def ruiz_arias(Kt, z, **kwargs):
"""
Calculate diffuse fraction as a function of clearness index kt
via Ruiz-Arias
"""
m = air_mass(z)
a = (0.944, 1.538, 2.808, -5.759, 2.276, -0.125, 0.013)
return np.clip(
a[0]
- a[1]
* np.exp(-np.exp(a[2] + a[3] * Kt + a[4] * Kt ** 2 + a[5] * m + a[6] * m ** 2)),
0,
1,
)
def engerer(Kt, Ktc, z, h, **kwargs):
"""
Calculate diffuse fraction as a function of clearness index kt
via the Engerer2 relation.
kt is clearness index (E_gh/E0)
ktc is clear sky clearness index (E_ghc/E0)
z is cos(zenith), dimensionless
h is hour angle, radians
"""
# Apparent solar time in hours
AST = 12 / np.pi * h
# Zenith angle in degrees
theta_z = np.degrees(np.arccos(z))
dKtc = Ktc - Kt
Kde = np.maximum(0, 1.0 - Ktc / Kt)
C = 4.2336e-2
beta = (-3.7912, 7.5479, -1.0036e-2, 3.1480e-3, -5.3146, 1.7073)
return np.clip(
C
+ (1.0 - C)
/ (
1.0
+ np.exp(
beta[0]
+ beta[1] * Kt
+ beta[2] * AST
+ beta[3] * theta_z
+ beta[4] * dKtc
)
)
+ beta[5] * Kde,
0,
1,
)
def to_utc(lst, tz=0):
"""
Convert datetime64 in local standard time to UTC
"""
return lst - np.timedelta64(int(np.rint(tz * 60)), "m")
def to_lst(utc, tz=0):
"""
Convert datetime64 in UTC to local standard time
"""
return utc + np.timedelta64(int(np.rint(tz * 60)), "m")
def to_altitude(z):
"""
Convert z component of solar vector into altitude (deg)
i.e. angle from horizon
"""
return np.degrees(np.arcsin(z))
def to_zenith(z):
"""
Convert z component of solar vector into zenith angle (deg)
i.e. angle from vertical
"""
return np.degrees(np.arccos(z))
def to_azimuth(x, y):
"""
Convert x, y of solar vector into azimuth (deg)
i.e. angle clockwise from North (+y)
"""
return (-np.degrees(np.arctan2(x, y))) % 360
def nearest_hour(date):
"""
Convert datetime64 to nearest hour
"""
# Add 30 minutes (avoid in-place +=, which would mutate array inputs)
date = date + np.timedelta64(30, "m")
# Truncate on hour
return date.astype("<M8[h]")
def fit_taus(zi, Kti, iter_max=42, eps_max=1e-6, plot=False, quiet=False):
"""
Fit the ASHRAE pseudo-spectral coefficients tau_b & tau_d given a
set of elevation z and clear sky index Kt values.
"""
# Need at least two points
if len(Kti) < 2:
if not quiet:
print("Warning: Insufficient points to fit taus")
return np.nan, np.nan
# First estimate
tb, td = 0.4, 2.3
# tau air mass exponent coefficients
B1, B2, B3, B4 = 1.454, -0.406, -0.268, 0.021
D1, D2, D3, D4 = 0.507, 0.205, -0.080, -0.190
# Calculate air mass
mi = air_mass(zi)
logm = np.log(mi)
# Newton iterations
def calc(tb, td):
# Current air mass exponents
ab = B1 + B2 * tb + B3 * td + B4 * tb * td
ad = D1 + D2 * tb + D3 * td + D4 * tb * td
mab = mi ** ab
mad = mi ** ad
Kb = np.exp(-tb * mab)
Kd = np.exp(-td * mad) / zi
Kt = Kb + Kd
# Form Jacobian J
dKb_dtb = -Kb * mab * (1 + tb * logm * (B2 + B4 * td))
dKd_dtb = -Kd * mad * td * logm * (D2 + D4 * td)
dKb_dtd = -Kb * mab * tb * logm * (B3 + B4 * tb)
dKd_dtd = -Kd * mad * (1 + td * logm * (D3 + D4 * tb))
dKt_dtb = dKb_dtb + dKd_dtb
dKt_dtd = dKb_dtd + dKd_dtd
return Kt, dKt_dtb, dKt_dtd
# Levenberg–Marquardt damping factor
damping = 1
taubs = [tb]
tauds = [td]
for i in range(iter_max):
# Calculate current Kt and its gradient
Kt, dKt_dtb, dKt_dtd = calc(tb, td)
# Residuals
dKt = Kti - Kt
R = np.sum(dKt ** 2)
# Form A, [J]^T[J]
Abb = (1 + damping) * np.sum(dKt_dtb ** 2)
Abd = np.sum(dKt_dtb * dKt_dtd)
Add = (1 + damping) * np.sum(dKt_dtd ** 2)
# Form forcing vector [J]^[dKt]
Bb = np.sum(dKt_dtb * dKt)
Bd = np.sum(dKt_dtd * dKt)
# Solve A*t = B by Cramer's rule, Giddy-up
try:
detA = Abb * Add - Abd ** 2
dtb = (Bb * Add - Bd * Abd) / detA
dtd = (Abb * Bd - Abd * Bb) / detA
except OverflowError:
if not quiet:
print("Warning: Determinant overflow while fitting taus")
return np.nan, np.nan
except ZeroDivisionError:
if not quiet:
print("Warning: Division by zero while fitting taus")
return np.nan, np.nan
except Exception:
raise
# Test
Ktt, dKtt_dtb, dKtt_dtd = calc(tb + dtb, td + dtd)
Rt = np.sum((Kti - Ktt) ** 2)
if Rt >= R:
# Worse (need more steep descent)
damping *= 10
else:
# Better (need more Newton)
damping /= 10
# Correct
tb += dtb
td += dtd
R = Rt
taubs.append(tb)
tauds.append(td)
if (abs(dtb) < eps_max) and (abs(dtd) < eps_max):
break
else:
# Exceeded iterMax iterations
if not quiet:
print(
"Warning: Exceeded",
iter_max,
"iterations while fitting taus:",
tb,
td,
dtb,
dtd,
)
return np.nan, np.nan
if plot:
plt.rc("text", usetex=True)
plt.rc("text.latex", unicode=True)
plt.rc("text.latex", preamble=r"\usepackage{cmbright}")
f, ax = plt.subplots(figsize=(5, 5), dpi=200)
ax.plot(mi, Kti, ".", color="orange", markersize=2)
ax.plot(mi, Kt, ".", color="black", markersize=4)
ax.set_xlabel("Air Mass $m$", fontsize="smaller")
ax.set_ylabel("Clearness Index $K_t$", fontsize="smaller")
txt = "\n".join(
"%d points" % len(zi), "$\\tau_b$ = %.3f" % tb, "$\\tau_d$ = %.3f" % td
)
ax.text(0.9, 0.9, txt, ha="right", va="top", transform=ax.transAxes)
plt.tight_layout()
f.savefig("fit_taus_%d.png" % len(Kti), dpi=f.dpi, bbox_inches="tight")
return tb, td
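# Usage sketch (assumption: zi holds sin(solar altitude) and Kti the matching
# clear-sky clearness indices, both 1-D arrays of equal length):
#   taub, taud = fit_taus(zi, Kti, quiet=True)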
def fit_monthly_taus(z, Kt, lat=None, lon=None, noon_flux=False, **kwargs):
# Loop over months
months = list(range(1, 13))
clear_sky = {
"taub": [],
"taud": [],
}
if noon_flux:
clear_sky["Ebnoon"] = []
clear_sky["Ednoon"] = []
for month in months:
# Restrict to month
i = z.index.month == month
# Fit via non-linear least squares
taub, taud = fit_taus(z[i], Kt[i], **kwargs)
clear_sky["taub"].append(taub)
clear_sky["taud"].append(taud)
if noon_flux:
if np.isnan(taub) or np.isnan(taud):
clear_sky["Ebnoon"].append(np.nan)
clear_sky["Ednoon"].append(np.nan)
continue
# Calculate noon elevation and solar ETR on the 21st day
utc = join_date(2001, m=month, d=21, hh=12)
z_noon, E0_noon = elevation(
lat, lon, utc, method="ASHRAE", interval="instant", h=0
)
# Calculate corresponding beam and diffuse irradiance
Eb, Ed = clear_sky_irradiance(z_noon, taub, taud, E0_noon)
clear_sky["Ebnoon"].append(Eb)
clear_sky["Ednoon"].append(Ed)
return clear_sky
def perez(Eb, Ed, E0, E0h, Td):
"""
Calculate the global, direct, diffuse, and zenith illuminances from
the beam, diffuse, extraterrestrial normal and direct irradiances and
dew point temperature via the Perez (1990) relationships
"""
# Sun up and working
d = Ed > 0
# Calculate elevation z=cosZ
z = E0h[d] / E0[d]
# Calculate zenith angle (radians)
Z = np.arccos(z)
Z3 = Z ** 3
# Calculate air mass
m = air_mass(z)
# Sky clearness (eqn 1)
kappa = 1.04
epsilon = ((Ed[d] + Eb[d]) / Ed[d] + kappa * Z3) / (1 + kappa * Z3)
# Sky brightness (eqn 2)
Delta = Ed[d] * m / E0[d]
# Precipitable water (cm, eqn 3)
W = np.exp(0.07 * Td[d] - 0.075)
# Sky clearness categories (from overcast to clear)
bin_edges = [1, 1.065, 1.230, 1.500, 1.950, 2.800, 4.500, 6.200]
# Find clearnness bin
i = np.searchsorted(bin_edges, epsilon, side="right") - 1
# Global luminous efficacy (table 4)
ai = np.array([96.63, 107.54, 98.73, 92.72, 86.73, 88.34, 78.63, 99.65])
bi = np.array([-0.47, 0.79, 0.70, 0.56, 0.98, 1.39, 1.47, 1.86])
ci = np.array([11.50, 1.79, 4.40, 8.36, 7.10, 6.06, 4.93, -4.46])
di = np.array([-9.16, -1.19, -6.95, -8.31, -10.94, -7.60, -11.37, -3.15])
# Global illuminance (lux, eqn. 6)
It = Ed.copy()
It[d] = (Eb[d] * z + Ed[d]) * (
ai[i] + bi[i] * W + ci[i] * z + di[i] * np.log(Delta)
)
# Direct luminous efficacy (table 4)
ai = np.array([57.20, 98.99, 109.83, 110.34, 106.36, 107.19, 105.75, 101.18])
bi = np.array([-4.55, -3.46, -4.90, -5.84, -3.97, -1.25, 0.77, 1.58])
ci = np.array([-2.98, -1.21, -1.71, -1.99, -1.75, -1.51, -1.25, -1.10])
di = np.array([117.12, 12.38, -8.81, -4.56, -6.16, -26.73, -34.44, -8.29])
# Direct illuminance (lux, eqn. 8)
Ib = Ed.copy()
Ib[d] = Eb[d] * (ai[i] + bi[i] * W + ci[i] * np.exp(5.73 * Z - 5) + di[i] * Delta)
Ib = np.maximum(0, Ib)
# Diffuse luminous efficacy (table 4)
ai = np.array([97.24, 107.22, 104.97, 102.39, 100.71, 106.42, 141.88, 152.23])
bi = np.array([-0.46, 1.15, 2.96, 5.59, 5.94, 3.83, 1.90, 0.35])
ci = np.array([12.00, 0.59, -5.53, -13.95, -22.75, -36.15, -53.24, -45.27])
di = np.array([-8.91, -3.95, -8.77, -13.90, -23.74, -28.83, -14.03, -7.98])
# Diffuse illuminance (lux, eqn. 7)
Id = Ed.copy()
Id[d] = Ed[d] * (ai[i] + bi[i] * W + ci[i] * z + di[i] * np.log(Delta))
# Zenith luminance prediction (table 4)
ai = np.array([40.86, 26.58, 19.34, 13.25, 14.47, 19.76, 28.39, 42.91])
ci = np.array([26.77, 14.73, 2.28, -1.39, -5.09, -3.88, -9.67, -19.62])
cip = np.array([-29.59, 58.46, 100.00, 124.79, 160.09, 154.61, 151.58, 130.80])
di = np.array([-45.75, -21.25, 0.25, 15.66, 9.13, -19.21, -69.39, -164.08])
# Zenith luminance (Cd/m2, eqn. 10)
Lz = Ed.copy()
Lz[d] = Ed[d] * (ai[i] + ci[i] * z + cip[i] * np.exp(-3 * Z) + di[i] * Delta)
return It, Ib, Id, Lz
def test_coeffs(year=2018):
t1 = np.datetime64("%04d-01-01" % year)
t2 = np.datetime64("%04d-01-01" % (year + 1,))
utc = np.arange(t1, t2)
f, ax = plt.subplots(4)
for method in ["ashrae", "energyplus", "cfsr", "merra2", "noaa"]:
coeffs = orbit(utc, method=method)
for i, ylabel in enumerate(["sinDec", "cosDec", "eqnOfTime", "solFactor"]):
ax[i].plot(utc, coeffs[i], label=method)
ax[i].set_ylabel(ylabel)
ax[i].legend(loc=0, fontsize="smaller")
plt.tight_layout()
plt.show()
def test_location(lat=33.64, lon=-84.43, dates=None):
if dates is None:
dates = [np.datetime64(datetime.datetime.utcnow())]
for utc in dates:
# 24 hours centered around UTC
t = nearest_hour(utc) + np.arange(-12 * 60, 13 * 60, dtype="<m8[m]")
print(nearest_hour(utc))
f, ax = plt.subplots(3)
for method in ["ashrae", "energyplus", "cfsr", "merra", "noaa"]:
x, y, z = position(lat, lon, t, method=method)
ax[0].plot_date(
t.astype(datetime.datetime), to_altitude(z), label=method, fmt="-"
)
ax[1].plot_date(
t.astype(datetime.datetime), to_azimuth(x, y), label=method, fmt="-"
)
z, E0 = elevation(lat, lon, t, method=method)
ax[2].plot_date(t.astype(datetime.datetime), E0 * z, fmt="-", label=method)
x0, y0, z0 = position(lat, lon, utc, method=method)
print(to_altitude(z0), to_azimuth(x0, y0))
ax[0].set_ylabel("Alt")
ax[1].set_ylabel("Azi")
ax[2].set_ylabel("TOA Horz")
ax[2].legend(loc="best", fontsize="smaller")
f.autofmt_xdate()
plt.tight_layout()
plt.show()
def test_integration(lat=33.64, lon=-84.43, utc=None):
if utc is None:
utc = np.datetime64(datetime.datetime.utcnow())
print("***")
print(lat, lon, utc, to_lst(utc, tz=-5))
for interval in ["instant", "hourly", "daily"]:
print(interval, elevation(lat, lon, utc, method="ASHRAE", interval=interval))
def test_solar_irradiance():
years, months = np.mgrid[1979:2019, 1:13]
utc = join_date(y=years.flatten(), m=months.flatten())
f, ax = plt.subplots()
for method in ["ashrae", "cfsr", "merra2"]:
tsi = total_solar_irradiance(utc, method=method)
ax.plot_date(
utc.astype(datetime.datetime), tsi, fmt="-", label=method, clip_on=False
)
ax.set_ylim(1360, 1368)
ax.get_yaxis().get_major_formatter().set_useOffset(False)
ax.legend(loc="best", fontsize="smaller")
ax.set_ylabel("TSI")
plt.tight_layout()
plt.show()
def test():
test_solar_irradiance()
test_coeffs(year=2011)
test_location()
test_integration(lat=43.5448, lon=-80.2482)
if __name__ == "__main__":
test()
|
py | b416e5e02ecbfe1ca7418456f7ec43a9fb8249fc | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Valiant Systems and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestSubEquipment(unittest.TestCase):
pass
|
py | b416e6e8c0f123e8e80d687b508c12065b6f898c | import cloudmesh as cm
cm.banner('HW3 ex2')
print(cm.shell('cloud list'))
print(cm.shell('cloud on india'))
print(cm.shell('cloud list'))
|
py | b416e94ebd00d6dbd090f4fcf4ae171da9e84add | """
ASGI config for minor project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'minor.settings')
application = get_asgi_application()
|
py | b416eaa90ab3e8d096d824ef04eebc0c9df85ab4 | # (file content fully masked in the source; nothing recoverable)
|
py | b416eac27a875df11ec492069338e257d4e3ba87 | # -*- coding: UTF-8 -*-
"""
This app allows us to simulate a tank. The tank has a maximum volume, an initial
volume, an input flow and an output flow; the two flows are the model's variables.
We can combine other mathematical models with this one, such as level sensors that
change with the tank volume, or valves and pumps that modify the input or output flow.
"""
import threading
import logging
from .models import float_sensor
from .float_sensor import float_sensor_simulator
class scadathread(threading.Thread):
def __init__(self,
server=None,
database=None,
end=threading.Event(),
debug_logger=logging):
threading.Thread.__init__(self)
self.logger=debug_logger
self.database=database
self.end=end
self.thread_list={}
for var_float in float_sensor.objects.all():
try:
float_sensor_instance=float_sensor_simulator(database=self.database,
end=self.end,
logger=self.logger,
switch_volume = var_float.switch_volume,
switch_logic= var_float.switch_logic,
level_var= var_float.tank.volume,
output_var= var_float.output_var.name)
self.thread_list[var_float.name]=float_sensor_instance
except Exception:
self.logger.error('Error creating float sensor: %s' %var_float.name)
continue
def run(self):
for thread_name in self.thread_list:
self.thread_list[thread_name].start()
for thread_name in self.thread_list:
self.thread_list[thread_name].join()
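# Hypothetical wiring sketch (names below are assumed, not part of this app):
#   stop = threading.Event()
#   sim = scadathread(database=db, end=stop, debug_logger=logging)
#   sim.start()   # run() spawns one float_sensor_simulator thread per sensor
#   ...
#   stop.set()    # ask the simulators to finish; run() joins them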
|
py | b416ebad537931c8a0b1ab8b1d2867805236fad7 | """The tests for the Modbus init.
This file is responsible for testing:
- pymodbus API
- Functionality of class ModbusHub
- Coverage 100%:
__init__.py
const.py
modbus.py
validators.py
baseplatform.py (only BasePlatform)
It uses binary_sensors/sensors to do black box testing of the read calls.
"""
from datetime import timedelta
import logging
from unittest import mock
from pymodbus.exceptions import ModbusException
from pymodbus.pdu import ExceptionResponse, IllegalFunctionRequest
import pytest
import voluptuous as vol
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.modbus.const import (
ATTR_ADDRESS,
ATTR_HUB,
ATTR_SLAVE,
ATTR_UNIT,
ATTR_VALUE,
CALL_TYPE_COIL,
CALL_TYPE_DISCRETE,
CALL_TYPE_REGISTER_HOLDING,
CALL_TYPE_REGISTER_INPUT,
CALL_TYPE_WRITE_COIL,
CALL_TYPE_WRITE_COILS,
CALL_TYPE_WRITE_REGISTER,
CALL_TYPE_WRITE_REGISTERS,
CONF_BAUDRATE,
CONF_BYTESIZE,
CONF_DATA_TYPE,
CONF_INPUT_TYPE,
CONF_MSG_WAIT,
CONF_PARITY,
CONF_STOPBITS,
CONF_SWAP,
CONF_SWAP_BYTE,
CONF_SWAP_WORD,
DEFAULT_SCAN_INTERVAL,
MODBUS_DOMAIN as DOMAIN,
RTUOVERTCP,
SERIAL,
SERVICE_RESTART,
SERVICE_STOP,
SERVICE_WRITE_COIL,
SERVICE_WRITE_REGISTER,
TCP,
UDP,
DataType,
)
from homeassistant.components.modbus.validators import (
duplicate_entity_validator,
duplicate_modbus_validator,
number_validator,
struct_validator,
)
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
ATTR_STATE,
CONF_ADDRESS,
CONF_BINARY_SENSORS,
CONF_COUNT,
CONF_DELAY,
CONF_HOST,
CONF_METHOD,
CONF_NAME,
CONF_PORT,
CONF_SCAN_INTERVAL,
CONF_SENSORS,
CONF_SLAVE,
CONF_STRUCTURE,
CONF_TIMEOUT,
CONF_TYPE,
EVENT_HOMEASSISTANT_STOP,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from .conftest import (
TEST_ENTITY_NAME,
TEST_MODBUS_HOST,
TEST_MODBUS_NAME,
TEST_PORT_SERIAL,
TEST_PORT_TCP,
ReadResult,
)
from tests.common import async_fire_time_changed
@pytest.fixture(name="mock_modbus_with_pymodbus")
async def mock_modbus_with_pymodbus_fixture(hass, caplog, do_config, mock_pymodbus):
"""Load integration modbus using mocked pymodbus."""
caplog.clear()
caplog.set_level(logging.ERROR)
config = {DOMAIN: do_config}
assert await async_setup_component(hass, DOMAIN, config) is True
await hass.async_block_till_done()
assert DOMAIN in hass.config.components
assert caplog.text == ""
yield mock_pymodbus
async def test_number_validator():
"""Test number validator."""
for value, value_type in (
(15, int),
(15.1, float),
("15", int),
("15.1", float),
(-15, int),
(-15.1, float),
("-15", int),
("-15.1", float),
):
assert isinstance(number_validator(value), value_type)
try:
number_validator("x15.1")
except vol.Invalid:
return
pytest.fail("Number_validator not throwing exception")
@pytest.mark.parametrize(
"do_config",
[
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 2,
CONF_DATA_TYPE: DataType.STRING,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_DATA_TYPE: DataType.INT32,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_DATA_TYPE: DataType.INT32,
CONF_SWAP: CONF_SWAP_BYTE,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 2,
CONF_DATA_TYPE: DataType.CUSTOM,
CONF_STRUCTURE: ">i",
CONF_SWAP: CONF_SWAP_BYTE,
},
],
)
async def test_ok_struct_validator(do_config):
"""Test struct validator."""
try:
struct_validator(do_config)
except vol.Invalid:
pytest.fail("struct_validator unexpected exception")
@pytest.mark.parametrize(
"do_config",
[
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 8,
CONF_DATA_TYPE: DataType.INT,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 8,
CONF_DATA_TYPE: DataType.CUSTOM,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 8,
CONF_DATA_TYPE: DataType.CUSTOM,
CONF_STRUCTURE: "no good",
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 20,
CONF_DATA_TYPE: DataType.CUSTOM,
CONF_STRUCTURE: ">f",
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 1,
CONF_DATA_TYPE: DataType.CUSTOM,
CONF_STRUCTURE: ">f",
CONF_SWAP: CONF_SWAP_WORD,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_COUNT: 1,
CONF_DATA_TYPE: DataType.STRING,
CONF_STRUCTURE: ">f",
CONF_SWAP: CONF_SWAP_WORD,
},
],
)
async def test_exception_struct_validator(do_config):
"""Test struct validator."""
try:
struct_validator(do_config)
except vol.Invalid:
return
pytest.fail("struct_validator missing exception")
@pytest.mark.parametrize(
"do_config",
[
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST + " 2",
CONF_PORT: TEST_PORT_TCP,
},
],
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_NAME: TEST_MODBUS_NAME + " 2",
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
],
],
)
async def test_duplicate_modbus_validator(do_config):
"""Test duplicate modbus validator."""
duplicate_modbus_validator(do_config)
assert len(do_config) == 1
@pytest.mark.parametrize(
"do_config",
[
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
},
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 119,
CONF_SLAVE: 0,
},
],
}
],
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
},
{
CONF_NAME: TEST_ENTITY_NAME + " 2",
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
},
],
}
],
],
)
async def test_duplicate_entity_validator(do_config):
"""Test duplicate entity validator."""
duplicate_entity_validator(do_config)
assert len(do_config[0][CONF_SENSORS]) == 1
@pytest.mark.parametrize(
"do_config",
[
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
CONF_TIMEOUT: 30,
CONF_DELAY: 10,
},
{
CONF_TYPE: UDP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_TYPE: UDP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
CONF_TIMEOUT: 30,
CONF_DELAY: 10,
},
{
CONF_TYPE: RTUOVERTCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
},
{
CONF_TYPE: RTUOVERTCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
CONF_TIMEOUT: 30,
CONF_DELAY: 10,
},
{
CONF_TYPE: SERIAL,
CONF_BAUDRATE: 9600,
CONF_BYTESIZE: 8,
CONF_METHOD: "rtu",
CONF_PORT: TEST_PORT_SERIAL,
CONF_PARITY: "E",
CONF_STOPBITS: 1,
CONF_MSG_WAIT: 100,
},
{
CONF_TYPE: SERIAL,
CONF_BAUDRATE: 9600,
CONF_BYTESIZE: 8,
CONF_METHOD: "rtu",
CONF_PORT: TEST_PORT_SERIAL,
CONF_PARITY: "E",
CONF_STOPBITS: 1,
CONF_NAME: TEST_MODBUS_NAME,
CONF_TIMEOUT: 30,
CONF_DELAY: 10,
},
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_DELAY: 5,
},
[
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
},
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: f"{TEST_MODBUS_NAME} 2",
},
{
CONF_TYPE: SERIAL,
CONF_BAUDRATE: 9600,
CONF_BYTESIZE: 8,
CONF_METHOD: "rtu",
CONF_PORT: TEST_PORT_SERIAL,
CONF_PARITY: "E",
CONF_STOPBITS: 1,
CONF_NAME: f"{TEST_MODBUS_NAME} 3",
},
],
{
# Special test for scan_interval validator with scan_interval: 0
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
CONF_SCAN_INTERVAL: 0,
}
],
},
],
)
async def test_config_modbus(hass, caplog, mock_modbus_with_pymodbus):
"""Run configuration test for modbus."""
VALUE = "value"
FUNC = "func"
DATA = "data"
SERVICE = "service"
@pytest.mark.parametrize(
"do_config",
[
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: SERIAL,
CONF_BAUDRATE: 9600,
CONF_BYTESIZE: 8,
CONF_METHOD: "rtu",
CONF_PORT: TEST_PORT_SERIAL,
CONF_PARITY: "E",
CONF_STOPBITS: 1,
},
],
)
@pytest.mark.parametrize(
"do_write",
[
{
DATA: ATTR_VALUE,
VALUE: 15,
SERVICE: SERVICE_WRITE_REGISTER,
FUNC: CALL_TYPE_WRITE_REGISTER,
},
{
DATA: ATTR_VALUE,
VALUE: [1, 2, 3],
SERVICE: SERVICE_WRITE_REGISTER,
FUNC: CALL_TYPE_WRITE_REGISTERS,
},
{
DATA: ATTR_STATE,
VALUE: False,
SERVICE: SERVICE_WRITE_COIL,
FUNC: CALL_TYPE_WRITE_COIL,
},
{
DATA: ATTR_STATE,
VALUE: [True, False, True],
SERVICE: SERVICE_WRITE_COIL,
FUNC: CALL_TYPE_WRITE_COILS,
},
],
)
@pytest.mark.parametrize(
"do_return",
[
{VALUE: ReadResult([0x0001]), DATA: ""},
{VALUE: ExceptionResponse(0x06), DATA: "Pymodbus:"},
{VALUE: IllegalFunctionRequest(0x06), DATA: "Pymodbus:"},
{VALUE: ModbusException("fail write_"), DATA: "Pymodbus:"},
],
)
@pytest.mark.parametrize(
"do_unit",
[
ATTR_UNIT,
ATTR_SLAVE,
],
)
async def test_pb_service_write(
hass, do_write, do_return, do_unit, caplog, mock_modbus_with_pymodbus
):
"""Run test for service write_register."""
func_name = {
CALL_TYPE_WRITE_COIL: mock_modbus_with_pymodbus.write_coil,
CALL_TYPE_WRITE_COILS: mock_modbus_with_pymodbus.write_coils,
CALL_TYPE_WRITE_REGISTER: mock_modbus_with_pymodbus.write_register,
CALL_TYPE_WRITE_REGISTERS: mock_modbus_with_pymodbus.write_registers,
}
data = {
ATTR_HUB: TEST_MODBUS_NAME,
do_unit: 17,
ATTR_ADDRESS: 16,
do_write[DATA]: do_write[VALUE],
}
mock_modbus_with_pymodbus.reset_mock()
caplog.clear()
caplog.set_level(logging.DEBUG)
func_name[do_write[FUNC]].return_value = do_return[VALUE]
await hass.services.async_call(DOMAIN, do_write[SERVICE], data, blocking=True)
assert func_name[do_write[FUNC]].called
assert func_name[do_write[FUNC]].call_args[0] == (
data[ATTR_ADDRESS],
data[do_write[DATA]],
)
if do_return[DATA]:
assert caplog.messages[-1].startswith("Pymodbus:")
@pytest.fixture(name="mock_modbus_read_pymodbus")
async def mock_modbus_read_pymodbus_fixture(
hass,
do_group,
do_type,
do_scan_interval,
do_return,
do_exception,
caplog,
mock_pymodbus,
):
"""Load integration modbus using mocked pymodbus."""
caplog.clear()
caplog.set_level(logging.ERROR)
mock_pymodbus.read_coils.side_effect = do_exception
mock_pymodbus.read_discrete_inputs.side_effect = do_exception
mock_pymodbus.read_input_registers.side_effect = do_exception
mock_pymodbus.read_holding_registers.side_effect = do_exception
mock_pymodbus.read_coils.return_value = do_return
mock_pymodbus.read_discrete_inputs.return_value = do_return
mock_pymodbus.read_input_registers.return_value = do_return
mock_pymodbus.read_holding_registers.return_value = do_return
config = {
DOMAIN: [
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
do_group: [
{
CONF_INPUT_TYPE: do_type,
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 51,
CONF_SLAVE: 0,
CONF_SCAN_INTERVAL: do_scan_interval,
}
],
}
],
}
now = dt_util.utcnow()
with mock.patch("homeassistant.helpers.event.dt_util.utcnow", return_value=now):
assert await async_setup_component(hass, DOMAIN, config) is True
await hass.async_block_till_done()
assert DOMAIN in hass.config.components
assert caplog.text == ""
now = now + timedelta(seconds=DEFAULT_SCAN_INTERVAL + 60)
with mock.patch("homeassistant.helpers.event.dt_util.utcnow", return_value=now):
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
yield mock_pymodbus
@pytest.mark.parametrize(
"do_domain, do_group,do_type,do_scan_interval",
[
[SENSOR_DOMAIN, CONF_SENSORS, CALL_TYPE_REGISTER_HOLDING, 10],
[SENSOR_DOMAIN, CONF_SENSORS, CALL_TYPE_REGISTER_INPUT, 10],
[BINARY_SENSOR_DOMAIN, CONF_BINARY_SENSORS, CALL_TYPE_DISCRETE, 10],
[BINARY_SENSOR_DOMAIN, CONF_BINARY_SENSORS, CALL_TYPE_COIL, 1],
],
)
@pytest.mark.parametrize(
"do_return,do_exception,do_expect_state,do_expect_value",
[
[ReadResult([1]), None, STATE_ON, "1"],
[IllegalFunctionRequest(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE],
[ExceptionResponse(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE],
[
ReadResult([1]),
ModbusException("fail read_"),
STATE_UNAVAILABLE,
STATE_UNAVAILABLE,
],
],
)
async def test_pb_read(
hass, do_domain, do_expect_state, do_expect_value, caplog, mock_modbus_read_pymodbus
):
"""Run test for different read."""
# Check state
entity_id = f"{do_domain}.{TEST_ENTITY_NAME}".replace(" ", "_")
state = hass.states.get(entity_id).state
assert hass.states.get(entity_id).state
    # this if is needed to avoid exploding the parametrize matrix:
    # sensors expose a value, binary sensors expose a state
if do_domain == SENSOR_DOMAIN:
do_expect = do_expect_value
else:
do_expect = do_expect_state
assert state == do_expect
async def test_pymodbus_constructor_fail(hass, caplog):
"""Run test for failing pymodbus constructor."""
config = {
DOMAIN: [
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
}
]
}
with mock.patch(
"homeassistant.components.modbus.modbus.ModbusTcpClient", autospec=True
) as mock_pb:
caplog.set_level(logging.ERROR)
mock_pb.side_effect = ModbusException("test no class")
assert await async_setup_component(hass, DOMAIN, config) is False
await hass.async_block_till_done()
message = f"Pymodbus: {TEST_MODBUS_NAME}: Modbus Error: test"
assert caplog.messages[0].startswith(message)
assert caplog.records[0].levelname == "ERROR"
assert mock_pb.called
async def test_pymodbus_close_fail(hass, caplog, mock_pymodbus):
"""Run test for failing pymodbus close."""
config = {
DOMAIN: [
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
}
]
}
caplog.set_level(logging.ERROR)
mock_pymodbus.connect.return_value = True
mock_pymodbus.close.side_effect = ModbusException("close fail")
assert await async_setup_component(hass, DOMAIN, config) is True
await hass.async_block_till_done()
# Close() is called as part of teardown
async def test_pymodbus_connect_fail(hass, caplog, mock_pymodbus):
"""Run test for failing pymodbus constructor."""
config = {
DOMAIN: [
{
CONF_NAME: TEST_MODBUS_NAME,
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
}
]
}
caplog.set_level(logging.WARNING)
ExceptionMessage = "test connect exception"
mock_pymodbus.connect.side_effect = ModbusException(ExceptionMessage)
assert await async_setup_component(hass, DOMAIN, config) is False
assert ExceptionMessage in caplog.text
async def test_delay(hass, mock_pymodbus):
"""Run test for startup delay."""
# the purpose of this test is to test startup delay
# We "hijiack" a binary_sensor to make a proper blackbox test.
set_delay = 15
set_scan_interval = 5
entity_id = f"{BINARY_SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_")
config = {
DOMAIN: [
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_NAME: TEST_MODBUS_NAME,
CONF_DELAY: set_delay,
CONF_BINARY_SENSORS: [
{
CONF_INPUT_TYPE: CALL_TYPE_COIL,
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 52,
CONF_SLAVE: 0,
CONF_SCAN_INTERVAL: set_scan_interval,
},
],
}
]
}
mock_pymodbus.read_coils.return_value = ReadResult([0x01])
start_time = dt_util.utcnow()
with mock.patch(
"homeassistant.helpers.event.dt_util.utcnow", return_value=start_time
):
assert await async_setup_component(hass, DOMAIN, config) is True
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_UNKNOWN
time_sensor_active = start_time + timedelta(seconds=2)
time_after_delay = start_time + timedelta(seconds=(set_delay))
time_after_scan = start_time + timedelta(seconds=(set_delay + set_scan_interval))
time_stop = time_after_scan + timedelta(seconds=10)
now = start_time
while now < time_stop:
now += timedelta(seconds=1)
with mock.patch(
"homeassistant.helpers.event.dt_util.utcnow",
return_value=now,
autospec=True,
):
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
if now > time_sensor_active:
if now <= time_after_delay:
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
elif now > time_after_scan:
assert hass.states.get(entity_id).state == STATE_ON
@pytest.mark.parametrize(
"do_config",
[
{
CONF_TYPE: TCP,
CONF_HOST: TEST_MODBUS_HOST,
CONF_PORT: TEST_PORT_TCP,
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 117,
CONF_SLAVE: 0,
CONF_SCAN_INTERVAL: 0,
}
],
},
],
)
async def test_shutdown(hass, caplog, mock_pymodbus, mock_modbus_with_pymodbus):
"""Run test for shutdown."""
hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
await hass.async_block_till_done()
await hass.async_block_till_done()
assert mock_pymodbus.close.called
assert caplog.text == ""
@pytest.mark.parametrize(
"do_config",
[
{
CONF_SENSORS: [
{
CONF_NAME: TEST_ENTITY_NAME,
CONF_ADDRESS: 51,
CONF_SLAVE: 0,
}
]
},
],
)
async def test_stop_restart(hass, caplog, mock_modbus):
"""Run test for service stop."""
caplog.set_level(logging.INFO)
entity_id = f"{SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_")
assert hass.states.get(entity_id).state == STATE_UNKNOWN
hass.states.async_set(entity_id, 17)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == "17"
mock_modbus.reset_mock()
caplog.clear()
data = {
ATTR_HUB: TEST_MODBUS_NAME,
}
await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True)
await hass.async_block_till_done()
assert hass.states.get(entity_id).state == STATE_UNAVAILABLE
assert mock_modbus.close.called
assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text
mock_modbus.reset_mock()
caplog.clear()
await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True)
await hass.async_block_till_done()
assert not mock_modbus.close.called
assert mock_modbus.connect.called
assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text
mock_modbus.reset_mock()
caplog.clear()
await hass.services.async_call(DOMAIN, SERVICE_RESTART, data, blocking=True)
await hass.async_block_till_done()
assert mock_modbus.close.called
assert mock_modbus.connect.called
assert f"modbus {TEST_MODBUS_NAME} communication closed" in caplog.text
assert f"modbus {TEST_MODBUS_NAME} communication open" in caplog.text
|
py | b416ee0363ac77af972ac1ee93993b985837ed4d | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyAzureMgmtMedia(PythonPackage):
"""Microsoft Azure Media Services Client Library for Python."""
homepage = "https://github.com/Azure/azure-sdk-for-python"
pypi = "azure-mgmt-media/azure-mgmt-media-2.2.0.zip"
version('2.2.0', sha256='0adeee9e654a9011f5107def06fea6838864a3514a1e5a9ed495f3a56a687cc7')
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:1.999', type=('build', 'run'))
depends_on('[email protected]:1.999', type=('build', 'run'))
depends_on('py-azure-mgmt-nspkg', when='^python@:2', type=('build', 'run'))
|
py | b416ee800fd1a590a168d3a20ce0938b7498994f | # -*- coding: utf-8 -*-
'''
@author: longxin
@version: 1.0
@date:
@changeVersion:
@changeAuthor:
@description:
'''
class operation:
    def __init__(self, operationid, operationfunction):
        self.operationid = operationid
        self.operationfunction = operationfunction
def checkid(self, operationid):
if self.operationid == operationid:
return True
else:
return False
def getid(self):
return self.operationid
def excute(self, inputdata):
return self.operationfunction(inputdata)
class ExecuteAgent:
    # Define the operation functions
def __func0__(self, input):
tmp = input[0] + 1
return tmp
def __func1__(self, input):
tmp = input[0] - 1
return tmp
def __func2__(self, input):
tmp = input[0] * 2
return tmp
def __func3__(self, input):
tmp = input[0] * input[1]
return tmp
def __func4__(self, input):
tmp = input[0] * 0.5
return tmp
def __func5__(self, input):
tmp = input[0] * input[1]
return tmp
def __func6__(self, input):
tmp = input[0] + input[1]
return tmp
def __init__(self):
self.operations = []
operation0 = operation(0, self.__func0__)
operation1 = operation(1, self.__func1__)
operation2 = operation(2, self.__func2__)
operation3 = operation(3, self.__func3__)
operation4 = operation(4, self.__func4__)
operation5 = operation(5, self.__func5__)
operation6 = operation(6, self.__func6__)
self.operations.append(operation0)
self.operations.append(operation1)
self.operations.append(operation2)
self.operations.append(operation3)
self.operations.append(operation4)
self.operations.append(operation5)
self.operations.append(operation6)
def excute(self, operationid, inputdata):
        # # Check that the operation id exists and validate the input data format
# if int(operationid) >= len(self.operations)-1 or operationid < 0:
# return None
#
# if not isinstance(inputdata, list):
# return None
return self.operations[operationid].excute(inputdata)
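# Illustrative usage sketch (not part of the original module): operation ids
# index the arithmetic functions registered in __init__ above.
#   agent = ExecuteAgent()
#   agent.excute(0, [41])    # __func0__: input[0] + 1 -> 42
#   agent.excute(3, [6, 7])  # __func3__: input[0] * input[1] -> 42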
|
py | b416eef3e0ea136129c567b0527898a038774480 | import os, sys, json
import psycopg2
from flask_cors import CORS
conn_string = "host='localhost' dbname='swift' user='postgres' password='Guatemala1'"
route = r"/api/mobil/"
def getMessages_old(toMsg):
from psycopg2.extras import RealDictCursor
conn = psycopg2.connect(conn_string)
list = []
cursor = conn.cursor(cursor_factory=RealDictCursor)
cursor.execute("""select msg from public."offlineChat" where tomsg = '"""+ toMsg +"'")
l = json.dumps(cursor.fetchall(),indent = 2)
conn.close()
borrarMensaje(toMsg)
return l
def getMessages(toMsg):
conn = psycopg2.connect(conn_string)
cursor = conn.cursor()
cursor.execute("""select msg from public."offlineChat" where tomsg = '"""+ toMsg +"'")
rows = cursor.fetchall()
#list = []
s = "[ "
for row in rows:
s = s + row[0] + ","
conn.close()
ret = s[:-1] + " ]"
#print (ret)
borrarMensaje(toMsg)
return ret
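# Worked example of the string assembly above (stored rows are hypothetical):
# for rows ['{"id":1}', '{"id":2}'] the loop builds '[ {"id":1},{"id":2},' and
# s[:-1] + " ]" drops the trailing comma, yielding the JSON array
# '[ {"id":1},{"id":2} ]' without calling json.dumps.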
def grabaMensaje(data):
dataStr = str(data).replace("'",'"')
#print(dataStr)
try:
conn = psycopg2.connect(conn_string)
cursor = conn.cursor()
q = """insert into public."offlineChat" (msg,tomsg,sent,datetime) values('"""+ dataStr +"','"+ data['to'] +"',False,'" + data['dateTime'] +"')"
print(q)
cursor.execute(q)
conn.commit()
conn.close()
return """{\"id\":\""""+ str(data['id']) +"""\",\"msg\":\"ok\"}"""
except:
return """{\"id\":\""""+ str(data['id']) +"""\",\"msg\":\"error\"}"""
def borrarMensaje(tomsg):
conn = psycopg2.connect(conn_string)
cursor = conn.cursor()
q = """ delete from public."offlineChat" where tomsg = '"""+ tomsg +"' "
#print(q)
cursor.execute(q)
conn.commit()
conn.close()
from flask import Flask,request
app = Flask(__name__)
cors = CORS(app, resources={r"/api/*": {"origins": "*"}})
@app.route(route + 'getMessages',methods=['GET'])
def getMsgs():
id = request.args.get('id')
#msg = request.args.get('msg')
#print('get messages from: ' + id)
return getMessages(id)
@app.route(route + 'postMessages',methods=['POST'])
def postMsg():
data = request.get_json()
return grabaMensaje(data)
if __name__ == '__main__':
#app.run(debug=True)
print("starting chat service port 1500")
#app.run(ssl_context='adhoc',host='0.0.0.0', port=1500)
app.run(host='0.0.0.0', port=1500)
|
py | b416ef5c8d1ce38a8b430183dd7b518d5bbad62b | # -*- coding: utf-8 -*-
"""
pagarmeapisdk
This file was automatically generated by APIMATIC v3.0 (
https://www.apimatic.io ).
"""
from pagarmeapisdk.models.create_address_request import CreateAddressRequest
from pagarmeapisdk.models.create_checkout_bank_transfer_request import CreateCheckoutBankTransferRequest
from pagarmeapisdk.models.create_checkout_boleto_payment_request import CreateCheckoutBoletoPaymentRequest
from pagarmeapisdk.models.create_checkout_credit_card_payment_request import CreateCheckoutCreditCardPaymentRequest
from pagarmeapisdk.models.create_checkout_debit_card_payment_request import CreateCheckoutDebitCardPaymentRequest
from pagarmeapisdk.models.create_checkout_pix_payment_request import CreateCheckoutPixPaymentRequest
class CreateCheckoutPaymentRequest(object):
"""Implementation of the 'CreateCheckoutPaymentRequest' model.
Checkout payment request
Attributes:
accepted_payment_methods (list of string): Accepted Payment Methods
accepted_multi_payment_methods (list of object): Accepted Multi
Payment Methods
success_url (string): Success url
default_payment_method (string): Default payment method
gateway_affiliation_id (string): Gateway Affiliation Id
credit_card (CreateCheckoutCreditCardPaymentRequest): Credit Card
payment request
debit_card (CreateCheckoutDebitCardPaymentRequest): Debit Card payment
request
boleto (CreateCheckoutBoletoPaymentRequest): Boleto payment request
customer_editable (bool): Customer is editable?
expires_in (int): Time in minutes for expiration
skip_checkout_success_page (bool): Skip postpay success screen?
billing_address_editable (bool): Billing Address is editable?
billing_address (CreateAddressRequest): Billing Address
bank_transfer (CreateCheckoutBankTransferRequest): Bank Transfer
payment request
accepted_brands (list of string): Accepted Brands
pix (CreateCheckoutPixPaymentRequest): Pix payment request
"""
# Create a mapping from Model property names to API property names
_names = {
"accepted_payment_methods": 'accepted_payment_methods',
"accepted_multi_payment_methods": 'accepted_multi_payment_methods',
"success_url": 'success_url',
"skip_checkout_success_page": 'skip_checkout_success_page',
"billing_address_editable": 'billing_address_editable',
"billing_address": 'billing_address',
"accepted_brands": 'accepted_brands',
"default_payment_method": 'default_payment_method',
"gateway_affiliation_id": 'gateway_affiliation_id',
"credit_card": 'credit_card',
"debit_card": 'debit_card',
"boleto": 'boleto',
"customer_editable": 'customer_editable',
"expires_in": 'expires_in',
"bank_transfer": 'bank_transfer',
"pix": 'pix'
}
def __init__(self,
accepted_payment_methods=None,
accepted_multi_payment_methods=None,
success_url=None,
skip_checkout_success_page=None,
billing_address_editable=None,
billing_address=None,
accepted_brands=None,
default_payment_method=None,
gateway_affiliation_id=None,
credit_card=None,
debit_card=None,
boleto=None,
customer_editable=None,
expires_in=None,
bank_transfer=None,
pix=None):
"""Constructor for the CreateCheckoutPaymentRequest class"""
# Initialize members of the class
self.accepted_payment_methods = accepted_payment_methods
self.accepted_multi_payment_methods = accepted_multi_payment_methods
self.success_url = success_url
self.default_payment_method = default_payment_method
self.gateway_affiliation_id = gateway_affiliation_id
self.credit_card = credit_card
self.debit_card = debit_card
self.boleto = boleto
self.customer_editable = customer_editable
self.expires_in = expires_in
self.skip_checkout_success_page = skip_checkout_success_page
self.billing_address_editable = billing_address_editable
self.billing_address = billing_address
self.bank_transfer = bank_transfer
self.accepted_brands = accepted_brands
self.pix = pix
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object
as obtained from the deserialization of the server's response. The
keys MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
accepted_payment_methods = dictionary.get('accepted_payment_methods')
accepted_multi_payment_methods = dictionary.get('accepted_multi_payment_methods')
success_url = dictionary.get('success_url')
skip_checkout_success_page = dictionary.get('skip_checkout_success_page')
billing_address_editable = dictionary.get('billing_address_editable')
billing_address = CreateAddressRequest.from_dictionary(dictionary.get('billing_address')) if dictionary.get('billing_address') else None
accepted_brands = dictionary.get('accepted_brands')
default_payment_method = dictionary.get('default_payment_method')
gateway_affiliation_id = dictionary.get('gateway_affiliation_id')
credit_card = CreateCheckoutCreditCardPaymentRequest.from_dictionary(dictionary.get('credit_card')) if dictionary.get('credit_card') else None
debit_card = CreateCheckoutDebitCardPaymentRequest.from_dictionary(dictionary.get('debit_card')) if dictionary.get('debit_card') else None
boleto = CreateCheckoutBoletoPaymentRequest.from_dictionary(dictionary.get('boleto')) if dictionary.get('boleto') else None
customer_editable = dictionary.get('customer_editable')
expires_in = dictionary.get('expires_in')
bank_transfer = CreateCheckoutBankTransferRequest.from_dictionary(dictionary.get('bank_transfer')) if dictionary.get('bank_transfer') else None
pix = CreateCheckoutPixPaymentRequest.from_dictionary(dictionary.get('pix')) if dictionary.get('pix') else None
# Return an object of this model
return cls(accepted_payment_methods,
accepted_multi_payment_methods,
success_url,
skip_checkout_success_page,
billing_address_editable,
billing_address,
accepted_brands,
default_payment_method,
gateway_affiliation_id,
credit_card,
debit_card,
boleto,
customer_editable,
expires_in,
bank_transfer,
pix)
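# Minimal illustrative round trip (field values are invented, not from the
# Pagarme API docs):
#   req = CreateCheckoutPaymentRequest.from_dictionary(
#       {'success_url': 'https://example.test/ok', 'expires_in': 60})
#   req.success_url   # -> 'https://example.test/ok'
#   req.credit_card   # -> None (absent keys deserialize to None)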
|
py | b416f09816c1df1333a197fa811b7941be5f4768 | """
Example script showing how to simulate expected counts
in the CTA energy range
"""
from gammapy.spectrum.models import LogParabola
from gammapy.scripts import CTAPerf
from gammapy.scripts.cta_utils import CTASpectrumObservation, Target, ObservationParameters
import astropy.units as u
import time
# Observation parameters
alpha = 0.2 * u.Unit('')
livetime = 5. * u.h
emin = 0.03 * u.TeV
emax = 5 * u.TeV
obs_param = ObservationParameters(alpha=alpha, livetime=livetime,
emin=emin, emax=emax)
# Target, PKS 2155-304 from 3FHL
name = "2155"
# model parameters
alpha = 1.88 * u.Unit('')
beta = 0.15 * u.Unit('')
reference = 18.3 * u.GeV
amplitude = 7.7e-11 * u.Unit('cm-2 s-1 GeV-1')
model = LogParabola(alpha=alpha, beta=beta, reference=reference, amplitude=amplitude)
# redshift
redshift = 0.116
# EBL model
ebl_model_name = 'dominguez'
target = Target(name=name, model=model,
redshift=redshift,
ebl_model_name=ebl_model_name)
# Performance
filename = '$GAMMAPY_EXTRA/datasets/cta/perf_prod2/point_like_non_smoothed/South_5h.fits.gz'
cta_perf = CTAPerf.read(filename)
# Simulation
t_start = time.clock()
simu = CTASpectrumObservation.simulate_obs(perf=cta_perf,
target=target,
obs_param=obs_param)
t_end = time.clock()
print(simu)
print('\nsimu done in {} s'.format(t_end-t_start))
CTASpectrumObservation.plot_simu(simu, target)
|
py | b416f09a3a17527eccbd131917ca257b39bc7884 | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
__dir__ = os.path.dirname(__file__)
sys.path.append(__dir__)
sys.path.append(os.path.join(__dir__, '..', '..', '..'))
sys.path.append(os.path.join(__dir__, '..', '..', '..', 'tools'))
import json
import cv2
from paddle import fluid
import paddleslim as slim
from copy import deepcopy
from tools.eval_utils.eval_det_utils import eval_det_run
from tools import program
from ppocr.utils.utility import initial_logger
from ppocr.data.reader_main import reader_main
from ppocr.utils.save_load import init_model
from ppocr.utils.character import CharacterOps
from ppocr.utils.utility import create_module
logger = initial_logger()
def get_pruned_params(program):
params = []
for param in program.global_block().all_parameters():
if len(
param.shape
) == 4 and 'depthwise' not in param.name and 'transpose' not in param.name:
params.append(param.name)
return params
def eval_function(eval_args, mode='eval'):
exe = eval_args['exe']
config = eval_args['config']
eval_info_dict = eval_args['eval_info_dict']
metrics = eval_det_run(exe, config, eval_info_dict, mode=mode)
return metrics['hmean']
def main():
config = program.load_config(FLAGS.config)
program.merge_config(FLAGS.opt)
logger.info(config)
# check if set use_gpu=True in paddlepaddle cpu version
use_gpu = config['Global']['use_gpu']
program.check_gpu(use_gpu)
alg = config['Global']['algorithm']
assert alg in ['EAST', 'DB', 'Rosetta', 'CRNN', 'STARNet', 'RARE']
if alg in ['Rosetta', 'CRNN', 'STARNet', 'RARE']:
config['Global']['char_ops'] = CharacterOps(config['Global'])
place = fluid.CUDAPlace(0) if use_gpu else fluid.CPUPlace()
startup_prog = fluid.Program()
eval_program = fluid.Program()
eval_build_outputs = program.build(
config, eval_program, startup_prog, mode='test')
eval_fetch_name_list = eval_build_outputs[1]
eval_fetch_varname_list = eval_build_outputs[2]
eval_program = eval_program.clone(for_test=True)
exe = fluid.Executor(place)
exe.run(startup_prog)
init_model(config, eval_program, exe)
eval_reader = reader_main(config=config, mode="eval")
eval_info_dict = {'program':eval_program,\
'reader':eval_reader,\
'fetch_name_list':eval_fetch_name_list,\
'fetch_varname_list':eval_fetch_varname_list}
eval_args = dict()
eval_args = {'exe': exe, 'config': config, 'eval_info_dict': eval_info_dict}
metrics = eval_function(eval_args)
print("Baseline: {}".format(metrics))
params = get_pruned_params(eval_program)
print('Start to analyze')
sens_0 = slim.prune.sensitivity(
eval_program,
place,
params,
eval_function,
sensitivities_file="sensitivities_0.data",
pruned_ratios=[0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8],
eval_args=eval_args,
criterion='geometry_median')
if __name__ == '__main__':
parser = program.ArgsParser()
FLAGS = parser.parse_args()
main()
|
py | b416f0b5d60441ef066b6926077ea5df45fac43f | #!/usr/bin/env python3
# Copyright (c) 2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the generation of UTXO snapshots using `dumptxoutset`.
"""
import hashlib
from pathlib import Path
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
class DumptxoutsetTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
"""Test a trivial usage of the dumptxoutset RPC command."""
node = self.nodes[0]
mocktime = node.getblockheader(node.getblockhash(0))['time'] + 1
node.setmocktime(mocktime)
node.generate(100)
FILENAME = 'txoutset.dat'
out = node.dumptxoutset(FILENAME)
expected_path = Path(node.datadir) / self.chain / FILENAME
assert expected_path.is_file()
assert_equal(out['coins_written'], 100)
assert_equal(out['base_height'], 100)
assert_equal(out['path'], str(expected_path))
# Blockhash should be deterministic based on mocked time.
assert_equal(
out['base_hash'],
'65d0aec2439aae14373c153f596fb90a87b643d9bff3e65f250aa8f055e6816b')
with open(str(expected_path), 'rb') as f:
digest = hashlib.sha256(f.read()).hexdigest()
# UTXO snapshot hash should be deterministic based on mocked time.
assert_equal(
digest,
'a92dc32a15975b3c84bb1e6ac5218ff94194b4ea7d1b9372fb80184a7533a89f')
# Specifying a path to an existing file will fail.
assert_raises_rpc_error(
-8, '{} already exists'.format(FILENAME), node.dumptxoutset, FILENAME)
if __name__ == '__main__':
DumptxoutsetTest().main()
|
py | b416f217aa7381a2e60cad577d79bdb7d0c53cd0 | #!/usr/bin/env python
"""
@package mi.core.instrument.protocol_param_dict
@file mi/core/instrument/protocol_param_dict.py
@author Edward Hunter
@author Steve Foley
@brief A dictionary class that manages, matches and formats device parameters.
"""
__author__ = 'Edward Hunter'
__license__ = 'Apache 2.0'
import re
import ntplib
import time
import yaml
import pkg_resources
from mi.core.common import BaseEnum
from mi.core.exceptions import InstrumentParameterException
from mi.core.exceptions import InstrumentParameterExpirationException
from mi.core.instrument.instrument_dict import InstrumentDict
from mi.core.log import get_logger ; log = get_logger()
EGG_PATH = "resource"
DEFAULT_FILENAME = "strings.yml"
class ParameterDictType(BaseEnum):
BOOL = "bool"
INT = "int"
STRING = "string"
FLOAT = "float"
LIST = "list"
ENUM = "enum"
class ParameterDictVisibility(BaseEnum):
READ_ONLY = "READ_ONLY" # Can not be set by the driver at any time
READ_WRITE = "READ_WRITE" # Can be set by the driver or the operator
IMMUTABLE = "IMMUTABLE" # Can only be set by the driver during startup
DIRECT_ACCESS = "DIRECT_ACCESS"
class ParameterDictKey(BaseEnum):
"""
These are the output strings when generating a metadata block. They also
line up with incoming YAML strings where appropriate.
"""
GET_TIMEOUT = "get_timeout"
SET_TIMEOUT = "set_timeout"
VISIBILITY = "visibility"
STARTUP = "startup"
DIRECT_ACCESS = "direct_access"
DISPLAY_NAME = "display_name"
DESCRIPTION = "description"
VALUE = "value"
TYPE = "type"
DEFAULT = "default"
UNITS = "units"
PARAMETERS = "parameters"
VALUE_DESCRIPTION = "value_description"
class ParameterDescription(object):
"""
An object handling the descriptive (and largely staticly defined in code)
qualities of a parameter.
"""
def __init__(self,
name,
visibility=ParameterDictVisibility.READ_WRITE,
direct_access=False,
startup_param=False,
default_value=None,
init_value=None,
menu_path_read=None,
submenu_read=None,
menu_path_write=None,
submenu_write=None,
multi_match=None,
get_timeout=10,
set_timeout=10,
display_name=None,
description=None,
type=None,
units=None,
value_description=None):
self.name = name
self.visibility = visibility
self.direct_access = direct_access
self.startup_param = startup_param
self.default_value = default_value
self.init_value = init_value
self.menu_path_read = menu_path_read
self.submenu_read = submenu_read
self.menu_path_write = menu_path_write
self.submenu_write = submenu_write
self.multi_match = multi_match
self.get_timeout = get_timeout
self.set_timeout = set_timeout
self.display_name = display_name
self.description = description
if ParameterDictType.has(type) or type == None:
self.type = type
else:
raise InstrumentParameterException("Invalid type specified!")
self.units = units
self.value_description = value_description
class ParameterValue(object):
"""
A parameter's actual value and the information required for updating it
"""
def __init__(self, name, f_format, value=None, expiration=None):
self.name = name
self.value = value
self.f_format = f_format
self.expiration = expiration
self.timestamp = ntplib.system_to_ntp_time(time.time())
def set_value(self, new_val):
"""
Set the stored value to the new value
@param new_val The new value to set for the parameter
"""
self.value = new_val
self.timestamp = ntplib.system_to_ntp_time(time.time())
def get_value(self, baseline_timestamp=None):
"""
Get the value from this structure, do whatever checks are necessary
@param: baseline_timestamp use this time for expiration calculation, default to current time
@raises InstrumentParameterExpirationException when a parameter is
too old to work with. Original value is in exception.
"""
if(baseline_timestamp == None):
baseline_timestamp = ntplib.system_to_ntp_time(time.time())
if (self.expiration != None) and baseline_timestamp > (self.timestamp + self.expiration):
raise InstrumentParameterExpirationException("Value for %s expired!" % self.name, self.value)
else:
return self.value
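# Expiration sketch (timestamps are illustrative): a value stored with
# expiration=10 is readable for 10 seconds past its stored NTP timestamp.
#   pv = ParameterValue('FOO', str, value=1, expiration=10)
#   pv.get_value(pv.timestamp + 5)   # -> 1
#   pv.get_value(pv.timestamp + 11)  # raises InstrumentParameterExpirationException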
class Parameter(object):
"""
A parameter dictionary item.
"""
def __init__(self, name, f_format, value=None,
visibility=ParameterDictVisibility.READ_WRITE,
menu_path_read=None,
submenu_read=None,
menu_path_write=None,
submenu_write=None,
multi_match=False,
direct_access=False,
startup_param=False,
default_value=None,
init_value=None,
expiration=None,
get_timeout=10,
set_timeout=10,
display_name=None,
description=None,
type=None,
units=None,
value_description=None):
"""
Parameter value constructor.
@param name The parameter name.
@param f_format The function that formats the parameter value for a set command.
@param visibility The ParameterDictVisibility value that indicates what
the access to this parameter is
@param menu_path The path of menu options required to get to the parameter
value display when presented in a menu-based instrument
@param value The parameter value (initializes to None).
"""
self.description = ParameterDescription(name,
menu_path_read=menu_path_read,
submenu_read=submenu_read,
menu_path_write=menu_path_write,
submenu_write=submenu_write,
multi_match=multi_match,
visibility=visibility,
direct_access=direct_access,
startup_param=startup_param,
default_value=default_value,
init_value=init_value,
get_timeout=get_timeout,
set_timeout=set_timeout,
display_name=display_name,
description=description,
type=type,
units=units,
value_description=value_description)
self.value = ParameterValue(name, f_format, value=value,
expiration=expiration)
self.name = name
def update(self, input):
"""
        Attempt to update a parameter value. By default, this assumes the input
        will be the new value. In subclasses, this must be updated to handle
a real string of data appropriately.
@param input A string that is the parameter value.
@retval True if an update was successful, False otherwise.
"""
self.value.set_value(input)
return True
def get_value(self, timestamp=None):
"""
Get the value of the parameter that has been stored in the ParameterValue
object.
@param timestamp timestamp to use for expiration calculation
@retval The actual data value if it is valid
@raises InstrumentParameterExpirationException If the value has expired
"""
return self.value.get_value(timestamp)
class RegexParameter(Parameter):
def __init__(self, name, pattern, f_getval, f_format, value=None,
visibility=ParameterDictVisibility.READ_WRITE,
menu_path_read=None,
submenu_read=None,
menu_path_write=None,
submenu_write=None,
multi_match=False,
direct_access=False,
startup_param=False,
default_value=None,
init_value=None,
regex_flags=0,
expiration=None,
get_timeout=10,
set_timeout=10,
display_name=None,
description=None,
type=None,
units=None,
value_description=None):
"""
Parameter value constructor.
@param name The parameter name.
@param pattern The regex that matches the parameter in line output.
        @param f_getval The function that extracts the value from a regex match.
@param f_format The function that formats the parameter value for a set command.
@param visibility The ParameterDictVisibility value that indicates what
the access to this parameter is
@param menu_path The path of menu options required to get to the parameter
value display when presented in a menu-based instrument
@param value The parameter value (initializes to None).
@param regex_flags Flags that should be passed to the regex in this
parameter. Should comply with regex compile() interface (XORed flags).
@throws TypeError if regex flags are bad
@see ProtocolParameterDict.add() for details of parameters
"""
Parameter.__init__(self,
name,
f_format,
value=value,
visibility=visibility,
menu_path_read=menu_path_read,
submenu_read=submenu_read,
menu_path_write=menu_path_write,
submenu_write=submenu_write,
multi_match=multi_match,
direct_access=direct_access,
startup_param=startup_param,
default_value=default_value,
init_value=init_value,
expiration=expiration,
get_timeout=get_timeout,
set_timeout=set_timeout,
display_name=display_name,
description=description,
type=type,
units=units,
value_description=value_description)
self.regex = re.compile(pattern, regex_flags)
self.f_getval = f_getval
def update(self, input):
"""
Attempt to update a parameter value. If the input string matches the
value regex, extract and update the dictionary value.
@param input A string possibly containing the parameter value.
@retval True if an update was successful, False otherwise.
"""
if not (isinstance(input, str)):
match = self.regex.search(str(input))
else:
match = self.regex.search(input)
if match:
self.value.set_value(self.f_getval(match))
return True
else:
return False
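# Usage sketch for RegexParameter.update (the parameter name and pattern are
# illustrative only):
#   p = RegexParameter('BAUD', r'BAUD=(\d+)', lambda m: int(m.group(1)), str)
#   p.update('BAUD=9600')  # -> True;  p.get_value() == 9600
#   p.update('garbage')    # -> False; value unchanged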
class FunctionParameter(Parameter):
def __init__(self, name, f_getval, f_format, value=None,
visibility=ParameterDictVisibility.READ_WRITE,
menu_path_read=None,
submenu_read=None,
menu_path_write=None,
submenu_write=None,
multi_match=False,
direct_access=False,
startup_param=False,
default_value=None,
init_value=None,
expiration=None,
get_timeout=10,
set_timeout=10,
display_name=None,
description=None,
type=None,
units=None,
value_description=None):
"""
Parameter value constructor.
@param name The parameter name.
        @param f_getval The function that extracts the value from a regex match.
If no value is found for extraction, this function should return
something false.
@param f_format The function that formats the parameter value for a set command.
@param visibility The ParameterDictVisibility value that indicates what
the access to this parameter is
@param menu_path The path of menu options required to get to the parameter
value display when presented in a menu-based instrument
@param value The parameter value (initializes to None).
"""
Parameter.__init__(self,
name,
f_format,
value=value,
visibility=visibility,
menu_path_read=menu_path_read,
submenu_read=submenu_read,
menu_path_write=menu_path_write,
submenu_write=submenu_write,
multi_match=multi_match,
direct_access=direct_access,
startup_param=startup_param,
default_value=default_value,
init_value=init_value,
expiration=expiration,
get_timeout=get_timeout,
set_timeout=set_timeout,
display_name=display_name,
description=description,
type=type,
units=units,
value_description=value_description)
self.f_getval = f_getval
def update(self, input):
"""
        Attempt to update a parameter value. The input string is run through
the filtering function to obtain a value.
@param input A string possibly containing the parameter value in some
format.
@retval True if a change was made to the value, false if the value is
the same as it was before. Since the result of the supplied function
        could be anything (boolean included), there isn't any way to tell the
success or failure of the match...all update methods run. The result
is a change flag.
"""
orig_value = self.value.get_value()
result = self.f_getval(input)
if result != orig_value:
self.value.set_value(result)
log.trace('Updated parameter %s=%s', self.name, self.value.get_value())
return True
else:
return False
class ProtocolParameterDict(InstrumentDict):
"""
Protocol parameter dictionary. Manages, matches and formats device
parameters.
"""
def __init__(self):
"""
Constructor.
"""
self._param_dict = {}
def add(self,
name,
pattern,
f_getval,
f_format,
value=None,
visibility=ParameterDictVisibility.READ_WRITE,
menu_path_read=None,
submenu_read=None,
menu_path_write=None,
submenu_write=None,
multi_match=False,
direct_access=False,
startup_param=False,
default_value=None,
init_value=None,
get_timeout=10,
set_timeout=10,
display_name=None,
description=None,
type=None,
units=None,
regex_flags=0,
value_description=None,
expiration=None):
"""
Add a parameter object to the dictionary using a regex for extraction.
@param name The parameter name.
@param pattern The regex that matches the parameter in line output.
        @param f_getval The function that extracts the value from a regex match.
@param f_format The function that formats the parameter value for a set command.
@param visibility The ParameterDictVisibility value that indicates what
the access to this parameter is
@param menu_path The path of menu options required to get to the parameter
value display when presented in a menu-based instrument
@param direct_access T/F for tagging this as a direct access parameter
to be saved and restored in and out of direct access
@param startup_param T/F for tagging this as a startup parameter to be
applied when the instrument is first configured
@param default_value The default value to use for this parameter when
a value is needed, but no other instructions have been provided.
@param init_value The value that a parameter should be set to during
initialization or re-initialization
@param value The parameter value (initializes to None).
@param get_timeout The number of seconds that should be used as a timeout
when getting the value from the instrument
@param set_timeout The number of seconds that should be used as a timeout
when setting the value to the instrument
@param display_name The string to use for displaying the parameter
or a prompt for the parameter value
@param description The description of what the parameter is
@param type The type of the parameter (int, float, etc.) Should be a
ParameterDictType object
@param regex_flags Flags that should be passed to the regex in this
parameter. Should comply with regex compile() interface (XORed flags).
@param units The units of the value (ie "Hz" or "cm")
@param value_description The description of what values are valid
for the parameter
@param expiration The amount of time in seconds before the value
expires and should not be used. If set to None, the value is always
valid. If set to 0, the value is never valid from the store.
"""
val = RegexParameter(name, pattern, f_getval, f_format,
value=value,
visibility=visibility,
menu_path_read=menu_path_read,
submenu_read=submenu_read,
menu_path_write=menu_path_write,
submenu_write=submenu_write,
multi_match=multi_match,
direct_access=direct_access,
startup_param=startup_param,
default_value=default_value,
init_value=init_value,
expiration=expiration,
get_timeout=get_timeout,
set_timeout=set_timeout,
display_name=display_name,
description=description,
type=type,
regex_flags=regex_flags,
units=units,
value_description=value_description)
self._param_dict[name] = val
def add_parameter(self, parameter):
"""
Add a Parameter object to the dictionary or replace an existing one.
The value can be any object that is an instance of the Parameter class
or subclasses. This is the preferred method for adding these entries as
they allow the user to choose the type of parameter to be used
and make testing more straightforward.
@param parameter The Parameter object to use
"""
if not (isinstance(parameter, Parameter)):
raise InstrumentParameterException(
"Invalid Parameter added! Attempting to add: %s" % parameter)
self._param_dict[parameter.name] = parameter
def get(self, name, timestamp=None):
"""
Get a parameter value from the dictionary.
@param name Name of the value to be retrieved.
@param timestamp Timestamp to use for expiration calculation
@raises KeyError if the name is invalid.
"""
return self._param_dict[name].get_value(timestamp)
def get_current_timestamp(self, offset=0):
"""
Get the current time in a format suitable for parameter expiration calculation.
@param offset: seconds from the current time to offset the timestamp
@return: a unix timestamp
"""
return ntplib.system_to_ntp_time(time.time()) + offset
def get_config_value(self, name):
"""
Get a parameter's startup configuration value based on a search
priority.
1. User initialization value
2. Driver default value
3. Current value if set via update method
4. None if no value could be determined
@param name Name of the value to be retrieved.
@return A startup configuration value if one could be found
otherwise None
@raises KeyError if the name is invalid.
"""
result = self.get_init_value(name)
if result != None:
log.trace("Got init value for %s: %s", name, result)
return result
result = self.get_default_value(name)
if result != None:
log.trace("Got default value for %s: %s", name, result)
return result
# Currently we don't have a way to determine if a value was
# set explicitly or via some data handler. The updated flag
# doesn't work because the update method is called in both
# instances
# result = self.get(name)
#if result != None and self._param_dict[name].updated == True:
# log.trace("Got current value for %s: %s", name, result)
# return result
return None
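    # Precedence sketch for get_config_value (pd is a ProtocolParameterDict;
    # the 'INTERVAL' parameter is hypothetical):
    #   pd.add('INTERVAL', r'INTERVAL=(\d+)', lambda m: int(m.group(1)), str,
    #          default_value=10)
    #   pd.get_config_value('INTERVAL')  # -> 10 (driver default)
    #   pd.set_init_value('INTERVAL', 5)
    #   pd.get_config_value('INTERVAL')  # -> 5 (init value wins)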
def get_init_value(self, name):
"""
Get a parameter's init value from the dictionary.
@param name Name of the value to be retrieved.
@raises KeyError if the name is invalid.
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
return self._param_dict[name].description.init_value
def get_default_value(self, name):
"""
Get a parameter's default value from the dictionary.
@param name Name of the value to be retrieved.
@raises KeyError if the name is invalid.
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
return self._param_dict[name].description.default_value
def set_value(self, name, value):
"""
Set a parameter's value in the dictionary. While this is a simple,
straight forward way of setting things, the update routine might be
a more graceful (and possibly more robust) way to automatically
handling strings directly from an instrument. Consider using update()
wherever it makes sense.
@param name The parameter name.
@param value The parameter object to insert (and possibly overwrite)
into the parameter dictionary.
@raises KeyError if the name is invalid.
@see ProtocolParameterDict.update()
"""
log.debug("Setting parameter dict name: %s to value: %s", name, value)
self._param_dict[name].value.set_value(value)
def set_default(self, name):
"""
Set the value to the default value stored in the param dict
@raise KeyError if the name is invalid
@raise ValueError if the default_value is missing
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
if self._param_dict[name].description.default_value is not None:
self._param_dict[name].value.set_value(self._param_dict[name].description.default_value)
else:
raise ValueError("Missing default value")
def set_init_value(self, name, value):
"""
Set the value to the default value stored in the param dict
@param The parameter name to add to
@param The value to set for the initialization variable
@raise KeyError if the name is invalid
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
self._param_dict[name].description.init_value = value
def get_menu_path_read(self, name):
"""
Get the read menu path parameter value from the dictionary.
@param name Name of the value to be retrieved.
@raises KeyError if the name is invalid.
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
return self._param_dict[name].description.menu_path_read
def get_submenu_read(self, name):
"""
Get the read final destination submenu parameter value from the dictionary.
@param name Name of the value to be retrieved.
@raises KeyError if the name is invalid.
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
return self._param_dict[name].description.submenu_read
def get_menu_path_write(self, name):
"""
Get the write menu path parameter value from the dictionary.
@param name Name of the value to be retrieved.
@raises KeyError if the name is invalid.
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
return self._param_dict[name].description.menu_path_write
def get_submenu_write(self, name):
"""
Get the write final destination parameter value from the dictionary.
@param name Name of the value to be retrieved.
@raises KeyError if the name is invalid.
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
return self._param_dict[name].description.submenu_write
# RAU Added
def multi_match_update(self, input):
"""
        Update the dictionary with a line input. Iterate through all objects
and attempt to match and update (a) parameter(s).
@param input A string to match to a dictionary object.
@retval The count of successfully updated parameters, 0 if not updated
"""
hit_count = 0
multi_mode = False
for (name, val) in self._param_dict.iteritems():
if multi_mode == True and val.description.multi_match == False:
continue
if val.update(input):
                hit_count = hit_count + 1
if False == val.description.multi_match:
return hit_count
else:
multi_mode = True
        if False == multi_mode and input != "":
            log.debug("protocol_param_dict.py UNMATCHED ***************************** %s", input)
return hit_count
def update_many(self, input):
"""
Take in multiple inputs and update many parameters at once.
@param input a line or lines of input to parse
@retval A dict with the names and values that were updated
"""
result = {}
for (name, val) in self._param_dict.iteritems():
update_result = val.update(input)
if update_result:
result[name] = update_result
return result
def update(self, input, target_params=None):
"""
        Update the dictionary with a line input. Iterate through all objects
        and attempt to match and update a parameter. Only updates the first
        match encountered. If we pass in a target params list then we will
        only iterate through those, allowing us to limit the update to only
        specific parameters.
@param input A string to match to a dictionary object.
@param target_params a name, or list of names to limit the scope of
the update.
@retval The name that was successfully updated, None if not updated
        @raise InstrumentParameterException on invalid target params
@raise KeyError on invalid parameter name
"""
log.debug("update input: %s", input)
found = False
if(target_params and isinstance(target_params, str)):
params = [target_params]
elif(target_params and isinstance(target_params, list)):
params = target_params
elif(target_params == None):
params = self._param_dict.keys()
else:
raise InstrumentParameterException("invalid target_params, must be name or list")
for name in params:
log.trace("update param dict name: %s", name)
val = self._param_dict[name]
if val.update(input):
found = True
return found
def get_all(self, timestamp=None):
"""
        Retrieve the configuration (all key values).
@param timestamp baseline timestamp to use for expiration
@retval name : value configuration dict.
"""
config = {}
for (key, val) in self._param_dict.iteritems():
config[key] = val.get_value(timestamp)
return config
def get_config(self):
"""
        Retrieve the configuration (all settable key values).
@retval name : value configuration dict.
"""
config = {}
for (key, val) in self._param_dict.iteritems():
if(self.is_settable_param(key)):
config[key] = val.get_value()
return config
def format(self, name, val=None):
"""
Format a parameter for a set command.
@param name The name of the parameter.
@param val The parameter value.
@retval The value formatted as a string for writing to the device.
@raises InstrumentProtocolException if the value could not be formatted
or value object is missing.
@raises KeyError if the parameter name is invalid.
"""
if not self._param_dict[name].value:
raise InstrumentParameterException("No value present for %s!" % name)
if val == None:
current_value = self._param_dict[name].value.get_value()
else:
current_value = val
return self._param_dict[name].value.f_format(current_value)
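    # Formatting sketch (pd and 'BAUD' are hypothetical): with f_format=str and
    # a stored value of 9600,
    #   pd.format('BAUD')        # -> '9600' (formats the stored value)
    #   pd.format('BAUD', 4800)  # -> '4800' (formats the supplied value)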
def get_keys(self):
"""
Return list of all parameter names in the dictionary.
"""
return self._param_dict.keys()
def get_direct_access_list(self):
"""
Return a list of parameter names that are tagged as direct access
parameters
@retval A list of parameter names, possibly empty
@raises InstrumentParameterException if the description is missing
"""
return_val = []
for key in self._param_dict.keys():
if not self._param_dict[key].description:
raise InstrumentParameterException("No description present!")
if self._param_dict[key].description.direct_access == True:
return_val.append(key)
return return_val
def is_settable_param(self, name):
"""
Return true if a parameter is not read only
@param name name of a parameter
@retval True if the parameter is flagged as not read only
@raises KeyError if parameter doesn't exist
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
return not (self._param_dict[name].description.visibility == ParameterDictVisibility.READ_ONLY)
def is_startup_param(self, name):
"""
Return true if a parameter name references a startup parameter
@param name name of a parameter
@retval True if the parameter is flagged as a startup param
@raises KeyError if parameter doesn't exist
@raises InstrumentParameterException if the description is missing
"""
if not self._param_dict[name].description:
raise InstrumentParameterException("No description present!")
return self._param_dict[name].description.startup_param == True
def get_startup_list(self):
"""
Return a list of parameter names that are tagged as startup parameters
@retval A list of parameter names, possibly empty
"""
return_val = []
for key in self._param_dict.keys():
if self.is_startup_param(key):
return_val.append(key)
return return_val
def get_visibility_list(self, visibility):
"""
Return a list of parameter names that are tagged with the given
visibility
        @param visibility A value from the ParameterDictVisibility enum
@retval A list of parameter names, possibly empty
"""
return_val = []
for key in self._param_dict.keys():
if self._param_dict[key].description.visibility == visibility:
return_val.append(key)
return return_val
def generate_dict(self):
"""
Generate a JSONifyable metadata schema that describes the parameters.
This could be passed up toward the agent for ultimate handing to the UI.
This method only handles the parameter block of the schema.
"""
return_struct = {}
for param_key in self._param_dict.keys():
param_struct = {}
value_struct = {}
if self._param_dict[param_key] != None:
param_obj = self._param_dict[param_key].description
# Description objects
if param_obj.get_timeout != None:
param_struct[ParameterDictKey.GET_TIMEOUT] = param_obj.get_timeout
if param_obj.set_timeout != None:
param_struct[ParameterDictKey.SET_TIMEOUT] = param_obj.set_timeout
if param_obj.visibility != None:
param_struct[ParameterDictKey.VISIBILITY] = param_obj.visibility
if param_obj.startup_param != None:
param_struct[ParameterDictKey.STARTUP] = param_obj.startup_param
if param_obj.direct_access != None:
param_struct[ParameterDictKey.DIRECT_ACCESS] = param_obj.direct_access
if param_obj.display_name != None:
param_struct[ParameterDictKey.DISPLAY_NAME] = param_obj.display_name
if param_obj.description != None:
param_struct[ParameterDictKey.DESCRIPTION] = param_obj.description
# Value objects
if param_obj.type != None:
value_struct[ParameterDictKey.TYPE] = param_obj.type
if param_obj.default_value != None:
value_struct[ParameterDictKey.DEFAULT] = param_obj.default_value
if param_obj.units != None:
value_struct[ParameterDictKey.UNITS] = param_obj.units
if param_obj.description != None:
value_struct[ParameterDictKey.DESCRIPTION] = param_obj.value_description
param_struct[ParameterDictKey.VALUE] = value_struct
return_struct[param_key] = param_struct
return return_struct
def load_strings(self, devel_path=None, filename=None):
"""
Load the metadata for a parameter set. starting by looking at the default
path in the egg and filesystem first, overriding what might have been
hard coded. If a system filename is given look there. If parameter
strings cannot be found, return False and carry on with hard coded values.
@param devel_path The path where the file can be found during development.
This is likely in the mi/instrument/make/model/flavor/resource directory.
@param filename The filename of the custom file to load, including as full a path
as desired (complete path recommended)
@retval True if something could be loaded, False otherwise
"""
log.debug("Loading parameter dictionary strings, dev path is %s, filename is %s",
devel_path, filename)
# if the file is in the default spot of the working path or egg, get that one
try:
metadata = self.get_metadata_from_source(devel_path, filename)
except IOError as e:
log.warning("Encountered IOError: %s", e)
            return False
        # Fill the fields
if metadata:
log.debug("Found parameter metadata, loading dictionary")
for (param_name, param_value) in metadata[ParameterDictKey.PARAMETERS].items():
log.trace("load_strings setting param name/value: %s / %s", param_name, param_value)
for (name, value) in param_value.items():
if param_name not in self._param_dict:
continue
if (name == ParameterDictKey.DESCRIPTION):
self._param_dict[param_name].description.description = value
if name == ParameterDictKey.DISPLAY_NAME:
self._param_dict[param_name].description.display_name = value
if name == ParameterDictKey.UNITS:
self._param_dict[param_name].description.units = value
if name == ParameterDictKey.TYPE:
self._param_dict[param_name].description.type = value
if name == ParameterDictKey.VALUE_DESCRIPTION:
self._param_dict[param_name].description.value_description = value
return True
return False # no metadata!
|
py | b416f218665a9adc7f126f213db9845ad6bbf06e | import argparse
import numpy as np
import gym
import torch
from agent import Agent,Env
from network import Net
if __name__ == "__main__":
agent = Agent()
env = Env()
training_records = []
running_score = 0
best_score = 0
state = env.reset()
for i_ep in range(100000):
env.die = False
        env.render = False
score = 0
state = env.reset()
if i_ep % 100 == 0:
            env.render = False
for t in range(10000):
action, a_logp = agent.select_action(state)
            state_, reward = env.step(action * np.array([0.2, 0.1]) + np.array([-0.15, 0.]), t)
            if env.die:
score += reward
break
if agent.store((state, action, a_logp, reward, state_)):
agent.update()
print('update')
score += reward
state = state_
running_score = running_score * 0.99 + score * 0.01
#print('Score: {:.2f}, Action taken: {}'.format(score, t+1))
if i_ep % 10 == 0:
print('Ep {}\tLast score: {:.2f}\tMoving average score: {:.2f}'.format(i_ep, score, running_score))
agent.save_param()
agent.save_param()
|
py | b416f260c390260acb6bab2eb881854746eac5ed | from django.contrib import messages
from django.shortcuts import render, redirect, get_object_or_404
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.http import require_http_methods
from ..proxies.job_category.management import JobCategoryManagementProxy
from ..helpers import labour_admin_required
from ..forms import RemoveJobCategoryForm
@labour_admin_required
@require_http_methods(["GET", "HEAD", "POST"])
def admin_jobcategories_view(request, vars, event):
job_categories = JobCategoryManagementProxy.objects.filter(event=event, app_label='labour')
if request.method == 'POST':
if 'remove' in request.POST:
remove_job_category_form = RemoveJobCategoryForm(request.POST)
if remove_job_category_form.is_valid():
job_category_id = remove_job_category_form.cleaned_data['remove']
job_category = get_object_or_404(JobCategoryManagementProxy, event=event, id=job_category_id)
if job_category.can_remove:
job_category.delete()
messages.success(request, _("The job category was removed."))
return redirect('admin_jobcategories_view', event.slug)
messages.error(request, _("Invalid request."))
return redirect('admin_jobcategories_view', event.slug)
vars.update(job_categories=job_categories)
return render(request, 'labour_admin_jobcategories_view.pug', vars)
|
py | b416f27847466c8fca5673fe4da3035ef9c834ab | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import pycocotools.coco as coco
from pycocotools.cocoeval import COCOeval
import numpy as np
import json
import os
import torch.utils.data as data
class COCO(data.Dataset):
num_classes = 80
default_resolution = [512, 512]
mean = np.array([0.40789654, 0.44719302, 0.47026115],
dtype=np.float32).reshape(1, 1, 3)
std = np.array([0.28863828, 0.27408164, 0.27809835],
dtype=np.float32).reshape(1, 1, 3)
def __init__(self, opt, split):
super(COCO, self).__init__()
self.data_dir = os.path.join(opt.data_dir, 'coco')
self.img_dir = os.path.join(self.data_dir, "images", '{}2014'.format(split))
if split == 'test':
self.annot_path = os.path.join(
self.data_dir, 'annotations',
        'image_info_test-dev2017.json')
else:
self.annot_path = os.path.join(
self.data_dir, 'annotations',
'instances_{}2014.json').format(split)
self.max_objs = 128
self.class_name = [
'__background__', 'person', 'bicycle', 'car', 'motorcycle', 'airplane',
'bus', 'train', 'truck', 'boat', 'traffic light', 'fire hydrant',
'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse',
'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack',
'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis',
'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove',
'skateboard', 'surfboard', 'tennis racket', 'bottle', 'wine glass',
'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich',
'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake',
'chair', 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',
'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',
'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',
'scissors', 'teddy bear', 'hair drier', 'toothbrush']
self._valid_ids = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
14, 15, 16, 17, 18, 19, 20, 21, 22, 23,
24, 25, 27, 28, 31, 32, 33, 34, 35, 36,
37, 38, 39, 40, 41, 42, 43, 44, 46, 47,
48, 49, 50, 51, 52, 53, 54, 55, 56, 57,
58, 59, 60, 61, 62, 63, 64, 65, 67, 70,
72, 73, 74, 75, 76, 77, 78, 79, 80, 81,
82, 84, 85, 86, 87, 88, 89, 90]
self.cat_ids = {v: i for i, v in enumerate(self._valid_ids)}
self.voc_color = [(v // 32 * 64 + 64, (v // 8) % 4 * 64, v % 8 * 32) \
for v in range(1, self.num_classes + 1)]
self._data_rng = np.random.RandomState(123)
self._eig_val = np.array([0.2141788, 0.01817699, 0.00341571],
dtype=np.float32)
self._eig_vec = np.array([
[-0.58752847, -0.69563484, 0.41340352],
[-0.5832747, 0.00994535, -0.81221408],
[-0.56089297, 0.71832671, 0.41158938]
], dtype=np.float32)
# self.mean = np.array([0.485, 0.456, 0.406], np.float32).reshape(1, 1, 3)
# self.std = np.array([0.229, 0.224, 0.225], np.float32).reshape(1, 1, 3)
self.split = split
self.opt = opt
    print('==> initializing coco 2014 {} data.'.format(split))
self.coco = coco.COCO(self.annot_path)
self.images = self.coco.getImgIds()
self.num_samples = len(self.images)
print('Loaded {} {} samples'.format(split, self.num_samples))
def _to_float(self, x):
return float("{:.2f}".format(x))
def convert_eval_format(self, all_bboxes):
# import pdb; pdb.set_trace()
detections = []
for image_id in all_bboxes:
for cls_ind in all_bboxes[image_id]:
category_id = self._valid_ids[cls_ind - 1]
for bbox in all_bboxes[image_id][cls_ind]:
bbox[2] -= bbox[0]
bbox[3] -= bbox[1]
score = bbox[4]
bbox_out = list(map(self._to_float, bbox[0:4]))
detection = {
"image_id": int(image_id),
"category_id": int(category_id),
"bbox": bbox_out,
"score": float("{:.2f}".format(score))
}
if len(bbox) > 5:
extreme_points = list(map(self._to_float, bbox[5:13]))
detection["extreme_points"] = extreme_points
detections.append(detection)
return detections
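  # Note (illustrative): the conversion above turns per-image, per-class
  # [x1, y1, x2, y2, score] boxes into COCO-style records with xywh boxes, e.g.
  #   [10., 20., 50., 80., 0.9] -> {"bbox": [10.0, 20.0, 40.0, 60.0], "score": 0.9}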
def __len__(self):
return self.num_samples
def save_results(self, results, save_dir):
json.dump(self.convert_eval_format(results),
open('{}/results.json'.format(save_dir), 'w'))
def run_eval(self, results, save_dir):
# result_json = os.path.join(save_dir, "results.json")
# detections = self.convert_eval_format(results)
# json.dump(detections, open(result_json, "w"))
self.save_results(results, save_dir)
coco_dets = self.coco.loadRes('{}/results.json'.format(save_dir))
coco_eval = COCOeval(self.coco, coco_dets, "bbox")
coco_eval.evaluate()
coco_eval.accumulate()
coco_eval.summarize()
|
py | b416f309cc808f90847907808419506d18aaa9dd | import base64
import hashlib
import json
import logging
import os
import time
from typing import Iterable
from botocore.exceptions import ClientError
from pyspark.sql import DataFrame, SparkSession
from pyspark.sql.types import Row, StructType
from pyspark.sql.utils import AnalysisException
from finitestate.firmware.bloomfilter import get_bloom_filter_key
from finitestate.firmware.schemas.schema_file_tree import file_tree_schema
from finitestate.common.aws.s3 import s3_client, get_bucket_and_key_from_uri
from finitestate.common.aws.catalogutils import using_boto3 as get_data_catalog
from finitestate.common.aws.glue import ExecutorGlobals
logger = logging.getLogger(__name__)
def load_dataframe_from_glue_table(database: str, table_name: str, glue_context) -> DataFrame:
return glue_context.spark_session.table('{}.{}'.format(database, table_name))
def downselect_dataframe(dataframe: DataFrame, list_of_columns_to_select) -> DataFrame:
return dataframe.select(*list_of_columns_to_select)
def publish_jsonl_to_s3(key, row, target_bucket, max_retries=5, validate_payload_checksum=False):
"""
Publishes individual rows to S3 as minified JSON. This assumes that the
entire 'row' element is written as a single JSON object to the target file.
'data_type' is the plugin-name or otherwise descriptor of the data that is
to be written. Additionally, 'row' must have a 'firmware_hash' field.
"""
payload = json.dumps(row.asDict(recursive=True) if isinstance(row, Row) else row, separators=(',', ':'))
output = {'Bucket': target_bucket, 'Key': key}
other_kwargs = {}
if validate_payload_checksum:
md5 = base64.b64encode(hashlib.md5(payload.encode()).digest()).decode()
output['ContentMD5'] = md5
other_kwargs['ContentMD5'] = md5
retry = 0
while retry < max_retries:
try:
response = ExecutorGlobals.s3_client().put_object(Bucket=target_bucket, Key=key, Body=payload, **other_kwargs)
output['ETag'] = response['ETag']
output['Attempts'] = retry + 1
return output
except ClientError:
retry += 1
time.sleep(2**retry)
output['Attempts'] = retry + 1
return output
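# Illustrative usage sketch (bucket name and key layout are hypothetical):
#   result = publish_jsonl_to_s3(
#       key='file_tree/{}.jsonl'.format(firmware_hash),
#       row={'firmware_hash': firmware_hash, 'path': '/bin/busybox'},
#       target_bucket='example-firmware-bucket',
#       validate_payload_checksum=True,
#   )
#   if not result.get('ETag'):
#       logger.warning('upload failed after %s attempt(s)', result['Attempts'])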
def publish_custom_cloudwatch_glue_metric(cloudwatch_client, job_name, job_run_ids, metric_name, value, unit=None, namespace=None):
for job_run_id in job_run_ids:
response = cloudwatch_client.put_metric_data(
MetricData=[
{
'MetricName': metric_name,
'Dimensions': [{
'Name': 'JobName',
'Value': job_name
}, {
'Name': 'JobRunId',
'Value': job_run_id
}],
'Unit': unit or 'None',
'Value': value
},
],
Namespace=namespace or 'Glue',
)
if not response or 200 != response.get('ResponseMetadata', {}).get('HTTPStatusCode'):
raise Exception('Failed to publish metric: {}'.format(response))
def publish_df_as_jsonl(df, get_key_for_row, target_bucket, row_formatter=None):
# yapf: disable
try:
return df.rdd.map(
lambda row: publish_jsonl_to_s3(get_key_for_row(row), row_formatter(row) if row_formatter else row, target_bucket, validate_payload_checksum=True)
).filter(
lambda output: output.get('ETag') # was written to S3
).count()
except Exception as e:
print('Failed to write row as jsonl: {}'.format(e))
return 0
# yapf: enable
def read_firmware_file_tree(glue_database: str, fw_sha256: str) -> DataFrame:
"""
Reads a firmware file tree from the jsonl files backing the file_tree table defined in the Glue Data Catalog.
    :param glue_database: The name of the Glue database from which to read the files, e.g. firmware_prod.
:param fw_sha256: The SHA 256 of the firmware to read.
"""
file_tree_path = get_data_catalog().get_table_path(glue_database, 'file_tree')
return SparkSession.builder.getOrCreate().read.json(os.path.join(file_tree_path, f'{fw_sha256}.jsonl'), schema=file_tree_schema)
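# Illustrative call (database name and SHA-256 value are hypothetical):
#   tree_df = read_firmware_file_tree('firmware_prod', fw_sha256)
#   tree_df.select('file_hash').show(5)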
def read_firmware_level_data(glue_database: str, table_name: str, fw_sha256: str, schema: StructType, extension: str = 'jsonl') -> DataFrame:
"""
Reads a json/jsonl dataset from a single file identified by a firmware sha256, with the path determined by the
table projecting that data in the Glue Data Catalog.
Args:
glue_database: The name of the Glue database from which to read the file, e.g. firmware_prod
table_name: The name of the table to read
fw_sha256: The SHA 256 of the firmware to read
schema: The PySpark schema for the returned data
extension: The file extension of the file, typically jsonl which is the default.
Returns: A PySpark DataFrame of the data from object storage, or an empty DataFrame with the appropriate schema
"""
path = get_data_catalog().get_table_path(glue_database, table_name)
spark = SparkSession.builder.getOrCreate()
try:
return spark.read.json(os.path.join(path, f'{fw_sha256}.{extension}'), schema=schema)
except AnalysisException as e:
logger.exception(f'Failed to read firmware {fw_sha256} data from {path} - returning empty DataFrame')
return spark.createDataFrame(spark.sparkContext.emptyRDD(), schema)
def read_sbom(glue_database: str, fw_sha256: str) -> DataFrame:
"""
Reads an SBoM from the json files backing the sbom table defined in the Glue Data Catalog.
    :param glue_database: The name of the Glue database from which to read the files, e.g. firmware_prod.
:param fw_sha256: The SHA 256 of the firmware whose SBoM should be read.
"""
from finitestate.firmware.schemas.schema_sbom import sbom_schema
sbom_path = get_data_catalog().get_table_path(glue_database, 'sbom')
return SparkSession.builder.getOrCreate().read.json(os.path.join(sbom_path, f'{fw_sha256}.json'), schema=sbom_schema)
def read_firmware_analytics_from_tree(glue_database: str, table_name: str, file_tree_df: DataFrame, schema: StructType) -> DataFrame:
"""
Reads a firmware analytic (e.g. crypto_material) from the jsonl files backing the table for that analytic in the Glue Data Catalog. The
set of file hashes to read are obtained from the supplied file_tree DataFrame, which is only required to have the `file_hash` column.
    :param glue_database: The name of the Glue database from which to read the files, e.g. firmware_prod.
:param table_name: The name of the table to read
:param file_tree_df: The file_tree DataFrame
:param schema: The PySpark schema for the returned data.
"""
path = get_data_catalog().get_table_path(glue_database, table_name)
bucket, key_prefix = get_bucket_and_key_from_uri(path)
def read_file(file_hash: str):
try:
for line in ExecutorGlobals.s3_client().get_object(Bucket=bucket, Key=os.path.join(key_prefix, file_hash) + '.jsonl')['Body'].iter_lines():
yield line.decode('utf-8')
        except Exception:
            # object missing or unreadable -- yield nothing for this file hash
            return
# yapf: disable
file_hashes_rdd = file_tree_df.select(
'file_hash'
).dropna().distinct().rdd.map(
lambda row: row.file_hash
)
redis_host = os.environ.get('REDIS_HOST')
if redis_host:
redis_port = int(os.environ.get('REDIS_PORT', '6379'))
bloom_filter_key = get_bloom_filter_key(key_prefix)
def check_bloom_filters(partition: Iterable[str]):
from more_itertools import chunked
from finitestate.firmware.bloomfilter.client.redis import RedisBloomFilterClient
client = RedisBloomFilterClient(
redis_client=ExecutorGlobals.redisbloom_client(host=redis_host, port=redis_port)
)
for file_hashes in chunked(partition, n=10000):
yield from client.exists(key=bloom_filter_key, objects=file_hashes)
if ExecutorGlobals.redisbloom_client(host=redis_host, port=redis_port).exists(bloom_filter_key):
logger.info(f'Filtering {glue_database}.{table_name} file hashes according to bloom filter membership in {bloom_filter_key}')
file_hashes_rdd = file_hashes_rdd.mapPartitions(
check_bloom_filters
)
else:
logger.warning(f'Performing exhaustive search for files in {key_prefix}; check plugin configuration to enable use of bloom filters')
return SparkSession.builder.getOrCreate().read.json(
file_hashes_rdd.flatMap(read_file).filter(lambda x: x), schema=schema
)
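# Illustrative pipeline sketch (table name and schema are hypothetical):
#   tree_df = read_firmware_file_tree('firmware_prod', fw_sha256)
#   crypto_df = read_firmware_analytics_from_tree(
#       'firmware_prod', 'crypto_material', tree_df, schema=crypto_material_schema)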
|
py | b416f33b4b0e0f63d95e45fd789a4e6fa43b6961 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 9 09:05:20 2021
@author: pearlsaldanha
"""
import os
os.chdir("/Users/admin")
from TRACER import *
atlas = AtlasLoader(atlas_folder='/Users/admin/TRACER/waxholm_atlas', atlas_version='v4')
presurgery = ProbesInsertion(atlas, probe_folder='/Users/admin/TRACER/probes')
presurgery.re_load_probes('Probe0')
probe_folder = '/Users/admin/TRACER/probes'
vis3d_presurgery = vis_inserted_probes(atlas, probe_folder)
vis3d_presurgery.vis2d()
vis3d_presurgery.vis3d()
preprocess_hist = preprocess_histology('/Users/admin/TRACER/histology')
register_probes = ProbesRegistration(atlas, processed_histology_folder='/Users/admin/TRACER/histology/processed', show_hist=True)
probe_folder = '/Users/admin/TRACER/histology/processed/probes'
vis3dres = vis_registered_probes(atlas,probe_folder)
vis3dres.vis3d()
vis3dres.vis2d()
|
py | b416f42b9a79e4fc99d8bc3b0a161a6bcb86e656 | ##############################################################################
# Copyright (c) 2016 Huawei Technologies Co.,Ltd and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
from __future__ import absolute_import
import os
from functools import reduce
import pkg_resources
from yardstick.common.utils import parse_yaml
dirname = os.path.dirname
abspath = os.path.abspath
join = os.path.join
sep = os.path.sep
CONF = {}
def get_param(key, default=''):
# we have to defer this to runtime so that we can mock os.environ.get in unittests
conf_file = os.environ.get('CONF_FILE', '/etc/yardstick/yardstick.yaml')
# don't re-parse yaml for each lookup
if not CONF:
CONF.update(parse_yaml(conf_file))
try:
return reduce(lambda a, b: a[b], key.split('.'), CONF)
except KeyError:
if not default:
raise
return default
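# Illustrative behaviour note (not from the source): with
# CONF == {'api': {'server_ip': '10.0.0.1'}}, the dotted-key reduce gives
#   get_param('api.server_ip')           -> '10.0.0.1'
#   get_param('api.missing', 'fallback') -> 'fallback'
#   get_param('api.missing')             -> raises KeyError (no default given)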
try:
SERVER_IP = get_param('api.server_ip')
except KeyError:
try:
from pyroute2 import IPDB
except ImportError:
SERVER_IP = '172.17.0.1'
else:
with IPDB() as ip:
try:
SERVER_IP = ip.routes['default'].gateway
except KeyError:
# during unittests ip.routes['default'] can be invalid
SERVER_IP = '127.0.0.1'
if not SERVER_IP:
SERVER_IP = '127.0.0.1'
# dir
CONF_DIR = get_param('dir.conf', '/etc/yardstick')
IMAGE_DIR = get_param('dir.images', '/home/opnfv/images/')
REPOS_DIR = get_param('dir.repos', '/home/opnfv/repos/yardstick')
RELENG_DIR = get_param('dir.releng', '/home/opnfv/repos/releng')
LOG_DIR = get_param('dir.log', '/tmp/yardstick/')
YARDSTICK_ROOT_PATH = dirname(
dirname(abspath(pkg_resources.resource_filename(__name__, "")))) + sep
TASK_LOG_DIR = get_param('dir.tasklog', '/var/log/yardstick/')
CONF_SAMPLE_DIR = join(REPOS_DIR, 'etc/yardstick/')
ANSIBLE_DIR = join(REPOS_DIR, 'ansible')
SAMPLE_CASE_DIR = join(REPOS_DIR, 'samples')
TESTCASE_DIR = join(YARDSTICK_ROOT_PATH, 'tests/opnfv/test_cases/')
TESTSUITE_DIR = join(YARDSTICK_ROOT_PATH, 'tests/opnfv/test_suites/')
DOCS_DIR = join(REPOS_DIR, 'docs/testing/user/userguide/')
OPENSTACK_CONF_DIR = '/etc/openstack'
# file
OPENRC = get_param('file.openrc', '/etc/yardstick/openstack.creds')
ETC_HOSTS = get_param('file.etc_hosts', '/etc/hosts')
CONF_FILE = join(CONF_DIR, 'yardstick.conf')
POD_FILE = join(CONF_DIR, 'pod.yaml')
CLOUDS_CONF = join(OPENSTACK_CONF_DIR, 'clouds.yml')
K8S_CONF_FILE = join(CONF_DIR, 'admin.conf')
CONF_SAMPLE_FILE = join(CONF_SAMPLE_DIR, 'yardstick.conf.sample')
FETCH_SCRIPT = get_param('file.fetch_script', 'utils/fetch_os_creds.sh')
FETCH_SCRIPT = join(RELENG_DIR, FETCH_SCRIPT)
CLEAN_IMAGES_SCRIPT = get_param('file.clean_image_script',
'tests/ci/clean_images.sh')
CLEAN_IMAGES_SCRIPT = join(REPOS_DIR, CLEAN_IMAGES_SCRIPT)
LOAD_IMAGES_SCRIPT = get_param('file.load_image_script',
'tests/ci/load_images.sh')
LOAD_IMAGES_SCRIPT = join(REPOS_DIR, LOAD_IMAGES_SCRIPT)
DEFAULT_OUTPUT_FILE = get_param('file.output_file', '/tmp/yardstick.out')
DEFAULT_HTML_FILE = get_param('file.html_file', '/tmp/yardstick.htm')
REPORTING_FILE = get_param('file.reporting_file', '/tmp/report.html')
# influxDB
INFLUXDB_IP = get_param('influxdb.ip', SERVER_IP)
INFLUXDB_PORT = get_param('influxdb.port', 8086)
INFLUXDB_USER = get_param('influxdb.username', 'root')
INFLUXDB_PASS = get_param('influxdb.password', 'root')
INFLUXDB_DB_NAME = get_param('influxdb.db_name', 'yardstick')
INFLUXDB_IMAGE = get_param('influxdb.image', 'tutum/influxdb')
INFLUXDB_TAG = get_param('influxdb.tag', '0.13')
INFLUXDB_DASHBOARD_PORT = 8083
# grafana
GRAFANA_IP = get_param('grafana.ip', SERVER_IP)
GRAFANA_PORT = get_param('grafana.port', 3000)
GRAFANA_USER = get_param('grafana.username', 'admin')
GRAFANA_PASS = get_param('grafana.password', 'admin')
GRAFANA_IMAGE = get_param('grafana.image', 'grafana/grafana')
GRAFANA_TAG = get_param('grafana.tag', '4.4.3')
GRAFANA_MAPPING_PORT = 1948
# api
API_PORT = 5000
DOCKER_URL = 'unix://var/run/docker.sock'
INSTALLERS = ['apex', 'compass', 'fuel', 'joid']
SQLITE = 'sqlite:////tmp/yardstick.db'
API_SUCCESS = 1
API_ERROR = 2
TASK_NOT_DONE = 0
TASK_DONE = 1
TASK_FAILED = 2
BASE_URL = 'http://localhost:5000'
ENV_ACTION_API = BASE_URL + '/yardstick/env/action'
ASYNC_TASK_API = BASE_URL + '/yardstick/asynctask'
# general
TESTCASE_PRE = 'opnfv_yardstick_'
TESTSUITE_PRE = 'opnfv_'
|
py | b416f4a4103fe6675a045cdff38f157cc8161d27 | from libkludge.type_info import TypeInfo
from libkludge.selector import Selector
from libkludge.dir_qual_type_info import DirQualTypeInfo
from libkludge.cpp_type_expr_parser import *
class KLExtTypeAliasTypeInfo(TypeInfo):
def __init__(self, jinjenv, undq_cpp_type_expr, kl_type_name):
TypeInfo.__init__(
self,
jinjenv,
kl_name_base = kl_type_name,
lib_expr = undq_cpp_type_expr,
)
def build_codec_lookup_rules(self):
tds = TypeInfo.build_codec_lookup_rules(self)
tds["conv"]["*"] = "protocols/conv/builtin/none"
tds["result"]["decl_and_assign_lib_begin"] = "types/builtin/kl_ext_type_alias/result"
tds["result"]["decl_and_assign_lib_end"] = "types/builtin/kl_ext_type_alias/result"
tds["result"]["indirect_lib_to_edk"] = "types/builtin/kl_ext_type_alias/result"
tds["repr"]["defn_kl"] = "types/builtin/kl_ext_type_alias/repr"
return tds
class KLExtTypeAliasSelector(Selector):
def __init__(self, ext, cpp_type_expr, kl_type_name):
Selector.__init__(self, ext)
self.cpp_type_expr = cpp_type_expr
self.kl_type_name = kl_type_name
def get_desc(self):
return "KLExtTypeAlias:%s" % str(self.cpp_type_expr)
def maybe_create_dqti(self, type_mgr, cpp_type_expr):
undq_cpp_type_expr, dq = cpp_type_expr.get_undq()
if undq_cpp_type_expr == self.cpp_type_expr:
return DirQualTypeInfo(
dq,
KLExtTypeAliasTypeInfo(
self.jinjenv,
self.cpp_type_expr,
self.kl_type_name,
)
)
|
gyp | b416f606db3d7848a4eaa30f4e7343c78d5bff28 | {
'targets': [
{
'target_name': 'oniguruma',
'type': 'static_library',
'conditions': [
['OS=="win"', {
'msvs_disabled_warnings': [
4244, # conversion from '__int64' to 'int', possible loss of data
],
'defines': [
'ONIG_EXTERN=extern',
],
}],
['OS=="linux"', {
'cflags': [
'-w',
],
}],
],
'direct_dependent_settings': {
'include_dirs': [
'deps/onig'
],
},
'include_dirs': [
'deps/onig',
'deps/onig/enc',
],
'sources': [
'deps/onig/onig_init.c',
'deps/onig/oniggnu.h',
'deps/onig/onigposix.h',
'deps/onig/oniguruma.h',
'deps/onig/regcomp.c',
'deps/onig/regenc.c',
'deps/onig/regenc.h',
'deps/onig/regerror.c',
'deps/onig/regexec.c',
'deps/onig/regext.c',
'deps/onig/reggnu.c',
'deps/onig/regint.h',
'deps/onig/regparse.c',
'deps/onig/regparse.h',
'deps/onig/regposerr.c',
'deps/onig/regposix.c',
'deps/onig/regsyntax.c',
'deps/onig/regtrav.c',
'deps/onig/regversion.c',
'deps/onig/st.c',
'deps/onig/st.h',
'deps/onig/enc/ascii.c',
'deps/onig/enc/big5.c',
'deps/onig/enc/cp1251.c',
'deps/onig/enc/euc_jp_prop.c',
'deps/onig/enc/euc_jp.c',
'deps/onig/enc/euc_kr.c',
'deps/onig/enc/euc_tw.c',
'deps/onig/enc/gb18030.c',
'deps/onig/enc/iso8859_1.c',
'deps/onig/enc/iso8859_2.c',
'deps/onig/enc/iso8859_3.c',
'deps/onig/enc/iso8859_4.c',
'deps/onig/enc/iso8859_5.c',
'deps/onig/enc/iso8859_6.c',
'deps/onig/enc/iso8859_7.c',
'deps/onig/enc/iso8859_8.c',
'deps/onig/enc/iso8859_9.c',
'deps/onig/enc/iso8859_10.c',
'deps/onig/enc/iso8859_11.c',
'deps/onig/enc/iso8859_13.c',
'deps/onig/enc/iso8859_14.c',
'deps/onig/enc/iso8859_15.c',
'deps/onig/enc/iso8859_16.c',
'deps/onig/enc/koi8.c',
'deps/onig/enc/koi8_r.c',
'deps/onig/enc/mktable.c',
'deps/onig/enc/sjis_prop.c',
'deps/onig/enc/sjis.c',
'deps/onig/enc/unicode.c',
# 'deps/onig/enc/unicode_egcb_data.c',
# 'deps/onig/enc/unicode_fold_data.c',
'deps/onig/enc/unicode_fold1_key.c',
'deps/onig/enc/unicode_fold2_key.c',
'deps/onig/enc/unicode_fold3_key.c',
# 'deps/onig/enc/unicode_property_data_posix.c',
# 'deps/onig/enc/unicode_property_data.c',
'deps/onig/enc/unicode_unfold_key.c',
# 'deps/onig/enc/unicode_wb_data.c',
'deps/onig/enc/utf16_be.c',
'deps/onig/enc/utf16_le.c',
'deps/onig/enc/utf32_be.c',
'deps/onig/enc/utf32_le.c',
'deps/onig/enc/utf8.c',
],
},
{
'target_name': 'onig_scanner',
'dependencies': [
'oniguruma'
],
'include_dirs': [ '<!(node -e "require(\'nan\')")' ],
'sources': [
'src/onig-result.cc',
'src/onig-reg-exp.cc',
'src/onig-scanner.cc',
'src/onig-scanner-worker.cc',
'src/onig-searcher.cc',
'src/onig-string.cc'
],
'conditions': [
['OS=="mac"', {
'xcode_settings': {
'OTHER_CPLUSPLUSFLAGS': ['-std=c++11', '-stdlib=libc++'],
'MACOSX_DEPLOYMENT_TARGET': '10.7.0',
}
}],
['OS in "linux solaris"', {
'cflags': [
'-std=c++0x',
'-Wno-unused-result',
'-Wno-missing-field-initializers',
],
'cflags_cc!': [
'-fno-rtti'
]
}],
['OS=="win"', {
'msvs_disabled_warnings': [
4244, # conversion from 'double' to 'int', possible loss of data
4267, # conversion from 'size_t' to 'int', possible loss of data
4530, # C++ exception handler used, but unwind semantics are not enabled
],
'msvs_settings': {
'VCCLCompilerTool' : {
'AdditionalOptions' : ['/EHsc']
}
},
'defines': [
'ONIG_EXTERN=extern',
],
}],
['OS=="freebsd"', {
'cflags': [
'-std=c++0x',
]
}]
]
}
]
}
|
py | b416f7603d9ffffa28cc896cc08c599401cd0d70 | # -*- "coding: utf-8" -*-
import synonyms
import numpy as np
from cosine import Cosine
cosine = Cosine(n_recommendation=4)
with open("vocabulary_filter.txt", "r", encoding="utf-8") as f:
vocabulary = f.read().split()[:-1]
vectors = []
for word in vocabulary:
    try:
        vectors.append(synonyms.v(word))  # look up the word vector via synonyms
    except Exception:
        pass  # skip words that have no vector
vectors = np.array(vectors)
indices, similarities = cosine.cal_similarity(vectors, vectors)  # cosine similarity between all word pairs
with open("method_synonyms.csv", "w", encoding="utf-8") as f:
for nrow, row in enumerate(indices):
for ncol, col in enumerate(row):
            if ncol == 0:  # skip the first match, which is the word itself
                continue
f.write("{},{},{}\n".format(vocabulary[nrow], vocabulary[col], similarities[nrow][ncol]))
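# Note (illustrative): cal_similarity is assumed to rank neighbours by cosine
# similarity, cos(u, v) = dot(u, v) / (||u|| * ||v||). A minimal NumPy check
# for one pair of vectors u, v:
#   cos = float(np.dot(u, v) / (np.linalg.norm(u) * np.linalg.norm(v)))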
|
py | b416f8015e195943bcce2f8689b22e148fd3ef26 | from __future__ import absolute_import
import numpy as np
from matplotlib import pyplot as plt
# from astroML.plotting.tools import draw_ellipse
from astroML.plotting import setup_text_plots
# from sklearn.mixture import GMM as skl_GMM
def plot_bic(param_range,bics,lowest_comp):
plt.clf()
setup_text_plots(fontsize=16, usetex=False)
fig = plt.figure(figsize=(12, 6))
plt.plot(param_range,bics,color='blue',lw=2, marker='o')
plt.text(lowest_comp, bics.min() * 0.97 + .03 * bics.max(), '*',
fontsize=14, ha='center')
plt.xticks(param_range)
plt.ylim(bics.min() - 0.05 * (bics.max() - bics.min()),
bics.max() + 0.05 * (bics.max() - bics.min()))
plt.xlim(param_range.min() - 1, param_range.max() + 1)
plt.xticks(param_range,fontsize=14)
plt.yticks(fontsize=14)
plt.xlabel('Number of components',fontsize=18)
plt.ylabel('BIC score',fontsize=18)
plt.show()
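# Minimal usage sketch (synthetic BIC scores, hypothetical values):
#   import numpy as np
#   n_components = np.arange(1, 8)
#   bics = np.array([420., 360., 330., 325., 331., 340., 352.])
#   plot_bic(n_components, bics, n_components[bics.argmin()])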
|
py | b416f87be2a5cf3159df956319a6806498c1a39e | #
# PySNMP MIB module MOXA-NP6000-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/MOXA-NP6000-MIB
# Produced by pysmi-0.3.4 at Wed May 1 14:13:47 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ModuleIdentity, Counter32, TimeTicks, Bits, MibIdentifier, Gauge32, NotificationType, iso, Unsigned32, enterprises, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, ObjectIdentity, Counter64, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "Counter32", "TimeTicks", "Bits", "MibIdentifier", "Gauge32", "NotificationType", "iso", "Unsigned32", "enterprises", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "ObjectIdentity", "Counter64", "IpAddress")
DisplayString, MacAddress, DateAndTime, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "MacAddress", "DateAndTime", "TextualConvention")
moxa = MibIdentifier((1, 3, 6, 1, 4, 1, 8691))
nport = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2))
np6000 = ModuleIdentity((1, 3, 6, 1, 4, 1, 8691, 2, 8))
if mibBuilder.loadTexts: np6000.setLastUpdated('200607120000Z')
if mibBuilder.loadTexts: np6000.setOrganization('Moxa Technologies Co.')
if mibBuilder.loadTexts: np6000.setContactInfo('Email: [email protected] Tel : +886 2 89191230 ext.300')
if mibBuilder.loadTexts: np6000.setDescription('The MIB module for Moxa NPort 6000 series specific information.')
swMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1))
class PortList(TextualConvention, OctetString):
description = "Each octet within this value specifies a set of eight ports, with the first octet specifying ports 1 through 8, the second octet specifying ports 9 through 16, etc. Within each octet, the most significant bit represents the lowest numbered port, and the least significant bit represents the highest numbered port. Thus, each port of the bridge is represented by a single bit within the value of this object. If that bit has a value of '1' then that port is included in the set of ports; the port is not included if its bit has a value of '0'."
status = 'current'
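# Illustrative helper (not part of the generated MIB): decode a PortList octet
# string into port numbers, following the description above -- the most
# significant bit of each octet maps to the lowest-numbered port of that
# octet's group of eight.
def _example_decode_port_list(octets):
    ports = []
    for octet_index, octet in enumerate(octets):
        for bit in range(8):
            if octet & (0x80 >> bit):  # MSB first == lowest port number
                ports.append(octet_index * 8 + bit + 1)
    return ports
# e.g. _example_decode_port_list(b'\xc0')     -> [1, 2]
#      _example_decode_port_list(b'\x00\x01') -> [16]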
overview = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 1))
modelName = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: modelName.setStatus('current')
if mibBuilder.loadTexts: modelName.setDescription("The model name of the NPort. The possible values of this parameter are: 'NP6150' for NPort 6150, 'NP6250' for NPort 6250, 'NP6250-S-SC' for NPort 6250-S-SC, 'NP6250-M-SC' for NPort 6250-M-SC, 'NP6450' for NPort 6450, 'NP6650-8' for NPort 6650-8, and 'NP6650-16' for NPort 6650-16.")
serialNumber = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialNumber.setStatus('current')
if mibBuilder.loadTexts: serialNumber.setDescription('The serial number of the NPort. Every NPort 6000 series device server is assigned a unique serial number before it is shipped.')
firmwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: firmwareVersion.setStatus('current')
if mibBuilder.loadTexts: firmwareVersion.setDescription('The version of the firmware currently running on the NPort.')
macAddress = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: macAddress.setStatus('current')
if mibBuilder.loadTexts: macAddress.setDescription("The MAC address of the NPort's Ethernet interface.")
viewLanSpeed = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: viewLanSpeed.setStatus('current')
if mibBuilder.loadTexts: viewLanSpeed.setDescription("The current speed and link status of the NPort's built-in LAN port. The possible values are 'No link', '10M/Link,' or '100M/Link.'")
viewLanModuleSpeed = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: viewLanModuleSpeed.setStatus('current')
if mibBuilder.loadTexts: viewLanModuleSpeed.setDescription("The current speed and link status of the network module's LAN ports, if the module is installed. The value includes two parts. Each part indicates the status of one of the LAN ports. The possible values are 'No link', '10M/Link,' '100M/Link,' or continuous dashes to indicate that the LAN module is not installed.")
upTime = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: upTime.setStatus('current')
if mibBuilder.loadTexts: upTime.setDescription('The time (in human-readable notation) since the system was was last re-initialized.')
moduleType = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: moduleType.setStatus('current')
if mibBuilder.loadTexts: moduleType.setDescription('The module name currently plugged into the NPort.')
basicSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 2))
serverSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 2, 1))
serverName = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 2, 1, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: serverName.setStatus('current')
if mibBuilder.loadTexts: serverName.setDescription('A text string used to identify the NPort.')
serverLocation = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 2, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: serverLocation.setStatus('current')
if mibBuilder.loadTexts: serverLocation.setDescription('A text string used to identify the location of the NPort. This option is useful for specifying the location or application of different NPort device servers.')
timeSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 2, 2))
timeZone = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 2, 2, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: timeZone.setStatus('current')
if mibBuilder.loadTexts: timeZone.setDescription('The time-zone index of the NPort.')
localTime = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 2, 2, 2), DateAndTime()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: localTime.setStatus('current')
if mibBuilder.loadTexts: localTime.setDescription('The system time in seconds since 1970/1/1.')
timeServer = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 2, 2, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: timeServer.setStatus('current')
if mibBuilder.loadTexts: timeServer.setDescription("The NPort 6000 series device server uses SNTP (RFC-1769) for auto time calibration. Input the correct 'Time server' IP address or domain name. Once the NPort 6000 series device server is configured with the correct Time server address, it will request time information from the Time server every 10 minutes.")
networkSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3))
ipConfiguration = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("static", 0), ("dhcp", 1), ("dhcp-BOOTP", 2), ("bootp", 3), ("pppoe", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ipConfiguration.setStatus('current')
if mibBuilder.loadTexts: ipConfiguration.setDescription('The IP configuration mode of the NPort. You can choose from four possible IP configuration modes: Static, DHCP, DHCP/BOOTP, BOOTP, and PPPoE.')
sysIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sysIpAddress.setStatus('current')
if mibBuilder.loadTexts: sysIpAddress.setDescription('An IP address is a number assigned to a network device (such as a computer) as a permanent address on the network. Computers use the IP address to identify and talk to each other over the network. Choose a proper IP address that is unique and valid in your network environment.')
netMask = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netMask.setStatus('current')
if mibBuilder.loadTexts: netMask.setDescription('A subnet mask represents all of the network hosts at one geographic location, in one building, or on the same local area network. When a packet is sent out over the network, the NPort 6000 series device server will use the subnet mask to check whether the TCP/IP host specified in the packet is on the local network segment. If the address is on the same network segment as the NPort 6000 series device server, a connection is established directly from the NPort 6000 series device server. Otherwise, the connection is established through the given default gateway.')
defaultGateway = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: defaultGateway.setStatus('current')
if mibBuilder.loadTexts: defaultGateway.setDescription('A gateway is a network computer that acts as an entrance to another network. Usually, the computers that control traffic within the network or at the local Internet service provider are gateway nodes. The NPort 6000 series device server needs to know the IP address of the default gateway computer in order to communicate with the hosts outside the local network environment. For correct gateway IP address information, consult with the network administrator.')
dnsServer1IpAddr = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsServer1IpAddr.setStatus('current')
if mibBuilder.loadTexts: dnsServer1IpAddr.setDescription("The first DNS server address. When the user wants to visit a particular website, the computer asks a Domain Name System (DNS) server for the website's correct IP address, and then the computer uses the response to connect to the web server. DNS is the way that Internet domain names are identified and translated into IP addresses. A domain name is an alphanumeric name, such as moxa.com, that is usually easier to remember. A DNS server is a host that translates this kind of text-based domain name into the numeric IP address used to establish a TCP/IP connection.")
dnsServer2IpAddr = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dnsServer2IpAddr.setStatus('current')
if mibBuilder.loadTexts: dnsServer2IpAddr.setDescription("The second DNS server address. When the user wants to visit a particular website, the computer asks a Domain Name System (DNS) server for the website's correct IP address, and then the computer uses the response to connect to the web server. DNS is the way that Internet domain names are identified and translated into IP addresses. A domain name is an alphanumeric name, such as moxa.com, that is usually easier to remember. A DNS server is a host that translates this kind of text-based domain name into the numeric IP address used to establish a TCP/IP connection.")
pppoeUserAccount = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pppoeUserAccount.setStatus('current')
if mibBuilder.loadTexts: pppoeUserAccount.setDescription('The user account used by the NPort to access the Internet using PPPoE. For dynamic broad band networks such as xDSL or Cable Modem, users must enter the username and password that they received from their ISP to establish a network connection.')
pppoePassword = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pppoePassword.setStatus('current')
if mibBuilder.loadTexts: pppoePassword.setDescription('The password used by the NPort to access the Internet using PPPoE. For dynamic broad band networks such as xDSL or Cable Modem, users must enter the username and password that they received from their ISP to establish a network connection.')
winsFunction = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: winsFunction.setStatus('current')
if mibBuilder.loadTexts: winsFunction.setDescription('Enable or disable the WINS server.')
winsServer = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 10), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: winsServer.setStatus('current')
if mibBuilder.loadTexts: winsServer.setDescription("If a WINS Server is connected to the network, use this field to record the WINS Server's IP address. TCP/IP uses IP addresses to identify hosts, but users often use symbolic names, such as computer names. The WINS Server, which uses NetBIOS over TCP/IP, contains a dynamic database to map computer names to IP addresses.")
lan1Speed = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("auto-Negation", 0), ("tenMbps-Half", 1), ("tenMbps-Full", 2), ("hundredMbps-Half", 3), ("hundredMbps-Full", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan1Speed.setStatus('current')
if mibBuilder.loadTexts: lan1Speed.setDescription('IEEE802.3 Ethernet supports the auto negotiation speed function to get suitable speeds. However, for connecting to some switches/hubs, the communication speed must be fixed at 100 Mbps or 10 Mbps. Users can use this function to fix the communication speed.')
routingProtocol = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("rip-1", 1), ("rip-2", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: routingProtocol.setStatus('current')
if mibBuilder.loadTexts: routingProtocol.setDescription('The routing protocol used by the NPort. This value could be none, RIP-1, or RIP-2.')
gratuitousArp = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gratuitousArp.setStatus('current')
if mibBuilder.loadTexts: gratuitousArp.setDescription('Enable or disable the gratuitous ARP function. For some applications, users need the NPort 6000 series device server to send broadcast packets to update the ARP table on the server. If the customer enables this function and sets the send period, the NPort 6000 series device server will send broadcast packets periodically.')
gratuitousArpSendPeriod = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 3, 14), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gratuitousArpSendPeriod.setStatus('current')
if mibBuilder.loadTexts: gratuitousArpSendPeriod.setDescription('The send period of the gratuitous ARP function.')
portSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4))
opModeSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1))
opMode = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 1))
opModePortTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 1, 1), )
if mibBuilder.loadTexts: opModePortTable.setStatus('current')
if mibBuilder.loadTexts: opModePortTable.setDescription("The serial ports' operation mode table.")
opModePortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 1, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: opModePortEntry.setStatus('current')
if mibBuilder.loadTexts: opModePortEntry.setDescription("The serial port's operation mode entry.")
portIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portIndex.setStatus('current')
if mibBuilder.loadTexts: portIndex.setDescription('The serial port index.')
portApplication = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 4, 11, 13, 12, 2, 3, 6, 1))).clone(namedValues=NamedValues(("disable", 0), ("device-Control", 4), ("socket", 11), ("pair-Connection", 13), ("ethernet-Modem", 12), ("terminal", 2), ("reverse-Terminal", 3), ("printer", 6), ("dial-InOut", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portApplication.setStatus('current')
if mibBuilder.loadTexts: portApplication.setDescription('The application of the serial port. Note that changing this setting may also change the operation mode.')
portMode = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20))).clone(namedValues=NamedValues(("pair-Slave", 0), ("pair-Master", 1), ("aspp", 2), ("raw-PRN", 3), ("slip", 4), ("slipd", 5), ("ppp", 6), ("disable", 7), ("telnetd", 8), ("dynamic", 9), ("tcp-Server", 10), ("lpd-PRN", 11), ("ethernet-Modem", 12), ("tcp-Client", 13), ("udp", 14), ("pppd", 15), ("term-ASC", 16), ("term-BIN", 17), ("reverse-SSH", 18), ("ssh", 19), ("rfc-2217", 20)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portMode.setStatus('current')
if mibBuilder.loadTexts: portMode.setDescription('The operation mode of the serial port. Note that changing this setting may also change the application.')
application = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2))
deviceControl = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1))
deviceControlTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1), )
if mibBuilder.loadTexts: deviceControlTable.setStatus('current')
if mibBuilder.loadTexts: deviceControlTable.setDescription('The Device Control application table.')
deviceControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: deviceControlEntry.setStatus('current')
if mibBuilder.loadTexts: deviceControlEntry.setDescription('The Device Control application entry.')
deviceControlTcpAliveCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceControlTcpAliveCheck.setStatus('current')
if mibBuilder.loadTexts: deviceControlTcpAliveCheck.setDescription('The TCP alive check time. 0 min: TCP connection will not be closed when the TCP connection is idle. 1 to 99 min: The NPort 6000 series device server automatically closes the TCP connection if there is no TCP activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another Real COM driver Connection.')
deviceControlMaxConnection = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceControlMaxConnection.setStatus('current')
if mibBuilder.loadTexts: deviceControlMaxConnection.setDescription("Max connection is usually used when the user needs to receive data from different hosts simultaneously. The factory default is 1. In this case, only one specific host can access this port of the NPort 6000 series device server, and the Real COM driver on that host will have full control over the port. Max. connection = 1: Allows only a single host's Real COM driver to open the specific NPort 6000 series device server's serial port. Max connection = 2 to 8: Allows 2 to 8 hosts' Real COM drivers to open the specific NPort 6000 series device server's serial port, at the same time. When multiple hosts' Real COM drivers open the serial port at the same time, the COM driver only provides a pure data tunnel without control ability. The serial port parameter will use the firmware settings instead of getting the settings from your application program (AP).")
deviceControlIgnoreJammedIp = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceControlIgnoreJammedIp.setStatus('current')
if mibBuilder.loadTexts: deviceControlIgnoreJammedIp.setDescription("Previously, when Max connection was set to a value greater than 1, and the serial device was transmitting data, if any one of the connected hosts stopped responding, the serial device would wait until the data had been transmitted successfully before transmitting the second group of data to all hosts. Currently, if you select Yes for 'Ignore jammed IP,' the host that is not responding will be ignored, but the data will still be transmitted to the other hosts.")
deviceControlAllowDriverControl = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceControlAllowDriverControl.setStatus('current')
if mibBuilder.loadTexts: deviceControlAllowDriverControl.setDescription('If Max connection is greater than 1, NPort will ignore driver control commands from all connected hosts. However, if you set Allow driver control to YES, control commands will be accepted. Note that since the NPort 6000 series device server may get configuration changes from multiple hosts, the most recent command received will take precedence.')
deviceControlSecure = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceControlSecure.setStatus('current')
if mibBuilder.loadTexts: deviceControlSecure.setDescription("If 'Secure' is enabled, the data on the Ethernet will be encrypted with SSL.")
deviceControlTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceControlTcpPort.setStatus('current')
if mibBuilder.loadTexts: deviceControlTcpPort.setDescription("The 'TCP port' is the TCP port that the NPort 6000 series device server uses to listen to connections, and that other devices must use to contact the NPort 6000 series device server.")
deviceControlConnectionDownRTS = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("goes-low", 1), ("always-high", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceControlConnectionDownRTS.setStatus('current')
if mibBuilder.loadTexts: deviceControlConnectionDownRTS.setDescription("For some applications, serial devices must obtain the Ethernet link status by reading the RTS signal from the NPort 6000 series device server. If this setting is set to 'goes low,' RTS will change its state to low if the Ethernet link is off.")
deviceControlConnectionDownDTR = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("goes-low", 1), ("always-high", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: deviceControlConnectionDownDTR.setStatus('current')
if mibBuilder.loadTexts: deviceControlConnectionDownDTR.setDescription("For some applications, serial devices must obtain the Ethernet link status by reading the DTR signal from the NPort 6000 series device server. If this setting is set to 'goes low,' DTR will change its state to low if the Ethernet link is off.")
socket = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2))
socketTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1), )
if mibBuilder.loadTexts: socketTable.setStatus('current')
if mibBuilder.loadTexts: socketTable.setDescription('The Socket application table.')
socketEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: socketEntry.setStatus('current')
if mibBuilder.loadTexts: socketEntry.setDescription('The Socket application entry.')
socketTcpAliveCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpAliveCheck.setStatus('current')
if mibBuilder.loadTexts: socketTcpAliveCheck.setDescription('The TCP alive check time. 0 min: TCP connection will not be closed when the TCP connection is idle. 1 to 99 min: The NPort 6000 series device server automatically closes the TCP connection if there is no TCP activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another Real COM driver Connection.')
socketInactivityTime = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketInactivityTime.setStatus('current')
if mibBuilder.loadTexts: socketInactivityTime.setDescription('0 ms: TCP connection is not closed when the serial line is idle. 1-65535 ms: The NPort 6000 series device server automatically closes the TCP connection if there is no serial data activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another TCP connection.')
socketMaxConnection = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketMaxConnection.setStatus('current')
if mibBuilder.loadTexts: socketMaxConnection.setDescription("Max connection is usually used when the user needs to receive data from different hosts simultaneously. The factory default is 1. In this case, only one specific host can access this port of the NPort 6000 series device server, and the Real COM driver on that host will have full control over the port. Max. connection 1: Allows only a single host's Real COM driver to open the specific NPort 6000 series device server's serial port. Max connection 2 to 8: Allows 2 to 8 hosts' Real COM drivers to open the specific NPort 6000 series device server's serial port, at the same time. When multiple hosts' Real COM drivers open the serial port at the same time, the COM driver only provides a pure data tunnel without control ability. The serial port parameter will use the firmware settings instead of depending on your application program (AP).")
socketIgnoreJammedIp = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketIgnoreJammedIp.setStatus('current')
if mibBuilder.loadTexts: socketIgnoreJammedIp.setDescription("Previously, when Max connection was set to a value greater than 1, and the serial device was transmitting data, if any one of the connected hosts stopped responding, the serial device would wait until the data had been transmitted successfully before transmitting the second group of data to all hosts. Currently, if you select Yes for 'Ignore jammed IP,' the host that is not responding will be ignored, but the data will still be transmitted to the other hosts.")
socketAllowDriverControl = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketAllowDriverControl.setStatus('current')
if mibBuilder.loadTexts: socketAllowDriverControl.setDescription('If Max connection is greater than 1, NPort will ignore driver control commands from all connected hosts. However, if you set Allow driver control to YES, control commands will be accepted. Note that since the NPort 6000 series device server may get configuration changes from multiple hosts, the most recent command received will take precedence.')
socketSecure = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketSecure.setStatus('current')
if mibBuilder.loadTexts: socketSecure.setDescription('If Secure is enabled, data sent over the Ethernet will be encrypted with SSL.')
socketTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpPort.setStatus('current')
if mibBuilder.loadTexts: socketTcpPort.setDescription("The 'TCP port' is the TCP port that the NPort 6000 series device server uses to listen to connections, and that other devices must use to contact the NPort 6000 series device server.")
socketCmdPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 8), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketCmdPort.setStatus('current')
if mibBuilder.loadTexts: socketCmdPort.setDescription("The 'Command port' is a TCP port used to listen for IP-Serial Lib commands from the host. In order to prevent a TCP port conflict with other applications, the user can set the Command port to another port if needed. IP-Serial Lib will automatically check the Command Port on the NPort 6000 series device server to avoid the need for the user to configure the program.")
socketTcpServerConnectionDownRTS = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("goes-low", 1), ("always-high", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpServerConnectionDownRTS.setStatus('current')
if mibBuilder.loadTexts: socketTcpServerConnectionDownRTS.setDescription("For some applications, serial devices must obtain the Ethernet link status from the RTS signal sent by the NPort 6000 series device server. If this setting is set to 'goes low,' RTS will change the state to low if the Ethernet link is off.")
socketTcpServerConnectionDownDTR = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("goes-low", 1), ("always-high", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpServerConnectionDownDTR.setStatus('current')
if mibBuilder.loadTexts: socketTcpServerConnectionDownDTR.setDescription("For some applications, serial devices must obtain the Ethernet link status from the DTR signal sent by the NPort 6000 series device server. If this setting is set to 'goes low,' DTR will change the state to low if the Ethernet link is off.")
socketTcpClientDestinationAddress1 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 11), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDestinationAddress1.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDestinationAddress1.setDescription('The NPort 6000 series device server can connect actively to a remote host that has this IP address.')
socketTcpClientDestinationPort1 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDestinationPort1.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDestinationPort1.setDescription('The NPort 6000 series device server can connect actively to a remote host that has this TCP port number.')
socketTcpClientDestinationAddress2 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 13), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDestinationAddress2.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDestinationAddress2.setDescription('The NPort 6000 series device server can connect actively to a remote host that has this IP address.')
socketTcpClientDestinationPort2 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 14), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDestinationPort2.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDestinationPort2.setDescription('The NPort 6000 series device server can connect actively to a remote host that has this TCP port number.')
socketTcpClientDestinationAddress3 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 15), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDestinationAddress3.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDestinationAddress3.setDescription('The NPort 6000 series device server can connect actively to a remote host that has this IP address.')
socketTcpClientDestinationPort3 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDestinationPort3.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDestinationPort3.setDescription('The NPort 6000 series device server can connect actively to a remote host that has this TCP port number.')
socketTcpClientDestinationAddress4 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 17), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDestinationAddress4.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDestinationAddress4.setDescription('The NPort 6000 series device server can connect actively to a remote host that has this IP address.')
socketTcpClientDestinationPort4 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 18), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDestinationPort4.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDestinationPort4.setDescription('The NPort 6000 series device server can connect actively to a remote host that has this TCP port number.')
socketTcpClientDesignatedLocalPort1 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 19), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDesignatedLocalPort1.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDesignatedLocalPort1.setDescription('The local TCP port used to connect actively to the remote host.')
socketTcpClientDesignatedLocalPort2 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 20), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDesignatedLocalPort2.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDesignatedLocalPort2.setDescription('The local TCP port used to connect actively to the remote host.')
socketTcpClientDesignatedLocalPort3 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 21), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDesignatedLocalPort3.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDesignatedLocalPort3.setDescription('The local TCP port used to connect actively to the remote host.')
socketTcpClientDesignatedLocalPort4 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 22), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientDesignatedLocalPort4.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientDesignatedLocalPort4.setDescription('The local TCP port used to connect actively to the remote host.')
socketTcpClientConnectionControl = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(257, 258, 514, 1028, 260, 2056, 264))).clone(namedValues=NamedValues(("startup-None", 257), ("anyCharacter-None", 258), ("anyCharacter-InactivityTime", 514), ("dsrOn-DSR-Off", 1028), ("dsrOn-None", 260), ("dcdOn-DCD-Off", 2056), ("dcdOn-None", 264)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketTcpClientConnectionControl.setStatus('current')
if mibBuilder.loadTexts: socketTcpClientConnectionControl.setDescription('Events that cause the TCP client to drop the connection.')
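# Example sketch (not part of the generated MIB): the seven enumerated values
# above appear to pack the connect trigger in the low byte and the disconnect
# trigger in the high byte (e.g. 514 = 0x0202 = anyCharacter/InactivityTime).
# This hypothetical helper decodes that assumed layout.
def _decodeTcpClientConnectionControl(value):
    connectBy = {1: 'startup', 2: 'anyCharacter', 4: 'dsrOn', 8: 'dcdOn'}
    disconnectBy = {1: 'None', 2: 'InactivityTime', 4: 'DSR-Off', 8: 'DCD-Off'}
    return connectBy[value & 0xff], disconnectBy[(value >> 8) & 0xff]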
socketUdpDestinationAddress1Begin = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 24), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationAddress1Begin.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationAddress1Begin.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts whose IP addresses are in the range between this parameter and socketUdpDestinationAddress1End.')
socketUdpDestinationAddress1End = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 25), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationAddress1End.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationAddress1End.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts whose IP addresses are in the range between socketUdpDestinationAddress1Begin and this parameter.')
socketUdpDestinationPort1 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 26), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationPort1.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationPort1.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts that have this UDP port number.')
socketUdpDestinationAddress2Begin = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 27), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationAddress2Begin.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationAddress2Begin.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts whose IP addresses are in the range between this parameter and socketUdpDestinationAddress2End.')
socketUdpDestinationAddress2End = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 28), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationAddress2End.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationAddress2End.setDescription('The NPort 6000 series device server can actively send UDP packets to the remote hosts whose IP addresses are in the range between socketUdpDestinationAddress2Begin and this parameter.')
socketUdpDestinationPort2 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 29), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationPort2.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationPort2.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts that have this UDP port number.')
socketUdpDestinationAddress3Begin = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 30), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationAddress3Begin.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationAddress3Begin.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts whose IP addresses are in the range between this parameter and socketUdpDestinationAddress3End.')
socketUdpDestinationAddress3End = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 31), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationAddress3End.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationAddress3End.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts whose IP addresses are in the range between socketUdpDestinationAddress3Begin and this parameter.')
socketUdpDestinationPort3 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 32), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationPort3.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationPort3.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts that have this UDP port number.')
socketUdpDestinationAddress4Begin = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 33), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationAddress4Begin.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationAddress4Begin.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts whose IP addresses are in the range between this parameter and socketUdpDestinationAddress4End.')
socketUdpDestinationAddress4End = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 34), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationAddress4End.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationAddress4End.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts whose IP addresses are in the range between socketUdpDestinationAddress4Begin and this parameter.')
socketUdpDestinationPort4 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 35), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpDestinationPort4.setStatus('current')
if mibBuilder.loadTexts: socketUdpDestinationPort4.setDescription('The NPort 6000 series device server can actively send UDP packets to remote hosts that have this UDP port number.')
socketUdpLocalListenPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 2, 1, 1, 36), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: socketUdpLocalListenPort.setStatus('current')
if mibBuilder.loadTexts: socketUdpLocalListenPort.setDescription('The UDP port that the NPort 6000 series device server listens to and that other devices must use to contact the NPort 6000 series device server.')
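# Example sketch (not part of the generated MIB): each
# socketUdpDestinationAddressNBegin/End pair above describes an inclusive
# IPv4 range. A minimal membership test using the standard library:
import ipaddress

def _inUdpDestinationRange(host, begin, end):
    return (ipaddress.IPv4Address(begin) <= ipaddress.IPv4Address(host)
            <= ipaddress.IPv4Address(end))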
pairConnection = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 3))
pairConnectionTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 3, 1), )
if mibBuilder.loadTexts: pairConnectionTable.setStatus('current')
if mibBuilder.loadTexts: pairConnectionTable.setDescription('The Pair Connection table.')
pairConnectionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 3, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: pairConnectionEntry.setStatus('current')
if mibBuilder.loadTexts: pairConnectionEntry.setDescription('The Pair Connection entry.')
pairConnectionTcpAliveCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 3, 1, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pairConnectionTcpAliveCheck.setStatus('current')
if mibBuilder.loadTexts: pairConnectionTcpAliveCheck.setDescription('The TCP alive check time. 0 min: TCP connection will not be closed if the TCP connection becomes idle. 1 to 99 min: The NPort 6000 series device server automatically closes the TCP connection if there is no TCP activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another TCP connection.')
pairConnectionSecure = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pairConnectionSecure.setStatus('current')
if mibBuilder.loadTexts: pairConnectionSecure.setDescription('If Secure is enabled, the Ethernet data will be encrypted using SSL.')
pairConnectionDestinationAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 3, 1, 1, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pairConnectionDestinationAddress.setStatus('current')
if mibBuilder.loadTexts: pairConnectionDestinationAddress.setDescription("The Pair Connection 'Master' will contact the network host that has the specified IP address.")
pairConnectionDestinationPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 3, 1, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pairConnectionDestinationPort.setStatus('current')
if mibBuilder.loadTexts: pairConnectionDestinationPort.setDescription("Data will be transmitted through the specified port number. Note that you must configure the same TCP port number for the device server acting as the Pair Connection 'Slave.'")
pairConnectionTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 3, 1, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: pairConnectionTcpPort.setStatus('current')
if mibBuilder.loadTexts: pairConnectionTcpPort.setDescription("This port number must be the same port number that was set up for the Pair Connection 'Master' device server.")
ethernetModem = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 4))
ethernetModemTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 4, 1), )
if mibBuilder.loadTexts: ethernetModemTable.setStatus('current')
if mibBuilder.loadTexts: ethernetModemTable.setDescription('The Ethernet Modem table.')
ethernetModemEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 4, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: ethernetModemEntry.setStatus('current')
if mibBuilder.loadTexts: ethernetModemEntry.setDescription('The Ethernet Modem entry.')
ethernetModemTcpAliveCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 4, 1, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ethernetModemTcpAliveCheck.setStatus('current')
if mibBuilder.loadTexts: ethernetModemTcpAliveCheck.setDescription('The TCP alive check time. 0 min: TCP connection will not be closed if the TCP connection becomes idle. 1 to 99 min: The NPort 6000 series device server automatically closes the TCP connection if there is no TCP activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another TCP connection.')
ethernetModemTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 4, 1, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ethernetModemTcpPort.setStatus('current')
if mibBuilder.loadTexts: ethernetModemTcpPort.setDescription('The local TCP port used to listen for incoming calls from a remote Ethernet modem.')
terminal = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5))
terminalTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1), )
if mibBuilder.loadTexts: terminalTable.setStatus('current')
if mibBuilder.loadTexts: terminalTable.setDescription('The Terminal Application table.')
terminalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: terminalEntry.setStatus('current')
if mibBuilder.loadTexts: terminalEntry.setDescription('The Terminal Application entry.')
terminalTcpAliveCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalTcpAliveCheck.setStatus('current')
if mibBuilder.loadTexts: terminalTcpAliveCheck.setDescription('The TCP alive check time. 0 min: TCP connection will not be closed if the TCP connection becomes idle. 1 to 99 min: The NPort 6000 series device server automatically closes the TCP connection if there is no TCP activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another TCP connection from another host.')
terminalInactivityTime = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalInactivityTime.setStatus('current')
if mibBuilder.loadTexts: terminalInactivityTime.setDescription("0 min: TCP connection will not be closed if the serial line becomes idle. 1-99 min: The NPort 6000 series device server automatically closes the TCP connection if there is no serial data activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another host's TCP connection.")
terminalAutoLinkProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("telnet", 1), ("rlogin", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalAutoLinkProtocol.setStatus('current')
if mibBuilder.loadTexts: terminalAutoLinkProtocol.setDescription("If Auto-Link protocol is set to 'None,' the NPort 6000 series device server will not connect to the host automatically. If Auto-Link protocol is set to 'Telnet,' the NPort 6000 series device server will connect to the host automatically using Telnet. If Auto-Link protocol is set to 'Rlogin,' the NPort 6000 series device server will connect to the host automatically using rlogin.")
terminalPrimaryHostAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalPrimaryHostAddress.setStatus('current')
if mibBuilder.loadTexts: terminalPrimaryHostAddress.setDescription("If specified, designates a 'permanent' host to which the terminal will always be connected.")
terminalSecondHostAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalSecondHostAddress.setStatus('current')
if mibBuilder.loadTexts: terminalSecondHostAddress.setDescription("If specified, designates a 'permanent' host to which the terminal will always be connected.")
terminalTelnetTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalTelnetTcpPort.setStatus('current')
if mibBuilder.loadTexts: terminalTelnetTcpPort.setDescription("By default, the Telnet TCP port number is set to 23, which is the default TCP port number for Telnet. If you need to telnet to this NPort 6000 series device server's serial port, set the Telnet TCP port to a different number.")
terminalSshTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalSshTcpPort.setStatus('current')
if mibBuilder.loadTexts: terminalSshTcpPort.setDescription("By default, the SSH TCP port number is set to 22, which is the default TCP port number for SSH. If you need to use SSH to connect to this NPort 6000 series device server's serial port, set the SSH TCP port to a different number.")
terminalType = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalType.setStatus('current')
if mibBuilder.loadTexts: terminalType.setDescription('Defines the terminal type for outgoing connections.')
terminalMaxSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalMaxSessions.setStatus('current')
if mibBuilder.loadTexts: terminalMaxSessions.setDescription('Configure the maximum number of sessions.')
terminalChangeSession = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 10), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalChangeSession.setStatus('current')
if mibBuilder.loadTexts: terminalChangeSession.setDescription('Defines the control character used to change the terminal session.')
terminalQuit = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 11), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalQuit.setStatus('current')
if mibBuilder.loadTexts: terminalQuit.setDescription('Defines the control character used to quit a terminal session.')
terminalBreak = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 12), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalBreak.setStatus('current')
if mibBuilder.loadTexts: terminalBreak.setDescription('Defines the control character used to send a Telnet BREAK message.')
terminalInterrupt = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 13), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalInterrupt.setStatus('current')
if mibBuilder.loadTexts: terminalInterrupt.setDescription('Defines the control character used to send a Telnet INTERRUPT message.')
terminalAuthenticationType = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("local", 1), ("radius", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalAuthenticationType.setStatus('current')
if mibBuilder.loadTexts: terminalAuthenticationType.setDescription('The terminal authentication type.')
terminalAutoLoginPrompt = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 15), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalAutoLoginPrompt.setStatus('current')
if mibBuilder.loadTexts: terminalAutoLoginPrompt.setDescription('The prompt string for the automatic login ID. When the NPort 6000 series device server receives this string from a remote telnet/rlogin server, it will automatically reply with the user ID.')
terminalPasswordPrompt = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 16), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalPasswordPrompt.setStatus('current')
if mibBuilder.loadTexts: terminalPasswordPrompt.setDescription('The prompt string for the automatic login password. When the NPort 6000 series device server receives this string from a remote telnet/rlogin server, it will automatically reply with the user password.')
terminalLoginUserName = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 17), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalLoginUserName.setStatus('current')
if mibBuilder.loadTexts: terminalLoginUserName.setDescription('Terminal login ID for automatic login.')
terminalLoginPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 5, 1, 1, 18), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: terminalLoginPassword.setStatus('current')
if mibBuilder.loadTexts: terminalLoginPassword.setDescription('Terminal login password for automatic login.')
reverseTerminal = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 6))
reverseTerminalTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 6, 1), )
if mibBuilder.loadTexts: reverseTerminalTable.setStatus('current')
if mibBuilder.loadTexts: reverseTerminalTable.setDescription('The Reverse Terminal Application table.')
reverseTerminalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 6, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: reverseTerminalEntry.setStatus('current')
if mibBuilder.loadTexts: reverseTerminalEntry.setDescription('The Reverse Terminal Application entry.')
reverseTerminalTcpAliveCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 6, 1, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: reverseTerminalTcpAliveCheck.setStatus('current')
if mibBuilder.loadTexts: reverseTerminalTcpAliveCheck.setDescription('The TCP alive check time. 0 min: TCP connection will not be closed when the TCP connection becomes idle. 1 to 99 min: The NPort 6000 series device server automatically closes the TCP connection if there is no TCP activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another TCP connection.')
reverseTerminalInactivityTime = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 6, 1, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: reverseTerminalInactivityTime.setStatus('current')
if mibBuilder.loadTexts: reverseTerminalInactivityTime.setDescription('0 min: TCP connection will not be closed if the serial line becomes idle. 1-99 min: The NPort 6000 series device server automatically closes the TCP connection if there is no serial data activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another TCP connection.')
reverseTerminalTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 6, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: reverseTerminalTcpPort.setStatus('current')
if mibBuilder.loadTexts: reverseTerminalTcpPort.setDescription("Each of the NPort 6000 series device server's serial ports is mapped to a TCP port.")
reverseTerminalAuthenticationType = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 6, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("local", 1), ("radius", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: reverseTerminalAuthenticationType.setStatus('current')
if mibBuilder.loadTexts: reverseTerminalAuthenticationType.setDescription('The reverse terminal authentication type.')
reverseTerminalMapKeys = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("cr-lf", 0), ("cr", 1), ("lf", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: reverseTerminalMapKeys.setStatus('current')
if mibBuilder.loadTexts: reverseTerminalMapKeys.setDescription("If data received through the NPort 6000 series device server's Ethernet port is sent using the 'enter' command, the data will be transmitted out through the serial port with an added: 1. 'carriage return + line feed' if the 'cr-lf' option is selected (i.e., the cursor will jump to the next line and return to the first character of the line); 2. 'carriage return' if the 'cr' option is selected (i.e., the cursor will return to the first character of the line); 3. 'line feed' if the 'lf' option is selected (i.e., the cursor will jump to the next line, but not move horizontally).")
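# Example sketch (not part of the generated MIB): a hypothetical translation
# of the reverseTerminalMapKeys options to the bytes emitted on 'enter'.
_MAP_KEYS_TO_BYTES = {
    0: b'\r\n',  # cr-lf: carriage return + line feed
    1: b'\r',    # cr: carriage return only
    2: b'\n',    # lf: line feed only
}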
printer = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 7))
printerTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 7, 1), )
if mibBuilder.loadTexts: printerTable.setStatus('current')
if mibBuilder.loadTexts: printerTable.setDescription('The Printer Application table.')
printerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 7, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: printerEntry.setStatus('current')
if mibBuilder.loadTexts: printerEntry.setDescription('The Printer Application entry.')
printerTcpAliveCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 7, 1, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: printerTcpAliveCheck.setStatus('current')
if mibBuilder.loadTexts: printerTcpAliveCheck.setDescription('The TCP alive check time. 0 min: TCP connection will not be closed if the TCP connection becomes idle. 1 to 99 min: The NPort 6000 series device server automatically closes the TCP connection if there is no TCP activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another TCP connection.')
printerTcpPort = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 7, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: printerTcpPort.setStatus('current')
if mibBuilder.loadTexts: printerTcpPort.setDescription('The host uses this value to determine the Group to which the printer attached to this serial port belongs. These values are fixed, and cannot be changed by the user.')
printerGroup = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 7, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15))).clone(namedValues=NamedValues(("group1", 0), ("group2", 1), ("group3", 2), ("group4", 3), ("group5", 4), ("group6", 5), ("group7", 6), ("group8", 7), ("group9", 8), ("group10", 9), ("group11", 10), ("group12", 11), ("group13", 12), ("group14", 13), ("group15", 14), ("group16", 15)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: printerGroup.setStatus('current')
if mibBuilder.loadTexts: printerGroup.setDescription("Groups printers attached to different ports. Printers in the same group will share the printing load for printing requests to that group of printers. E.g., setting the NPort 6000 series device server's serial ports 1, 3, and 6 for 'Group01' will allow the printers attached to these three ports to act essentially as one printer.")
printerQueueNameRaw = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 7, 1, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: printerQueueNameRaw.setStatus('current')
if mibBuilder.loadTexts: printerQueueNameRaw.setDescription('Specify the name of the print queue (in RAW mode).')
printerQueueNameASCII = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 7, 1, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: printerQueueNameASCII.setStatus('current')
if mibBuilder.loadTexts: printerQueueNameASCII.setDescription('Specify the name of the print queue (in ASCII mode).')
printerAppendFromFeed = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 7, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: printerAppendFromFeed.setStatus('current')
if mibBuilder.loadTexts: printerAppendFromFeed.setDescription('Specify whether to append a form feed (paging) to each print job.')
dial = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8))
dialTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1), )
if mibBuilder.loadTexts: dialTable.setStatus('current')
if mibBuilder.loadTexts: dialTable.setDescription('The Dial In/Out table.')
dialEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: dialEntry.setStatus('current')
if mibBuilder.loadTexts: dialEntry.setDescription('The Dial In/Out entry.')
dialTERMBINMode = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialTERMBINMode.setStatus('current')
if mibBuilder.loadTexts: dialTERMBINMode.setDescription('Select Yes to enable a Binary Terminal connection.')
dialPPPDMode = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialPPPDMode.setStatus('current')
if mibBuilder.loadTexts: dialPPPDMode.setDescription('Select Yes to enable a PPPD connection.')
dialSLIPDMode = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialSLIPDMode.setStatus('current')
if mibBuilder.loadTexts: dialSLIPDMode.setDescription('Select Yes to enable a SLIPD connection.')
dialAuthType = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("local", 1), ("radius", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialAuthType.setStatus('current')
if mibBuilder.loadTexts: dialAuthType.setDescription('The dial in/out authentication type.')
dialDisconnectBy = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4))).clone(namedValues=NamedValues(("none", 0), ("dcd-off", 2), ("dsr-off", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialDisconnectBy.setStatus('current')
if mibBuilder.loadTexts: dialDisconnectBy.setDescription("If this field is set to 'dcd-off,' the connection will be disconnected when the DCD signal is off. If this field is set to 'dsr-off,' the connection will be disconnected when the DSR signal is off.")
dialDestinationIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialDestinationIpAddress.setStatus('current')
if mibBuilder.loadTexts: dialDestinationIpAddress.setDescription('Destination IP address is the IP address of the remote dial-in/dial-out server.')
dialSourceIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 7), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialSourceIpAddress.setStatus('current')
if mibBuilder.loadTexts: dialSourceIpAddress.setDescription('Source IP address is the IP address of this serial port. The NPort 6000 series device server will automatically assign an IP address for the port. We recommend leaving this field blank.')
dialIpNetmask = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialIpNetmask.setStatus('current')
if mibBuilder.loadTexts: dialIpNetmask.setDescription('The IP netmask used for dial-in/dial-out networks (PPP or SLIP).')
dialTcpIpCompression = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialTcpIpCompression.setStatus('current')
if mibBuilder.loadTexts: dialTcpIpCompression.setDescription("Depends on whether the remote user's application requests compression.")
dialInactivityTime = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialInactivityTime.setStatus('current')
if mibBuilder.loadTexts: dialInactivityTime.setDescription('0 ms: TCP connection will not be closed if the serial line becomes idle. 1-65535 ms: The NPort 6000 series device server automatically closes the TCP connection if there is no serial data activity for the given time. After the connection is closed, the NPort 6000 series device server starts listening for another TCP connection.')
dialLinkQualityReport = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialLinkQualityReport.setStatus('current')
if mibBuilder.loadTexts: dialLinkQualityReport.setDescription('Choose Yes if you are using software to collect link quality information.')
dialOutgoingPAPID = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 12), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialOutgoingPAPID.setStatus('current')
if mibBuilder.loadTexts: dialOutgoingPAPID.setDescription('Dial-out user ID account.')
dialPAPPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 13), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialPAPPassword.setStatus('current')
if mibBuilder.loadTexts: dialPAPPassword.setDescription('Dial-out user password.')
dialIncomingPAPCheck = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 2, 8, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("local", 1), ("radius", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dialIncomingPAPCheck.setStatus('current')
if mibBuilder.loadTexts: dialIncomingPAPCheck.setDescription('The Dial in/out incoming PAP check type.')
dataPacking = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3))
dataPackingPortTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3, 1), )
if mibBuilder.loadTexts: dataPackingPortTable.setStatus('current')
if mibBuilder.loadTexts: dataPackingPortTable.setDescription('The Data Packing table.')
dataPackingPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: dataPackingPortEntry.setStatus('current')
if mibBuilder.loadTexts: dataPackingPortEntry.setDescription('The Data Packing entry.')
portPacketLength = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3, 1, 1, 1), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPacketLength.setStatus('current')
if mibBuilder.loadTexts: portPacketLength.setDescription('If the value is 0, the Delimiter Process will be used, regardless of the length of the data packet. If the data length (in bytes) matches the configured value, the data will be forced out. The data length can be configured from 0 to 1024 bytes. Set to 0 if you do not need to limit the length.')
portDelimiter1Enable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDelimiter1Enable.setStatus('current')
if mibBuilder.loadTexts: portDelimiter1Enable.setDescription('Enable the first delimiter character.')
portDelimiter1 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 2))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDelimiter1.setStatus('current')
if mibBuilder.loadTexts: portDelimiter1.setDescription('The first delimiter character, in hexadecimal.')
portDelimiter2Enable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDelimiter2Enable.setStatus('current')
if mibBuilder.loadTexts: portDelimiter2Enable.setDescription('Enable the second delimiter character.')
portDelimiter2 = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 2))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDelimiter2.setStatus('current')
if mibBuilder.loadTexts: portDelimiter2.setDescription('The second delimiter character, in hexadecimal.')
portDelimiterProcess = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4, 8))).clone(namedValues=NamedValues(("doNothing", 1), ("delimiterAddOne", 2), ("delimiterAddTwo", 4), ("stripDelimiter", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDelimiterProcess.setStatus('current')
if mibBuilder.loadTexts: portDelimiterProcess.setDescription("'delimiterAddOne' or 'delimiterAddTwo': The data will be transmitted when an additional byte (for 'delimiterAddOne'), or an additional 2 bytes (for 'delimiterAddTwo') of data is received after receiving the Delimiter. 'stripDelimiter': When the delimiter is received, the Delimiter is deleted (i.e., stripped), and the remaining data is transmitted. 'doNothing': The data will be transmitted when the delimiter is received.")
portForceTransmit = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 1, 3, 1, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portForceTransmit.setStatus('current')
if mibBuilder.loadTexts: portForceTransmit.setDescription("0: Disable the force transmit timeout. 1 to 65535: Forces the NPort 6000 series device server's TCP/IP protocol software to try to pack serial data received during the specified time into the same data frame.")
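if __name__ == '__main__':
    # Example sketch (not part of the generated MIB): setting the data-packing
    # length for serial port 1 with pysnmp's hlapi, assuming this module is on
    # the manager's MIB path. The host address and 'private' community are
    # placeholder assumptions.
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, setCmd)
    errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
        SnmpEngine(), CommunityData('private'),
        UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
        ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'portPacketLength', 1), 128)))
    print(errorIndication or varBinds)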
comParamSetting = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2))
comParamPortTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1), )
if mibBuilder.loadTexts: comParamPortTable.setStatus('current')
if mibBuilder.loadTexts: comParamPortTable.setDescription('The Communication Parameters table.')
comParamPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: comParamPortEntry.setStatus('current')
if mibBuilder.loadTexts: comParamPortEntry.setDescription('The Communication Parameters entry.')
portAlias = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portAlias.setStatus('current')
if mibBuilder.loadTexts: portAlias.setDescription("Port alias is included to allow easy identification of the serial devices that are connected to NPort 6000 series device server's serial port.")
portInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("rs-232", 0), ("rs-422", 1), ("rs-485-2-wire", 2), ("rs-485-4-wire", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portInterface.setStatus('current')
if mibBuilder.loadTexts: portInterface.setDescription('The serial interface that the port works on.')
portBaudRate = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19))).clone(namedValues=NamedValues(("b50", 0), ("b75", 1), ("b110", 2), ("b134", 3), ("b150", 4), ("b300", 5), ("b600", 6), ("b1200", 7), ("b1800", 8), ("b2400", 9), ("b4800", 10), ("b7200", 11), ("b9600", 12), ("b19200", 13), ("b38400", 14), ("b57600", 15), ("b115200", 16), ("b230400", 17), ("b460800", 18), ("b921600", 19)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBaudRate.setStatus('current')
if mibBuilder.loadTexts: portBaudRate.setDescription("The port's speed in bits per second. You can choose a standard baud rate by index.")
portBaudRateManual = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1, 4), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBaudRateManual.setStatus('current')
if mibBuilder.loadTexts: portBaudRateManual.setDescription("The port's speed in bits per second. You can specify a baud rate manually.")
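# Example sketch (not part of the generated MIB): a hypothetical lookup table
# mapping the portBaudRate enumeration indices (b50..b921600 above) back to
# bits per second.
_STANDARD_BAUD_RATES = (50, 75, 110, 134, 150, 300, 600, 1200, 1800, 2400,
                        4800, 7200, 9600, 19200, 38400, 57600, 115200, 230400,
                        460800, 921600)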
portDataBits = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("d5", 0), ("d6", 1), ("d7", 2), ("d8", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDataBits.setStatus('current')
if mibBuilder.loadTexts: portDataBits.setDescription('The number of bits in a character for the port.')
portStopBits = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("s1", 0), ("s15", 1), ("s2", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portStopBits.setStatus('current')
if mibBuilder.loadTexts: portStopBits.setDescription('The number of stop bits for the port.')
portParity = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 0), ("odd", 1), ("even", 2), ("mark", 3), ("space", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portParity.setStatus('current')
if mibBuilder.loadTexts: portParity.setDescription('The parity bit for the port.')
portFlowControl = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("rts-cts", 1), ("xon-xoff", 2), ("dtr-dsr", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFlowControl.setStatus('current')
if mibBuilder.loadTexts: portFlowControl.setDescription('The flow-control method for the port.')
portFIFO = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portFIFO.setStatus('current')
if mibBuilder.loadTexts: portFIFO.setDescription("Enable or disable the port's FIFO.")
dataBuffering = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 3))
dataBufferingPortTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 3, 1), )
if mibBuilder.loadTexts: dataBufferingPortTable.setStatus('current')
if mibBuilder.loadTexts: dataBufferingPortTable.setDescription('The Data Buffering/Log table.')
dataBufferingPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 3, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: dataBufferingPortEntry.setStatus('current')
if mibBuilder.loadTexts: dataBufferingPortEntry.setDescription('The Data Buffering/Log entry.')
portBufferingEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBufferingEnable.setStatus('current')
if mibBuilder.loadTexts: portBufferingEnable.setDescription('Enable off-line port buffering.')
portBufferingLocation = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("memory", 0), ("sdCard", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBufferingLocation.setStatus('current')
if mibBuilder.loadTexts: portBufferingLocation.setDescription('The location of off-line port buffering.')
portBufferingSDFileSize = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 3, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portBufferingSDFileSize.setStatus('current')
if mibBuilder.loadTexts: portBufferingSDFileSize.setDescription("The maximum file size of off-line port buffering, in MBytes, if the location is set to 'sdCard.'")
portSerialDataLoggingEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portSerialDataLoggingEnable.setStatus('current')
if mibBuilder.loadTexts: portSerialDataLoggingEnable.setDescription('Enable or disable serial data logging for the port.')
modemSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 4))
modemSettingsPortTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 4, 1), )
if mibBuilder.loadTexts: modemSettingsPortTable.setStatus('current')
if mibBuilder.loadTexts: modemSettingsPortTable.setDescription('The Modem Settings table.')
modemSettingsPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 4, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: modemSettingsPortEntry.setStatus('current')
if mibBuilder.loadTexts: modemSettingsPortEntry.setDescription('The Modem Settings entry.')
portEnableModem = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portEnableModem.setStatus('current')
if mibBuilder.loadTexts: portEnableModem.setDescription('Enable or disable modem for this port.')
portInitialString = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 4, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 24))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portInitialString.setStatus('current')
if mibBuilder.loadTexts: portInitialString.setDescription("Set the modem's initial string to establish the connection. E.g., 'AT&S0=1' for auto-answer.")
portDialUp = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 4, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portDialUp.setStatus('current')
if mibBuilder.loadTexts: portDialUp.setDescription("Set the modem's dial-up AT command string.")
portPhoneNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 4, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portPhoneNumber.setStatus('current')
if mibBuilder.loadTexts: portPhoneNumber.setDescription('Set the number used to dial out.')
welcomeMessage = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 5))
portEnableWelcomeMessage = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portEnableWelcomeMessage.setStatus('current')
if mibBuilder.loadTexts: portEnableWelcomeMessage.setDescription('Enable or disable the welcome message.')
portMessage = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 4, 5, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 1280))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: portMessage.setStatus('current')
if mibBuilder.loadTexts: portMessage.setDescription('Set up a welcome message to greet dial-in users.')
sysManagement = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5))
miscNetworkSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1))
accessibleIp = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 1))
enableAccessibleIpList = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: enableAccessibleIpList.setStatus('current')
if mibBuilder.loadTexts: enableAccessibleIpList.setDescription('Enable or disable the entire accessible IP list.')
accessibleIpListTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 1, 2), )
if mibBuilder.loadTexts: accessibleIpListTable.setStatus('current')
if mibBuilder.loadTexts: accessibleIpListTable.setDescription('Accessible IP List table.')
accessibleIpListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 1, 2, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "activeAccessibleIpList"))
if mibBuilder.loadTexts: accessibleIpListEntry.setStatus('current')
if mibBuilder.loadTexts: accessibleIpListEntry.setDescription('Accessible IP List entry.')
accessibleIpListIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 1, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accessibleIpListIndex.setStatus('current')
if mibBuilder.loadTexts: accessibleIpListIndex.setDescription('Accessible IP List index.')
activeAccessibleIpList = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: activeAccessibleIpList.setStatus('current')
if mibBuilder.loadTexts: activeAccessibleIpList.setDescription('Enable or disable this accessible IP entry.')
accessibleIpListAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 1, 2, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: accessibleIpListAddress.setStatus('current')
if mibBuilder.loadTexts: accessibleIpListAddress.setDescription("Accessible IP address. This value can be the IP address of a single host if the netmask is set to '255.255.255.255,' or it can be a network address.")
accessibleIpListNetmask = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 1, 2, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: accessibleIpListNetmask.setStatus('current')
if mibBuilder.loadTexts: accessibleIpListNetmask.setDescription('Accessible IP address netmask. The IP address and netmask are used to specify a group of remote hosts.')
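# Example sketch (not part of the generated MIB): per the description above, a
# host is covered by an accessible-IP entry when its masked address equals the
# entry's masked address. A hypothetical check using the standard library:
import ipaddress

def _hostAllowed(host, entryAddress, entryNetmask):
    mask = int(ipaddress.IPv4Address(entryNetmask))
    return ((int(ipaddress.IPv4Address(host)) & mask)
            == (int(ipaddress.IPv4Address(entryAddress)) & mask))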
snmpAgentSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 2))
snmpEnable = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpEnable.setStatus('current')
if mibBuilder.loadTexts: snmpEnable.setDescription('Enable or disable SNMP agent.')
snmpContactName = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 2, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpContactName.setStatus('current')
if mibBuilder.loadTexts: snmpContactName.setDescription('The SNMP contact information usually includes an emergency contact name and telephone or pager number.')
snmpLocation = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 2, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpLocation.setStatus('current')
if mibBuilder.loadTexts: snmpLocation.setDescription('Specify the location string for SNMP agents, such as the NPort 6000 series device server. This string is usually set to the street address where the NPort 6000 series device server is physically located.')
dDNS = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 3))
dDNSEnable = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dDNSEnable.setStatus('current')
if mibBuilder.loadTexts: dDNSEnable.setDescription('Enable or disable the DDNS function.')
dDNSServerAddress = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0))).clone(namedValues=NamedValues(("dynDns_org", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dDNSServerAddress.setStatus('current')
if mibBuilder.loadTexts: dDNSServerAddress.setDescription('The DDNS server address. DDNS is serviced by a third-party provider. To use the DDNS function, you may need to obtain a user account from the server.')
dDNSHostName = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 3, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 60))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dDNSHostName.setStatus('current')
if mibBuilder.loadTexts: dDNSHostName.setDescription('The host name of the machine to be registered to the DDNS server.')
dDNSUserName = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 3, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 60))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dDNSUserName.setStatus('current')
if mibBuilder.loadTexts: dDNSUserName.setDescription('The user account provided by the DDNS server.')
dDNSPassword = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 3, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 60))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dDNSPassword.setStatus('current')
if mibBuilder.loadTexts: dDNSPassword.setDescription('The user password provided by the DDNS server.')
hostTable = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 4))
hostTableTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 4, 1), )
if mibBuilder.loadTexts: hostTableTable.setStatus('current')
if mibBuilder.loadTexts: hostTableTable.setDescription('Host Table.')
hostTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 4, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "hostTableIndex"))
if mibBuilder.loadTexts: hostTableEntry.setStatus('current')
if mibBuilder.loadTexts: hostTableEntry.setDescription('Host Table entry.')
hostTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 4, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hostTableIndex.setStatus('current')
if mibBuilder.loadTexts: hostTableIndex.setDescription('The Host Table index.')
hostName = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 4, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hostName.setStatus('current')
if mibBuilder.loadTexts: hostName.setDescription('The host name of the host table entry.')
hostIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 4, 1, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hostIpAddress.setStatus('current')
if mibBuilder.loadTexts: hostIpAddress.setDescription('Related IP address of the host table entry.')
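# --- Illustrative example (not part of the pysmi-generated MIB) -----------
# A minimal sketch of walking the hostTable above with pysnmp's nextCmd;
# the device address and community string are assumptions.
#
# from pysnmp.hlapi import (nextCmd, SnmpEngine, CommunityData,
#                           UdpTransportTarget, ContextData, ObjectType,
#                           ObjectIdentity)
#
# for errorIndication, errorStatus, errorIndex, varBinds in nextCmd(
#         SnmpEngine(), CommunityData('public', mpModel=1),
#         UdpTransportTarget(('192.168.127.254', 161)), ContextData(),
#         ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'hostName')),
#         ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'hostIpAddress')),
#         lexicographicMode=False):      # stop at the end of the table
#     if errorIndication or errorStatus:
#         break
#     for varBind in varBinds:
#         print(' = '.join([x.prettyPrint() for x in varBind]))
# ---------------------------------------------------------------------------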
routeTable = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 5))
routeTableTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 5, 1), )
if mibBuilder.loadTexts: routeTableTable.setStatus('current')
if mibBuilder.loadTexts: routeTableTable.setDescription('Route Table.')
routeTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 5, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "routeTableIndex"))
if mibBuilder.loadTexts: routeTableEntry.setStatus('current')
if mibBuilder.loadTexts: routeTableEntry.setDescription('Route Table entry.')
routeTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 5, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: routeTableIndex.setStatus('current')
if mibBuilder.loadTexts: routeTableIndex.setDescription('The Route Table index.')
gatewayRouteTable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 5, 1, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gatewayRouteTable.setStatus('current')
if mibBuilder.loadTexts: gatewayRouteTable.setDescription('The gateway property of the route table entry.')
destinationRouteTable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 5, 1, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: destinationRouteTable.setStatus('current')
if mibBuilder.loadTexts: destinationRouteTable.setDescription("The destination IP address of the route table entry. This value can be the IP address of a single host if the netmask is set to '255.255.255.255', or it can be a network address.")
netmaskRouteTable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 5, 1, 1, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netmaskRouteTable.setStatus('current')
if mibBuilder.loadTexts: netmaskRouteTable.setDescription('The destination netmask of the route table entry.')
metricRouteTable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 5, 1, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: metricRouteTable.setStatus('current')
if mibBuilder.loadTexts: metricRouteTable.setDescription('The routing metric for this route. The semantics of this metric are determined by the routing-protocol. Normally, it represents the cost for the destination.')
interfaceRouteTable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 5, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 256))).clone(namedValues=NamedValues(("port1", 0), ("port2", 1), ("port3", 2), ("port4", 3), ("port5", 4), ("port6", 5), ("port7", 6), ("port8", 7), ("port9", 8), ("port10", 9), ("port11", 10), ("port12", 11), ("port13", 12), ("port14", 13), ("port15", 14), ("port16", 15), ("lan", 256)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: interfaceRouteTable.setStatus('current')
if mibBuilder.loadTexts: interfaceRouteTable.setDescription('The value that identifies the local interface through which the next hop of this route should be reached.')
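# --- Illustrative example (not part of the pysmi-generated MIB) -----------
# A minimal sketch of reading one row of the routeTableTable above by its
# routeTableIndex (row 1 assumed); address and community are assumptions.
#
# from pysnmp.hlapi import (getCmd, SnmpEngine, CommunityData,
#                           UdpTransportTarget, ContextData, ObjectType,
#                           ObjectIdentity)
#
# errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
#     SnmpEngine(), CommunityData('public', mpModel=1),
#     UdpTransportTarget(('192.168.127.254', 161)), ContextData(),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'gatewayRouteTable', 1)),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'destinationRouteTable', 1)),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'netmaskRouteTable', 1)),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'metricRouteTable', 1))))
# ---------------------------------------------------------------------------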
userTable = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 6))
userTableTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 6, 1), )
if mibBuilder.loadTexts: userTableTable.setStatus('current')
if mibBuilder.loadTexts: userTableTable.setDescription('User Table.')
userTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 6, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "userTableIndex"))
if mibBuilder.loadTexts: userTableEntry.setStatus('current')
if mibBuilder.loadTexts: userTableEntry.setDescription('User Table entry.')
userTableIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 6, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userTableIndex.setStatus('current')
if mibBuilder.loadTexts: userTableIndex.setDescription('The User Table index.')
userNameUserTable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 6, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: userNameUserTable.setStatus('current')
if mibBuilder.loadTexts: userNameUserTable.setDescription('The user name of this entry.')
passwordUserTable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 6, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: passwordUserTable.setStatus('current')
if mibBuilder.loadTexts: passwordUserTable.setDescription('The password of this user entry.')
phoneNumberUserTable = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 6, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: phoneNumberUserTable.setStatus('current')
if mibBuilder.loadTexts: phoneNumberUserTable.setDescription('The phone number of this user entry.')
authenticationServer = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 7))
radiusServerIp = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 7, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: radiusServerIp.setStatus('current')
if mibBuilder.loadTexts: radiusServerIp.setDescription('The IP address or domain name of the RADIUS server.')
radiusKey = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 7, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: radiusKey.setStatus('current')
if mibBuilder.loadTexts: radiusKey.setDescription('RADIUS password. Must be the same as in the RADIUS server.')
udpPortAuthenticationServer = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 7, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1645, 1812))).clone(namedValues=NamedValues(("port1645", 1645), ("port1812", 1812)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: udpPortAuthenticationServer.setStatus('current')
if mibBuilder.loadTexts: udpPortAuthenticationServer.setDescription('The UDP port of the RADIUS server.')
radiusAccounting = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 7, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: radiusAccounting.setStatus('current')
if mibBuilder.loadTexts: radiusAccounting.setDescription('Enable or disable RADIUS accounting.')
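# --- Illustrative example (not part of the pysmi-generated MIB) -----------
# A minimal sketch of pointing the device at a RADIUS server via the
# authenticationServer scalars above. Server address, shared key, and the
# write community are assumptions.
#
# from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
#                           UdpTransportTarget, ContextData, ObjectType,
#                           ObjectIdentity, Integer32, OctetString)
#
# errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
#     SnmpEngine(), CommunityData('private', mpModel=1),
#     UdpTransportTarget(('192.168.127.254', 161)), ContextData(),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'radiusServerIp', 0),
#                OctetString('10.0.0.5')),              # hypothetical RADIUS server
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'radiusKey', 0),
#                OctetString('shared-secret')),         # must match the server
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'udpPortAuthenticationServer', 0),
#                Integer32(1812))))                     # port1812(1812)
# ---------------------------------------------------------------------------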
sysLogSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 8))
sysLocalLog = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 8, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sysLocalLog.setStatus('current')
if mibBuilder.loadTexts: sysLocalLog.setDescription('Log the following events to the system log: System Cold Start, System Warm Start.')
networkLocalLog = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 8, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: networkLocalLog.setStatus('current')
if mibBuilder.loadTexts: networkLocalLog.setDescription('Log the following events to the system log: DHCP/BOOTP/PPPoE Get IP/Renew, NTP, Mail Fail, NTP Connect Fail, DHCP Fail, IP Conflict, Ethernet Link Down.')
configLocalLog = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 8, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: configLocalLog.setStatus('current')
if mibBuilder.loadTexts: configLocalLog.setDescription('Log the following events to the system log: Login Fail, IP Changed, Password Changed, Config Changed, Firmware Upgrade, SSL Key Import, Config Import, Config Export.')
opModeLocalLog = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 1, 8, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: opModeLocalLog.setStatus('current')
if mibBuilder.loadTexts: opModeLocalLog.setDescription('Log the following op-mode events to the system log: Connect, Disconnect, Authentication Fail, Restart.')
autoWarningSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2))
eventSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1))
mailWarningColdStart = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mailWarningColdStart.setStatus('current')
if mibBuilder.loadTexts: mailWarningColdStart.setDescription('Refers to starting the system from power off (contrast this with warm start). When performing a cold start, the NPort 6000 series device server will automatically issue an auto warning message by e-mail after booting up.')
mailWarningWarmStart = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mailWarningWarmStart.setStatus('current')
if mibBuilder.loadTexts: mailWarningWarmStart.setDescription('Refers to restarting the NPort 6000 without turning the power off. When performing a warm start, the NPort 6000 series device server will automatically send an e-mail after rebooting.')
mailWarningAuthFailure = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mailWarningAuthFailure.setStatus('current')
if mibBuilder.loadTexts: mailWarningAuthFailure.setDescription('The user inputs a wrong password from the Console or Administrator. When authentication failure occurs, the NPort 6000 series device server will immediately send an e-mail.')
mailWarningIpChanged = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mailWarningIpChanged.setStatus('current')
if mibBuilder.loadTexts: mailWarningIpChanged.setDescription("The user has changed the NPort 6000 series device server's IP address. When the IP address changes, the NPort 6000 series device server will send an e-mail with the new IP address before it reboots. If the NPort 6000 series device server is unable to send an e-mail message to the mail server within 15 seconds, it will reboot anyway, and abort the e-mail auto warning.")
mailWarningPasswordChanged = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mailWarningPasswordChanged.setStatus('current')
if mibBuilder.loadTexts: mailWarningPasswordChanged.setDescription("The user has changed the NPort 6000 series device server's password. When the password changes, the NPort 6000 series device server will send an e-mail with the password changed notice before it reboots. If the NPort 6000 series device server is unable to send an e-mail message to the mail server within 15 seconds, it will reboot anyway, and abort the e-mail auto warning.")
trapServerColdStart = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapServerColdStart.setStatus('current')
if mibBuilder.loadTexts: trapServerColdStart.setDescription('Refers to starting the system from power off (contrast this with warm start). When performing a cold start, the NPort 6000 series device server will automatically issue an auto warning message by sending an SNMP trap after booting up.')
trapServerWarmStart = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapServerWarmStart.setStatus('current')
if mibBuilder.loadTexts: trapServerWarmStart.setDescription('Refers to restarting the NPort 6000 without turning the power off. When performing a warm start, the NPort 6000 series device server will automatically send an SNMP trap after rebooting.')
trapServerAuthFailure = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapServerAuthFailure.setStatus('current')
if mibBuilder.loadTexts: trapServerAuthFailure.setDescription('The user inputs a wrong password from the Console or Administrator. When authentication failure occurs, the NPort 6000 series device server will immediately send an SNMP trap.')
alarmServerEthernet1LinkDown = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alarmServerEthernet1LinkDown.setStatus('current')
if mibBuilder.loadTexts: alarmServerEthernet1LinkDown.setDescription('When the built-in LAN port (link1) is off, the NPort 6000 series device server will automatically raise a d-out alarm.')
alarmServerEthernet2LinkDown = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alarmServerEthernet2LinkDown.setStatus('current')
if mibBuilder.loadTexts: alarmServerEthernet2LinkDown.setDescription('The NPort 6000 series device server can provide an extra Ethernet port via the network module. When the first port on the network module is off, the NPort 6000 series device server will automatically raise a d-out alarm.')
alarmServerEthernet3LinkDown = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alarmServerEthernet3LinkDown.setStatus('current')
if mibBuilder.loadTexts: alarmServerEthernet3LinkDown.setDescription('The NPort 6000 series device server can provide an extra Ethernet port via the network module. When the second port on the network module is off, the NPort 6000 series device server will automatically raise a d-out alarm.')
serialEventSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 2))
portEventSettingsTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 2, 1), )
if mibBuilder.loadTexts: portEventSettingsTable.setStatus('current')
if mibBuilder.loadTexts: portEventSettingsTable.setDescription('Port Event Settings Table.')
portEventSettingsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 2, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: portEventSettingsEntry.setStatus('current')
if mibBuilder.loadTexts: portEventSettingsEntry.setDescription('Port Event Settings entry.')
mailDCDchange = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mailDCDchange.setStatus('current')
if mibBuilder.loadTexts: mailDCDchange.setDescription('A change in the DCD (Data Carrier Detect) signal indicates that the modem connection status has changed. For example, if the DCD signal changes to low, it indicates that the connection line is down. When the DCD signal changes to low, the NPort 6000 series device server will immediately send an e-mail message.')
trapDCDchange = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapDCDchange.setStatus('current')
if mibBuilder.loadTexts: trapDCDchange.setDescription('A change in the DCD (Data Carrier Detect) signal indicates that the modem connection status has changed. For example, if the DCD signal changes to low, it indicates that the connection line is down. When the DCD signal changes to low, the NPort 6000 series device server will immediately send an SNMP trap.')
alarmDCDchange = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alarmDCDchange.setStatus('current')
if mibBuilder.loadTexts: alarmDCDchange.setDescription('A change in the DCD (Data Carrier Detect) signal indicates that the modem connection status has changed. For example, if the DCD signal changes to low, it indicates that the connection line is down. When the DCD signal changes to low, the NPort 6000 series device server will immediately raise a d-out alarm.')
mailDSRchange = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mailDSRchange.setStatus('current')
if mibBuilder.loadTexts: mailDSRchange.setDescription('A change in the DSR (Data Set Ready) signal indicates that the data communication equipment is powered off. For example, if the DSR signal changes to low, it indicates that the data communication equipment is powered down. When the DSR signal changes to low, the NPort 6000 series device server will immediately send an e-mail message.')
trapDSRchange = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapDSRchange.setStatus('current')
if mibBuilder.loadTexts: trapDSRchange.setDescription('A change in the DSR (Data Set Ready) signal indicates that the data communication equipment is powered off. For example, if the DSR signal changes to low, it indicates that the data communication equipment is powered down. When the DSR signal changes to low, the NPort 6000 series device server will immediately send an SNMP trap.')
alarmDSRchange = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: alarmDSRchange.setStatus('current')
if mibBuilder.loadTexts: alarmDSRchange.setDescription('A change in the DSR (Data Set Ready) signal indicates that the data communication equipment is powered off. For example, if the DSR signal changes to low, it indicates that the data communication equipment is powered down. When the DSR signal changes to low, the NPort 6000 series device server will immediately raise a d-out alarm.')
emailAlert = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3))
emailWarningMailServer = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailWarningMailServer.setStatus('current')
if mibBuilder.loadTexts: emailWarningMailServer.setDescription("The IP address or domain name of the e-mail server used by the e-mail warning function.")
emailRequiresAuthentication = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("non-require", 0), ("require", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailRequiresAuthentication.setStatus('current')
if mibBuilder.loadTexts: emailRequiresAuthentication.setDescription('Set whether the mail server requires user login.')
emailWarningUserName = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3, 3), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailWarningUserName.setStatus('current')
if mibBuilder.loadTexts: emailWarningUserName.setDescription('The user name used to log into the mail server if authentication is required.')
emailWarningPassword = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailWarningPassword.setStatus('current')
if mibBuilder.loadTexts: emailWarningPassword.setDescription('The password used to log into the mail server if authentication is required.')
emailWarningFromEmail = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailWarningFromEmail.setStatus('current')
if mibBuilder.loadTexts: emailWarningFromEmail.setDescription("The e-mail address to be filled in the 'From' field of the auto-warning e-mail.")
emailWarningFirstEmailAddr = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailWarningFirstEmailAddr.setStatus('current')
if mibBuilder.loadTexts: emailWarningFirstEmailAddr.setDescription('The first e-mail address to which the auto-warning e-mail is sent.')
emailWarningSecondEmailAddr = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailWarningSecondEmailAddr.setStatus('current')
if mibBuilder.loadTexts: emailWarningSecondEmailAddr.setDescription('The second e-mail address to which the auto-warning e-mail is sent.')
emailWarningThirdEmailAddr = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3, 8), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailWarningThirdEmailAddr.setStatus('current')
if mibBuilder.loadTexts: emailWarningThirdEmailAddr.setDescription('The third e-mail address to which the auto-warning e-mail is sent.')
emailWarningFourthEmailAddr = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 3, 9), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: emailWarningFourthEmailAddr.setStatus('current')
if mibBuilder.loadTexts: emailWarningFourthEmailAddr.setDescription('The fourth e-mail address to which the auto-warning e-mail is sent.')
snmpTrap = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 4))
snmpTrapReceiverIp = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 4, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: snmpTrapReceiverIp.setStatus('current')
if mibBuilder.loadTexts: snmpTrapReceiverIp.setDescription('SNMP trap receiver IP address or domain name.')
trapVersion = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 2, 4, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("v1", 0), ("v2c", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapVersion.setStatus('current')
if mibBuilder.loadTexts: trapVersion.setDescription('SNMP trap version used for the auto-warning function.')
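# --- Illustrative example (not part of the pysmi-generated MIB) -----------
# A minimal sketch of configuring the snmpTrap group above: set the trap
# receiver and select SNMPv2c traps. The receiver address and write
# community are assumptions.
#
# from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
#                           UdpTransportTarget, ContextData, ObjectType,
#                           ObjectIdentity, Integer32, OctetString)
#
# errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
#     SnmpEngine(), CommunityData('private', mpModel=1),
#     UdpTransportTarget(('192.168.127.254', 161)), ContextData(),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'snmpTrapReceiverIp', 0),
#                OctetString('10.0.0.20')),   # hypothetical trap receiver
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'trapVersion', 0),
#                Integer32(1))))              # v2c(1)
# ---------------------------------------------------------------------------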
maintenance = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3))
consoleSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3, 1))
httpConsole = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: httpConsole.setStatus('current')
if mibBuilder.loadTexts: httpConsole.setDescription('Enable or disable HTTP console.')
httpsConsole = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: httpsConsole.setStatus('current')
if mibBuilder.loadTexts: httpsConsole.setDescription('Enable or disable HTTPS console.')
telnetConsole = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: telnetConsole.setStatus('current')
if mibBuilder.loadTexts: telnetConsole.setDescription('Enable or disable Telnet console.')
sshConsole = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("enable", 0), ("disable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sshConsole.setStatus('current')
if mibBuilder.loadTexts: sshConsole.setDescription('Enable or disable SSH console.')
lcmReadOnlyProtect = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("writable", 0), ("readonly", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lcmReadOnlyProtect.setStatus('current')
if mibBuilder.loadTexts: lcmReadOnlyProtect.setDescription('Enable or disable LCM read-only protection. If the LCM panel is under read-only protection, users can only view the configurations from the LCM panel. Otherwise, by default, users can modify the settings directly on the LCM panel.')
resetButtonFunction = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("always-enable", 0), ("disable-after-60-sec", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: resetButtonFunction.setStatus('current')
if mibBuilder.loadTexts: resetButtonFunction.setDescription('Enable or disable reset button protection. You can disable the hardware reset button 60 seconds after the NPort 6000 series device server restarts.')
loadFactoryDefault = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3, 2))
loadFactoryDefaultSetting = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 5, 3, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("resetToFactoryDefault-ExcludingIpConfiguration", 0), ("resetToFactoryDefault", 1)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: loadFactoryDefaultSetting.setStatus('current')
if mibBuilder.loadTexts: loadFactoryDefaultSetting.setDescription("This function will reset all of the NPort 6000 series device server's settings to the factory default values. Be aware that previous settings will be lost. Selecting Reset to factory default (excluding IP configuration) will reset all settings except the IP configuration to factory defaults. Selecting Reset to factory default will reset all settings including the IP configuration to factory defaults.")
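# --- Illustrative example (not part of the pysmi-generated MIB) -----------
# loadFactoryDefaultSetting is write-only, so it can only be SET, never read
# back. A minimal sketch of resetting everything except the IP configuration
# (value 0); address and community are assumptions, and the operation is
# destructive, so double-check the target before uncommenting.
#
# from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
#                           UdpTransportTarget, ContextData, ObjectType,
#                           ObjectIdentity, Integer32)
#
# errorIndication, errorStatus, errorIndex, varBinds = next(setCmd(
#     SnmpEngine(), CommunityData('private', mpModel=1),
#     UdpTransportTarget(('192.168.127.254', 161)), ContextData(),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'loadFactoryDefaultSetting', 0),
#                Integer32(0))))   # resetToFactoryDefault-ExcludingIpConfiguration(0)
# ---------------------------------------------------------------------------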
sysStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6))
s2eConnections = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 1))
monitorRemoteIpTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 1, 1), )
if mibBuilder.loadTexts: monitorRemoteIpTable.setStatus('current')
if mibBuilder.loadTexts: monitorRemoteIpTable.setDescription('Serial to Network Connections Remote IP table.')
monitorRemoteIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 1, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"), (0, "MOXA-NP6000-MIB", "remoteIpIndex"))
if mibBuilder.loadTexts: monitorRemoteIpEntry.setStatus('current')
if mibBuilder.loadTexts: monitorRemoteIpEntry.setDescription('Serial to Network Connections Remote IP entry.')
remoteIpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: remoteIpIndex.setStatus('current')
if mibBuilder.loadTexts: remoteIpIndex.setDescription('Serial to Network Connections Remote IP index.')
monitorRemoteIp = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 1, 1, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorRemoteIp.setStatus('current')
if mibBuilder.loadTexts: monitorRemoteIp.setDescription('The remote IP currently connecting to this port.')
serialPortStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2))
monitorSerialPortStatusTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1), )
if mibBuilder.loadTexts: monitorSerialPortStatusTable.setStatus('current')
if mibBuilder.loadTexts: monitorSerialPortStatusTable.setDescription('Serial Port Status table.')
monitorSerialPortStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: monitorSerialPortStatusEntry.setStatus('current')
if mibBuilder.loadTexts: monitorSerialPortStatusEntry.setDescription('Serial Port Status entry.')
monitorTxCount = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorTxCount.setStatus('current')
if mibBuilder.loadTexts: monitorTxCount.setDescription('View the number of bytes transmitted from the port since the last time the port was initialized.')
monitorRxCount = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorRxCount.setStatus('current')
if mibBuilder.loadTexts: monitorRxCount.setDescription('View the number of bytes received by the port since the last time the port was initialized.')
monitorTxTotalCount = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorTxTotalCount.setStatus('current')
if mibBuilder.loadTexts: monitorTxTotalCount.setDescription('View the number of bytes transmitted from the port since the last time the system was initialized.')
monitorRxTotalCount = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorRxTotalCount.setStatus('current')
if mibBuilder.loadTexts: monitorRxTotalCount.setDescription('View the number of bytes received by the port since the last time the system was initialized.')
monitorDSR = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorDSR.setStatus('current')
if mibBuilder.loadTexts: monitorDSR.setDescription('View the current DSR status of this port.')
monitorDTR = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorDTR.setStatus('current')
if mibBuilder.loadTexts: monitorDTR.setDescription('View the current DTR status of this port.')
monitorRTS = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorRTS.setStatus('current')
if mibBuilder.loadTexts: monitorRTS.setDescription('View the current RTS status of this port.')
monitorCTS = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorCTS.setStatus('current')
if mibBuilder.loadTexts: monitorCTS.setDescription('View the current CTS status of this port.')
monitorDCD = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorDCD.setStatus('current')
if mibBuilder.loadTexts: monitorDCD.setDescription('View the current DCD status of this port.')
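# --- Illustrative example (not part of the pysmi-generated MIB) -----------
# A minimal sketch of polling the per-port status columns above for one
# serial port (port row 1 assumed); address and community are assumptions.
#
# from pysnmp.hlapi import (getCmd, SnmpEngine, CommunityData,
#                           UdpTransportTarget, ContextData, ObjectType,
#                           ObjectIdentity)
#
# errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
#     SnmpEngine(), CommunityData('public', mpModel=1),
#     UdpTransportTarget(('192.168.127.254', 161)), ContextData(),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'monitorTxCount', 1)),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'monitorRxCount', 1)),
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'monitorDCD', 1))))
# for varBind in varBinds:
#     print(' = '.join([x.prettyPrint() for x in varBind]))
# ---------------------------------------------------------------------------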
serialPortErrorCount = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 3))
monitorSerialPortErrorCountTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 3, 1), )
if mibBuilder.loadTexts: monitorSerialPortErrorCountTable.setStatus('current')
if mibBuilder.loadTexts: monitorSerialPortErrorCountTable.setDescription('Serial Port Error Count table.')
monitorSerialPortErrorCountEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 3, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: monitorSerialPortErrorCountEntry.setStatus('current')
if mibBuilder.loadTexts: monitorSerialPortErrorCountEntry.setDescription('Serial Port Error Count entry.')
monitorErrorCountFrame = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorErrorCountFrame.setStatus('current')
if mibBuilder.loadTexts: monitorErrorCountFrame.setDescription("View the number of 'Frame' errors for this port.")
monitorErrorCountParity = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 3, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorErrorCountParity.setStatus('current')
if mibBuilder.loadTexts: monitorErrorCountParity.setDescription("View the number of 'Parity' errors for this port.")
monitorErrorCountOverrun = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 3, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorErrorCountOverrun.setStatus('current')
if mibBuilder.loadTexts: monitorErrorCountOverrun.setDescription("View the number of 'Overrun' errors for this port.")
monitorErrorCountBreak = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 3, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorErrorCountBreak.setStatus('current')
if mibBuilder.loadTexts: monitorErrorCountBreak.setDescription("View the number of 'Break' errors for this port.")
serialPortSettings = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4))
monitorSerialPortSettingsTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1), )
if mibBuilder.loadTexts: monitorSerialPortSettingsTable.setStatus('current')
if mibBuilder.loadTexts: monitorSerialPortSettingsTable.setDescription('Serial Port Settings table.')
monitorSerialPortSettingsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: monitorSerialPortSettingsEntry.setStatus('current')
if mibBuilder.loadTexts: monitorSerialPortSettingsEntry.setDescription('Serial Port Settings entry.')
monitorBaudRate = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorBaudRate.setStatus('current')
if mibBuilder.loadTexts: monitorBaudRate.setDescription('View the current baud rate of the port.')
monitorDataBits = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorDataBits.setStatus('current')
if mibBuilder.loadTexts: monitorDataBits.setDescription('View the number of bits per byte for the port.')
monitorStopBits = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorStopBits.setStatus('current')
if mibBuilder.loadTexts: monitorStopBits.setDescription('View the number of stop bits for the port.')
monitorParity = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 8, 24, 40, 56))).clone(namedValues=NamedValues(("none", 0), ("odd", 8), ("even", 24), ("mark", 40), ("space", 56)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorParity.setStatus('current')
if mibBuilder.loadTexts: monitorParity.setDescription('View the parity bit for the port.')
monitorRTSCTSFlowControl = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorRTSCTSFlowControl.setStatus('current')
if mibBuilder.loadTexts: monitorRTSCTSFlowControl.setDescription('View if the port is using RTS/CTS flow control.')
monitorXONXOFFFlowControl = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorXONXOFFFlowControl.setStatus('current')
if mibBuilder.loadTexts: monitorXONXOFFFlowControl.setDescription('View if the port is using XON/XOFF flow control.')
monitorDTRDSRFlowControl = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorDTRDSRFlowControl.setStatus('current')
if mibBuilder.loadTexts: monitorDTRDSRFlowControl.setDescription('View if the port is using DTR/DSR flow control.')
monitorFIFO = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorFIFO.setStatus('current')
if mibBuilder.loadTexts: monitorFIFO.setDescription('View FIFO status of this port.')
monitorInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 4, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("rs-232", 0), ("rs-422", 1), ("rs-485-2-wire", 2), ("rs-485-4-wire", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: monitorInterface.setStatus('current')
if mibBuilder.loadTexts: monitorInterface.setDescription('View the serial interface that the port is using.')
relayOutputStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5))
relayOutputEthernet1LinkDown = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("alarm", 1), ("alarm-Acked", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: relayOutputEthernet1LinkDown.setStatus('current')
if mibBuilder.loadTexts: relayOutputEthernet1LinkDown.setDescription('The relay d-out status of the event that the built-in LAN port is off.')
ethernet1LinkDownAcknowledge = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0))).clone(namedValues=NamedValues(("acked", 0)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: ethernet1LinkDownAcknowledge.setStatus('current')
if mibBuilder.loadTexts: ethernet1LinkDownAcknowledge.setDescription('Acknowledge the event that the built-in LAN port is off. Acknowledgement clears the d-out alarm even before the underlying problem is fixed.')
relayOutputEthernet2LinkDown = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("alarm", 1), ("alarm-Acked", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: relayOutputEthernet2LinkDown.setStatus('current')
if mibBuilder.loadTexts: relayOutputEthernet2LinkDown.setDescription('The relay d-out status of the event that the first LAN port of the network module is off.')
ethernet2LinkDownAcknowledge = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0))).clone(namedValues=NamedValues(("acked", 0)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: ethernet2LinkDownAcknowledge.setStatus('current')
if mibBuilder.loadTexts: ethernet2LinkDownAcknowledge.setDescription('Acknowledge the event that the first LAN port of the network module is off. Acknowledgement clears the d-out alarm even before the underlying problem is fixed.')
relayOutputEthernet3LinkDown = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("alarm", 1), ("alarm-Acked", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: relayOutputEthernet3LinkDown.setStatus('current')
if mibBuilder.loadTexts: relayOutputEthernet3LinkDown.setDescription('The relay d-out status of the event that the second LAN port of the network module is off.')
ethernet3LinkDownAcknowledge = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0))).clone(namedValues=NamedValues(("acked", 0)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: ethernet3LinkDownAcknowledge.setStatus('current')
if mibBuilder.loadTexts: ethernet3LinkDownAcknowledge.setDescription('Acknowledge the event that the second LAN port of the network module is off. Acknowledgement clears the d-out alarm even before the underlying problem is fixed.')
portDCDChangedStatusTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 7), )
if mibBuilder.loadTexts: portDCDChangedStatusTable.setStatus('current')
if mibBuilder.loadTexts: portDCDChangedStatusTable.setDescription('DCD changed table.')
portDCDChangedStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 7, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: portDCDChangedStatusEntry.setStatus('current')
if mibBuilder.loadTexts: portDCDChangedStatusEntry.setDescription('DCD changed entry.')
portDCDChangedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 7, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("alarm", 1), ("alarm-Acked", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portDCDChangedStatus.setStatus('current')
if mibBuilder.loadTexts: portDCDChangedStatus.setDescription('The relay d-out status of the event that the DCD signal of the port has changed.')
portDCDChangedAcknowledge = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 7, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0))).clone(namedValues=NamedValues(("acked", 0)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: portDCDChangedAcknowledge.setStatus('current')
if mibBuilder.loadTexts: portDCDChangedAcknowledge.setDescription('Acknowledge the event that the DCD signal of the port has changed. Acknowledgement clears the d-out alarm even before the underlying problem is fixed.')
portDSRChangedStatusTable = MibTable((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 8), )
if mibBuilder.loadTexts: portDSRChangedStatusTable.setStatus('current')
if mibBuilder.loadTexts: portDSRChangedStatusTable.setDescription('DSR changed Table.')
portDSRChangedStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 8, 1), ).setIndexNames((0, "MOXA-NP6000-MIB", "portIndex"))
if mibBuilder.loadTexts: portDSRChangedStatusEntry.setStatus('current')
if mibBuilder.loadTexts: portDSRChangedStatusEntry.setDescription('DSR changed entry.')
portDSRChangedStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 8, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("alarm", 1), ("alarm-Acked", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portDSRChangedStatus.setStatus('current')
if mibBuilder.loadTexts: portDSRChangedStatus.setDescription('The relay d-out status of the event that the DSR signal of the port has changed.')
portDSRChangedAcknowledge = MibTableColumn((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 6, 5, 8, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0))).clone(namedValues=NamedValues(("acked", 0)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: portDSRChangedAcknowledge.setStatus('current')
if mibBuilder.loadTexts: portDSRChangedAcknowledge.setDescription('Acknowledge the event that the DSR signal of the port has changed. Acknowledgement clears the d-out alarm even before the underlying problem is fixed.')
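# --- Illustrative example (not part of the pysmi-generated MIB) -----------
# A minimal sketch of the poll-then-acknowledge pattern for the relay d-out
# objects above: read the read-only status scalar, and if it reports
# alarm(1), clear it through the matching write-only acknowledge scalar.
# Address and community are assumptions.
#
# from pysnmp.hlapi import (getCmd, setCmd, SnmpEngine, CommunityData,
#                           UdpTransportTarget, ContextData, ObjectType,
#                           ObjectIdentity, Integer32)
#
# engine, auth, target, ctx = (SnmpEngine(), CommunityData('private', mpModel=1),
#                              UdpTransportTarget(('192.168.127.254', 161)),
#                              ContextData())
# _, _, _, varBinds = next(getCmd(engine, auth, target, ctx,
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'relayOutputEthernet1LinkDown', 0))))
# if int(varBinds[0][1]) == 1:   # alarm(1)
#     next(setCmd(engine, auth, target, ctx,
#         ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'ethernet1LinkDownAcknowledge', 0),
#                    Integer32(0))))   # acked(0)
# ---------------------------------------------------------------------------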
saveConfiguration = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 7))
saveConfig = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 7, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("save", 1)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: saveConfig.setStatus('current')
if mibBuilder.loadTexts: saveConfig.setDescription('Save the applied configuration to the NPort 6000 series device server.')
restart = MibIdentifier((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 8))
restartPorts = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 8, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15))).clone(namedValues=NamedValues(("port1", 0), ("port2", 1), ("port3", 2), ("port4", 3), ("port5", 4), ("port6", 5), ("port7", 6), ("port8", 7), ("port9", 8), ("port10", 9), ("port11", 10), ("port12", 11), ("port13", 12), ("port14", 13), ("port15", 14), ("port16", 15)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: restartPorts.setStatus('current')
if mibBuilder.loadTexts: restartPorts.setDescription('Restart a serial port. Restarting will reset the port.')
restartSystem = MibScalar((1, 3, 6, 1, 4, 1, 8691, 2, 8, 1, 8, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("restart", 1)))).setMaxAccess("writeonly")
if mibBuilder.loadTexts: restartSystem.setStatus('current')
if mibBuilder.loadTexts: restartSystem.setDescription('Restart the NPort 6000 series device server. Be sure to save all of your configuration changes before you restart the system. Otherwise, all of the changes will be lost.')
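# --- Illustrative example (not part of the pysmi-generated MIB) -----------
# A minimal sketch of a safe reboot sequence using the two write-only
# scalars above: persist applied settings with saveConfig first, because an
# unsaved configuration is lost on restart (see restartSystem's
# description). Address and community are assumptions.
#
# from pysnmp.hlapi import (setCmd, SnmpEngine, CommunityData,
#                           UdpTransportTarget, ContextData, ObjectType,
#                           ObjectIdentity, Integer32)
#
# engine, auth, target, ctx = (SnmpEngine(), CommunityData('private', mpModel=1),
#                              UdpTransportTarget(('192.168.127.254', 161)),
#                              ContextData())
# next(setCmd(engine, auth, target, ctx,
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'saveConfig', 0), Integer32(1))))      # save(1)
# next(setCmd(engine, auth, target, ctx,
#     ObjectType(ObjectIdentity('MOXA-NP6000-MIB', 'restartSystem', 0), Integer32(1))))   # restart(1)
# ---------------------------------------------------------------------------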
mibBuilder.exportSymbols("MOXA-NP6000-MIB", pairConnectionDestinationAddress=pairConnectionDestinationAddress, portStopBits=portStopBits, socketSecure=socketSecure, routeTableEntry=routeTableEntry, loadFactoryDefaultSetting=loadFactoryDefaultSetting, accessibleIpListEntry=accessibleIpListEntry, emailWarningThirdEmailAddr=emailWarningThirdEmailAddr, monitorRemoteIpEntry=monitorRemoteIpEntry, socketInactivityTime=socketInactivityTime, portDSRChangedStatusEntry=portDSRChangedStatusEntry, deviceControlConnectionDownDTR=deviceControlConnectionDownDTR, monitorDTRDSRFlowControl=monitorDTRDSRFlowControl, socketUdpDestinationAddress3Begin=socketUdpDestinationAddress3Begin, terminalAutoLinkProtocol=terminalAutoLinkProtocol, remoteIpIndex=remoteIpIndex, portDelimiterProcess=portDelimiterProcess, portBufferingEnable=portBufferingEnable, monitorXONXOFFFlowControl=monitorXONXOFFFlowControl, upTime=upTime, socketUdpDestinationAddress1Begin=socketUdpDestinationAddress1Begin, dnsServer2IpAddr=dnsServer2IpAddr, winsFunction=winsFunction, radiusAccounting=radiusAccounting, userTableIndex=userTableIndex, socketTcpClientDestinationAddress3=socketTcpClientDestinationAddress3, pairConnectionTable=pairConnectionTable, nport=nport, printerAppendFromFeed=printerAppendFromFeed, terminalMaxSessions=terminalMaxSessions, dDNSServerAddress=dDNSServerAddress, terminalType=terminalType, PortList=PortList, lcmReadOnlyProtect=lcmReadOnlyProtect, portDelimiter1=portDelimiter1, portDataBits=portDataBits, relayOutputEthernet1LinkDown=relayOutputEthernet1LinkDown, deviceControlEntry=deviceControlEntry, routeTable=routeTable, monitorDCD=monitorDCD, portBufferingSDFileSize=portBufferingSDFileSize, serverName=serverName, socketTcpClientDestinationPort2=socketTcpClientDestinationPort2, portDelimiter2=portDelimiter2, printerGroup=printerGroup, terminalAuthenticationType=terminalAuthenticationType, autoWarningSettings=autoWarningSettings, terminalSecondHostAddress=terminalSecondHostAddress, terminalAutoLoginPrompt=terminalAutoLoginPrompt, reverseTerminalTcpPort=reverseTerminalTcpPort, socketUdpDestinationPort4=socketUdpDestinationPort4, hostTableEntry=hostTableEntry, terminalTelnetTcpPort=terminalTelnetTcpPort, reverseTerminalTable=reverseTerminalTable, printerTable=printerTable, portDCDChangedStatusEntry=portDCDChangedStatusEntry, opModePortEntry=opModePortEntry, ethernet1LinkDownAcknowledge=ethernet1LinkDownAcknowledge, mailWarningAuthFailure=mailWarningAuthFailure, activeAccessibleIpList=activeAccessibleIpList, socketUdpDestinationPort3=socketUdpDestinationPort3, reverseTerminal=reverseTerminal, portPhoneNumber=portPhoneNumber, socketUdpDestinationPort2=socketUdpDestinationPort2, serialPortStatus=serialPortStatus, sysStatus=sysStatus, mailWarningIpChanged=mailWarningIpChanged, dialIncomingPAPCheck=dialIncomingPAPCheck, socketUdpLocalListenPort=socketUdpLocalListenPort, portBaudRateManual=portBaudRateManual, monitorTxCount=monitorTxCount, reverseTerminalAuthenticationType=reverseTerminalAuthenticationType, dialSLIPDMode=dialSLIPDMode, portEnableWelcomeMessage=portEnableWelcomeMessage, accessibleIpListAddress=accessibleIpListAddress, reverseTerminalInactivityTime=reverseTerminalInactivityTime, miscNetworkSettings=miscNetworkSettings, ethernetModemTable=ethernetModemTable, pppoeUserAccount=pppoeUserAccount, terminalBreak=terminalBreak, networkSetting=networkSetting, accessibleIpListIndex=accessibleIpListIndex, dDNS=dDNS, monitorErrorCountOverrun=monitorErrorCountOverrun, dataPackingPortEntry=dataPackingPortEntry, 
portApplication=portApplication, snmpAgentSettings=snmpAgentSettings, restartPorts=restartPorts, dataPacking=dataPacking, monitorErrorCountFrame=monitorErrorCountFrame, relayOutputStatus=relayOutputStatus, portBufferingLocation=portBufferingLocation, emailWarningMailServer=emailWarningMailServer, socketUdpDestinationAddress3End=socketUdpDestinationAddress3End, pairConnectionTcpAliveCheck=pairConnectionTcpAliveCheck, sysLocalLog=sysLocalLog, mailDCDchange=mailDCDchange, trapServerColdStart=trapServerColdStart, socketTcpClientDestinationPort1=socketTcpClientDestinationPort1, terminalPrimaryHostAddress=terminalPrimaryHostAddress, portDCDChangedStatusTable=portDCDChangedStatusTable, socket=socket, deviceControlAllowDriverControl=deviceControlAllowDriverControl, emailWarningSecondEmailAddr=emailWarningSecondEmailAddr, metricRouteTable=metricRouteTable, routeTableIndex=routeTableIndex, portEnableModem=portEnableModem, emailWarningPassword=emailWarningPassword, deviceControl=deviceControl, terminalTable=terminalTable, monitorRTSCTSFlowControl=monitorRTSCTSFlowControl, httpsConsole=httpsConsole, alarmServerEthernet1LinkDown=alarmServerEthernet1LinkDown, dialOutgoingPAPID=dialOutgoingPAPID, winsServer=winsServer, consoleSettings=consoleSettings, portSetting=portSetting, portAlias=portAlias, monitorRxTotalCount=monitorRxTotalCount, relayOutputEthernet3LinkDown=relayOutputEthernet3LinkDown, monitorSerialPortSettingsTable=monitorSerialPortSettingsTable, opModeSetting=opModeSetting, deviceControlTcpPort=deviceControlTcpPort, sshConsole=sshConsole, monitorErrorCountBreak=monitorErrorCountBreak, emailRequiresAuthentication=emailRequiresAuthentication, deviceControlIgnoreJammedIp=deviceControlIgnoreJammedIp, monitorErrorCountParity=monitorErrorCountParity, terminal=terminal, trapServerWarmStart=trapServerWarmStart, dnsServer1IpAddr=dnsServer1IpAddr, trapVersion=trapVersion, monitorTxTotalCount=monitorTxTotalCount, dataBufferingPortTable=dataBufferingPortTable, socketTcpAliveCheck=socketTcpAliveCheck, application=application, dialDestinationIpAddress=dialDestinationIpAddress, welcomeMessage=welcomeMessage, printerEntry=printerEntry, deviceControlTcpAliveCheck=deviceControlTcpAliveCheck, pairConnectionSecure=pairConnectionSecure, defaultGateway=defaultGateway, socketTcpClientDestinationAddress4=socketTcpClientDestinationAddress4, monitorSerialPortSettingsEntry=monitorSerialPortSettingsEntry, authenticationServer=authenticationServer, socketMaxConnection=socketMaxConnection, dialEntry=dialEntry, snmpTrap=snmpTrap, socketEntry=socketEntry, hostTableIndex=hostTableIndex, userTable=userTable, socketUdpDestinationAddress4End=socketUdpDestinationAddress4End, modemSettingsPortTable=modemSettingsPortTable, modemSettings=modemSettings, swMgmt=swMgmt, socketTcpServerConnectionDownDTR=socketTcpServerConnectionDownDTR, portSerialDataLoggingEnable=portSerialDataLoggingEnable, s2eConnections=s2eConnections, ethernet2LinkDownAcknowledge=ethernet2LinkDownAcknowledge, portFlowControl=portFlowControl, accessibleIpListNetmask=accessibleIpListNetmask, resetButtonFunction=resetButtonFunction, radiusServerIp=radiusServerIp, macAddress=macAddress, httpConsole=httpConsole, mailWarningPasswordChanged=mailWarningPasswordChanged, snmpContactName=snmpContactName, serialNumber=serialNumber, dataBufferingPortEntry=dataBufferingPortEntry, monitorRemoteIpTable=monitorRemoteIpTable, userTableEntry=userTableEntry, dDNSEnable=dDNSEnable, interfaceRouteTable=interfaceRouteTable, printerQueueNameRaw=printerQueueNameRaw, 
comParamSetting=comParamSetting, dialPAPPassword=dialPAPPassword, ethernetModemTcpAliveCheck=ethernetModemTcpAliveCheck, socketTcpClientDesignatedLocalPort1=socketTcpClientDesignatedLocalPort1, dialAuthType=dialAuthType, deviceControlMaxConnection=deviceControlMaxConnection, monitorBaudRate=monitorBaudRate, dialTcpIpCompression=dialTcpIpCompression, restartSystem=restartSystem, monitorDataBits=monitorDataBits, eventSettings=eventSettings, dDNSHostName=dDNSHostName, portParity=portParity, deviceControlTable=deviceControlTable, dialTable=dialTable, terminalEntry=terminalEntry, terminalInterrupt=terminalInterrupt, modemSettingsPortEntry=modemSettingsPortEntry, netmaskRouteTable=netmaskRouteTable, pppoePassword=pppoePassword, dialIpNetmask=dialIpNetmask, hostIpAddress=hostIpAddress, terminalInactivityTime=terminalInactivityTime, socketUdpDestinationAddress4Begin=socketUdpDestinationAddress4Begin, portFIFO=portFIFO, relayOutputEthernet2LinkDown=relayOutputEthernet2LinkDown, sysIpAddress=sysIpAddress, monitorInterface=monitorInterface, gratuitousArp=gratuitousArp, trapDCDchange=trapDCDchange, portEventSettingsTable=portEventSettingsTable, monitorSerialPortErrorCountEntry=monitorSerialPortErrorCountEntry, serialEventSettings=serialEventSettings, telnetConsole=telnetConsole, pairConnectionTcpPort=pairConnectionTcpPort, portDelimiter1Enable=portDelimiter1Enable, hostName=hostName, terminalQuit=terminalQuit, emailWarningFirstEmailAddr=emailWarningFirstEmailAddr, ethernet3LinkDownAcknowledge=ethernet3LinkDownAcknowledge, saveConfiguration=saveConfiguration, userNameUserTable=userNameUserTable, trapServerAuthFailure=trapServerAuthFailure, reverseTerminalEntry=reverseTerminalEntry, mailDSRchange=mailDSRchange, serverLocation=serverLocation, dDNSUserName=dDNSUserName, monitorParity=monitorParity, overview=overview, socketUdpDestinationAddress2End=socketUdpDestinationAddress2End, ethernetModemEntry=ethernetModemEntry, passwordUserTable=passwordUserTable, localTime=localTime, socketTcpClientDesignatedLocalPort4=socketTcpClientDesignatedLocalPort4, portDSRChangedAcknowledge=portDSRChangedAcknowledge, socketCmdPort=socketCmdPort, opModePortTable=opModePortTable, socketTcpClientDesignatedLocalPort3=socketTcpClientDesignatedLocalPort3, monitorRemoteIp=monitorRemoteIp, socketTcpClientDestinationAddress2=socketTcpClientDestinationAddress2, accessibleIpListTable=accessibleIpListTable, terminalLoginUserName=terminalLoginUserName, terminalPasswordPrompt=terminalPasswordPrompt, saveConfig=saveConfig, socketTable=socketTable, portPacketLength=portPacketLength, monitorRTS=monitorRTS, portDialUp=portDialUp, portDCDChangedStatus=portDCDChangedStatus, timeSetting=timeSetting, dial=dial, restart=restart, deviceControlSecure=deviceControlSecure, maintenance=maintenance, netMask=netMask, serverSetting=serverSetting, emailWarningFourthEmailAddr=emailWarningFourthEmailAddr, opModeLocalLog=opModeLocalLog, routingProtocol=routingProtocol, pairConnection=pairConnection, routeTableTable=routeTableTable)
mibBuilder.exportSymbols("MOXA-NP6000-MIB", userTableTable=userTableTable, monitorSerialPortStatusEntry=monitorSerialPortStatusEntry, monitorDSR=monitorDSR, monitorSerialPortStatusTable=monitorSerialPortStatusTable, terminalLoginPassword=terminalLoginPassword, dataPackingPortTable=dataPackingPortTable, configLocalLog=configLocalLog, monitorRxCount=monitorRxCount, trapDSRchange=trapDSRchange, reverseTerminalTcpAliveCheck=reverseTerminalTcpAliveCheck, portIndex=portIndex, moxa=moxa, enableAccessibleIpList=enableAccessibleIpList, lan1Speed=lan1Speed, mailWarningColdStart=mailWarningColdStart, emailWarningFromEmail=emailWarningFromEmail, portMode=portMode, gratuitousArpSendPeriod=gratuitousArpSendPeriod, dataBuffering=dataBuffering, pairConnectionEntry=pairConnectionEntry, firmwareVersion=firmwareVersion, dialLinkQualityReport=dialLinkQualityReport, gatewayRouteTable=gatewayRouteTable, alarmDCDchange=alarmDCDchange, portInterface=portInterface, destinationRouteTable=destinationRouteTable, socketTcpClientDestinationPort4=socketTcpClientDestinationPort4, socketAllowDriverControl=socketAllowDriverControl, portEventSettingsEntry=portEventSettingsEntry, opMode=opMode, dialSourceIpAddress=dialSourceIpAddress, viewLanModuleSpeed=viewLanModuleSpeed, printerTcpAliveCheck=printerTcpAliveCheck, portDSRChangedStatusTable=portDSRChangedStatusTable, printerTcpPort=printerTcpPort, snmpEnable=snmpEnable, sysLogSettings=sysLogSettings, radiusKey=radiusKey, portMessage=portMessage, portBaudRate=portBaudRate, portDCDChangedAcknowledge=portDCDChangedAcknowledge, socketUdpDestinationPort1=socketUdpDestinationPort1, socketTcpPort=socketTcpPort, sysManagement=sysManagement, comParamPortTable=comParamPortTable, emailAlert=emailAlert, socketTcpClientDestinationAddress1=socketTcpClientDestinationAddress1, timeZone=timeZone, socketUdpDestinationAddress2Begin=socketUdpDestinationAddress2Begin, snmpLocation=snmpLocation, comParamPortEntry=comParamPortEntry, serialPortErrorCount=serialPortErrorCount, accessibleIp=accessibleIp, mailWarningWarmStart=mailWarningWarmStart, moduleType=moduleType, alarmServerEthernet2LinkDown=alarmServerEthernet2LinkDown, terminalSshTcpPort=terminalSshTcpPort, socketTcpServerConnectionDownRTS=socketTcpServerConnectionDownRTS, PYSNMP_MODULE_ID=np6000, portDelimiter2Enable=portDelimiter2Enable, emailWarningUserName=emailWarningUserName, terminalChangeSession=terminalChangeSession, hostTableTable=hostTableTable, phoneNumberUserTable=phoneNumberUserTable, snmpTrapReceiverIp=snmpTrapReceiverIp, socketTcpClientDestinationPort3=socketTcpClientDestinationPort3, portForceTransmit=portForceTransmit, alarmServerEthernet3LinkDown=alarmServerEthernet3LinkDown, monitorSerialPortErrorCountTable=monitorSerialPortErrorCountTable, networkLocalLog=networkLocalLog, reverseTerminalMapKeys=reverseTerminalMapKeys, basicSetting=basicSetting, printer=printer, pairConnectionDestinationPort=pairConnectionDestinationPort, viewLanSpeed=viewLanSpeed, portDSRChangedStatus=portDSRChangedStatus, timeServer=timeServer, loadFactoryDefault=loadFactoryDefault, socketTcpClientDesignatedLocalPort2=socketTcpClientDesignatedLocalPort2, ipConfiguration=ipConfiguration, np6000=np6000, udpPortAuthenticationServer=udpPortAuthenticationServer, socketTcpClientConnectionControl=socketTcpClientConnectionControl, dialTERMBINMode=dialTERMBINMode, ethernetModem=ethernetModem, portInitialString=portInitialString, monitorFIFO=monitorFIFO, monitorStopBits=monitorStopBits, modelName=modelName, 
deviceControlConnectionDownRTS=deviceControlConnectionDownRTS, terminalTcpAliveCheck=terminalTcpAliveCheck, monitorCTS=monitorCTS, printerQueueNameASCII=printerQueueNameASCII, socketUdpDestinationAddress1End=socketUdpDestinationAddress1End, monitorDTR=monitorDTR, alarmDSRchange=alarmDSRchange, socketIgnoreJammedIp=socketIgnoreJammedIp, ethernetModemTcpPort=ethernetModemTcpPort, serialPortSettings=serialPortSettings, dDNSPassword=dDNSPassword, dialInactivityTime=dialInactivityTime, dialDisconnectBy=dialDisconnectBy, hostTable=hostTable, dialPPPDMode=dialPPPDMode)
|
py | b416f89af55763af84295948a09bb7e5702989c1 | # -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,os
try:
from sqlite3 import dbapi2 as database
except:
from pysqlite2 import dbapi2 as database
from resources.lib.libraries import control
def addView(content):
try:
skin = control.skin
skinPath = control.skinPath
xml = os.path.join(skinPath,'addon.xml')
file = control.openFile(xml)
read = file.read().replace('\n','')
file.close()
try: src = re.compile('defaultresolution="(.+?)"').findall(read)[0]
except: src = re.compile('<res.+?folder="(.+?)"').findall(read)[0]
src = os.path.join(skinPath, src)
src = os.path.join(src, 'MyVideoNav.xml')
file = control.openFile(src)
read = file.read().replace('\n','')
file.close()
views = re.compile('<views>(.+?)</views>').findall(read)[0]
views = [int(x) for x in views.split(',')]
for view in views:
label = control.infoLabel('Control.GetLabel(%s)' % (view))
            if not (label == '' or label is None): break
record = (skin, content, str(view))
control.makeFile(control.dataPath)
dbcon = database.connect(control.databaseFile)
dbcur = dbcon.cursor()
dbcur.execute("CREATE TABLE IF NOT EXISTS views (""skin TEXT, ""view_type TEXT, ""view_id TEXT, ""UNIQUE(skin, view_type)"");")
dbcur.execute("DELETE FROM views WHERE skin = '%s' AND view_type = '%s'" % (record[0], record[1]))
dbcur.execute("INSERT INTO views Values (?, ?, ?)", record)
dbcon.commit()
viewName = control.infoLabel('Container.Viewmode')
control.infoDialog(control.lang(30491).encode('utf-8'), heading=viewName)
except:
return
def setView(content, viewDict=None):
for i in range(0, 200):
if control.condVisibility('Container.Content(%s)' % content):
try:
skin = control.skin
record = (skin, content)
dbcon = database.connect(control.databaseFile)
dbcur = dbcon.cursor()
dbcur.execute("SELECT * FROM views WHERE skin = '%s' AND view_type = '%s'" % (record[0], record[1]))
                view = dbcur.fetchone()
                if view is None: raise Exception()
                view = view[2]
return control.execute('Container.SetViewMode(%s)' % str(view))
except:
try: return control.execute('Container.SetViewMode(%s)' % str(viewDict[skin]))
except: return
control.sleep(100)
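# --- Illustrative usage sketch (added for clarity, not part of the original
# add-on). A directory listing would typically call setView() after populating
# its items, and expose a context-menu entry that calls addView() so users can
# persist their preferred view mode. The skin name and view id below are
# hypothetical examples:
#
#   from resources.lib.libraries import views
#   views.setView('movies', viewDict={'skin.confluence': '500'})
#   views.addView('movies')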
|
py | b416f89ff72b53ab3b84a7dcca8820acea87a7a2 | #!/usr/bin/env python
# -*-coding: utf-8 -*-
import logging
import time
import subprocess
import zmq
__author__ = "Sven Zehl, Anatolij Zubow"
__copyright__ = "Copyright (c) 2016, Technische Universität Berlin"
__version__ = "1.0.0"
__email__ = "{zehl, zubow}@tkn.tu-berlin.de"
"""
Class for controlling the hybrid TDMA/CSMA MAC.
"""
class HybridTDMACSMAMac(object):
def __init__(self, log, interface, no_slots_in_superframe, slot_duration_ns,
hmac_binary_path='hmac_userspace_daemon/hmac_userspace_daemon',
local_mac_processor_port=1217):
'''
The configuration of such a MAC is described by:
:param interface: the wireless interface on which we want to install the HMAC
:param no_slots_in_superframe: the total number of slots in a superframe
        :param slot_duration_ns: the time duration of each slot (nanoseconds)
:param hmac_binary_path: path to the C++ userland HMAC daemon
:param local_mac_processor_port: ZeroMQ port used for communication with HMAC daemon
'''
self.log = log
self.interface = interface
self.mNo_slots_in_superframe = no_slots_in_superframe
self.mSlot_duration_ns = slot_duration_ns
self.acs = []
for ii in range(no_slots_in_superframe):
self.acs.append(None)
# path to the HMAC C++ userland daemon
self.hmac_binary_path = hmac_binary_path
self.local_mac_processor_port = local_mac_processor_port
self.state = MACState.NOT_RUNNING
def getInterface(self):
'''
Returns the wireless interface
'''
return self.interface
def getNumSlots(self):
'''
Get the total number of slots in superframe
'''
return self.mNo_slots_in_superframe
def setAccessPolicy(self, slot_nr, ac):
'''
Sets an access policy to a given slot in the superframe
:param slot_nr: the slot id to which the access policy to apply
:param ac: the access policy (AccessPolicy class)
:return: True if correct
'''
if slot_nr >= 0 and slot_nr < len(self.acs):
self.acs[slot_nr] = ac
return True
else:
return False
def getAccessPolicy(self, slot_nr):
'''
Get the access policy assigned to given slot.
:param slot_nr: ID starting from 0.
:return: AccessPolicy object
'''
if slot_nr >= 0 and slot_nr < len(self.acs):
return self.acs[slot_nr]
else:
return None
def removeAccessPolicy(self, slot_nr):
'''
Removes the access policy assigned to given slot.
:param slot_nr: ID starting from 0.
:return: True
'''
if slot_nr >= 0 and slot_nr < len(self.acs):
self.acs[slot_nr] = None
return True
else:
return False
def getSlotDuration(self):
'''
Get time duration of a slot
'''
return self.mSlot_duration_ns
def printConfiguration(self):
'''
        Log the current MAC configuration, one access policy per slot.
'''
self.log.info('[')
for ii in range(self.getNumSlots()):
nline = str(ii) + ': ' + self.getAccessPolicy(ii).printConfiguration()
self.log.info(nline)
self.log.info(']')
def install_mac_processor(self):
'''
Installs the given hybrid MAC configuration
:return: True if successful
'''
self.log.debug('install_mac_processor()')
if self.state == MACState.RUNNING:
self.log.warn('HMAC is already running; use update_mac_processor() to update at run-time')
return False
try:
# 1. create HMAC configuration string
conf_str = self._create_configuration_string()
# construct command argument for HMAC daemon
processArgs = str(self.hmac_binary_path) + " -d 0 " + " -i" + str(self.interface) \
+ " -f" + str(self.getSlotDuration()) + " -n" + str(self.getNumSlots()) + " -c" + conf_str
self.log.debug('Starting HMAC daemon with: %s' % processArgs)
# start HMAC daemon as a background process
subprocess.Popen(processArgs.split(), shell=False)
self.hmac_ctrl_socket = None
self.state = MACState.RUNNING
return True
except Exception as e:
self.log.fatal("An error occurred while starting HMAC daemon, err_msg: %s" % str(e))
return False
def update_mac_processor(self):
'''
Updates the given hybrid MAC configuration at run-time with new configuration
:return: True if successful
'''
self.log.debug('update_mac_processor()')
if self.state == MACState.NOT_RUNNING:
            self.log.info('HMAC is not running yet; starting it')
return self.install_mac_processor()
try:
# 1. create HMAC configuration string
conf_str = self._create_configuration_string()
if self.hmac_ctrl_socket is None:
context = zmq.Context()
self.hmac_ctrl_socket = context.socket(zmq.REQ)
self.hmac_ctrl_socket.connect("tcp://localhost:" + str(self.local_mac_processor_port))
# update MAC processor configuration
self.log.info("Send ctrl req message to HMAC: %s" % conf_str)
self.hmac_ctrl_socket.send(conf_str)
message = self.hmac_ctrl_socket.recv()
self.log.info("Received ctrl reply message from HMAC: %s" % message)
return True
except zmq.ZMQError as e:
self.log.fatal("Failed to update running HMAC daemon, err_msg: %s" % str(e))
return False
def uninstall_mac_processor(self):
'''
Uninstalls the running hybrid MAC
:return: True if successful
'''
self.log.debug('uninstall_mac_processor')
if self.state == MACState.NOT_RUNNING:
self.log.warn('HMAC is already stopped')
return True
try:
# set allow all configuration string
conf_str = self._create_allow_all_conf_string()
# command string
terminate_str = 'TERMINATE'
if self.hmac_ctrl_socket is None:
context = zmq.Context()
self.hmac_ctrl_socket = context.socket(zmq.REQ)
self.hmac_ctrl_socket.connect("tcp://localhost:" + str(self.local_mac_processor_port))
# update MAC processor configuration
self.log.info("Send ctrl req message to HMAC: %s" % conf_str)
self.hmac_ctrl_socket.send(conf_str)
message = self.hmac_ctrl_socket.recv()
self.log.info("Received ctrl reply from HMAC: %s" % message)
# give one second to settle down
time.sleep(1)
# send termination signal to MAC
self.hmac_ctrl_socket.send(terminate_str)
message = self.hmac_ctrl_socket.recv()
self.log.info("Received ctrl reply from HMAC: %s" % message)
self.state = MACState.NOT_RUNNING
return True
except zmq.ZMQError as e:
self.log.fatal("Failed to uninstall MAC processor %s" % str(e))
return False
''' Helper '''
def _create_configuration_string(self):
conf_str = None
for ii in range(self.getNumSlots()): # for each slot
ac = self.getAccessPolicy(ii)
entries = ac.getEntries()
for ll in range(len(entries)):
entry = entries[ll]
# slot_id, mac_addr, tid_mask
if conf_str is None:
conf_str = str(ii) + "," + str(entry[0]) + "," + str(entry[1])
else:
conf_str = conf_str + "#" + str(ii) + "," + str(entry[0]) + "," + str(entry[1])
return conf_str
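    # Note (added for clarity): the resulting configuration string is a
    # '#'-separated list of 'slot_id,mac_addr,tid_mask' triples, e.g.
    # "0,aa:bb:cc:dd:ee:ff,255#1,aa:bb:cc:dd:ee:ff,3" (hypothetical values).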
''' Helper '''
def _create_allow_all_conf_string(self):
# generate configuration string
conf_str = None
for ii in range(self.getNumSlots()): # for each slot
# slot_id, mac_addr, tid_mask
if conf_str is None:
conf_str = str(ii) + "," + 'FF:FF:FF:FF:FF:FF' + "," + str(255)
else:
conf_str = conf_str + "#" + str(ii) + "," + 'FF:FF:FF:FF:FF:FF' + "," + str(255)
return conf_str
class MACState:
RUNNING, NOT_RUNNING = range(2)
"""
Class for controlling the access policy of each time slot using the destination MAC address and IP ToS value.
"""
class AccessPolicy(object):
def __init__(self):
self.entries = []
def disableAll(self):
'''
Block usage of time slot for all packets
'''
self.entries = []
def allowAll(self):
'''
Unblock usage of time slot for all packets
'''
self.entries = []
self.entries.append(('FF:FF:FF:FF:FF:FF', 255))
def addDestMacAndTosValues(self, dstHwAddr, *tosArgs):
"""Add destination mac address and list of ToS fields which is allowed to be transmitted in this time slot
:param dstHwAddr: destination mac address
:param tosArgs: list of ToS values to be allowed here
"""
tid_map = 0
for ii in range(len(tosArgs)):
            # convert ToS into a TID, mirroring Linux rt_tos2priority():
            # skb priority = (tos & 0x1E) >> 1, then TID = priority & 7
            tos = tosArgs[ii]
            skb_prio = (tos & 30) >> 1  # parenthesized: '>>' binds tighter than '&' in Python
            tid = skb_prio & 7
            tid_map = tid_map | 2 ** tid
self.entries.append((dstHwAddr, tid_map))
def getEntries(self):
'''
Get saved entries
'''
return self.entries
def printConfiguration(self):
'''
For debugging
'''
s = ''
for ii in range(len(self.entries)):
s = str(self.entries[ii][0]) + "/" + str(self.entries[ii][1]) + "," + s
return s
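# --- Minimal usage sketch (added for illustration; the interface name, MAC
# address and slot layout are assumptions, and running it requires the HMAC
# userland daemon binary):
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    example_log = logging.getLogger('hmac_example')
    # superframe of 4 slots, 20 ms each
    mac = HybridTDMACSMAMac(example_log, interface='wlan0',
                            no_slots_in_superframe=4,
                            slot_duration_ns=20000000)
    # slot 0: reserved for a single station, best-effort traffic (ToS 0)
    ap0 = AccessPolicy()
    ap0.addDestMacAndTosValues('00:11:22:33:44:55', 0)
    mac.setAccessPolicy(0, ap0)
    # remaining slots: open contention (CSMA-like) for everybody
    for slot in range(1, mac.getNumSlots()):
        ap = AccessPolicy()
        ap.allowAll()
        mac.setAccessPolicy(slot, ap)
    mac.printConfiguration()
    mac.install_mac_processor()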
|
py | b416f97268347f8207fbcfb94b1ec826c64efc6f | def get_parameters(intcode_list, raw, relative_base, mode):
if mode == 0:
return intcode_list[raw]
if mode == 1:
return raw
if mode == 2:
return intcode_list[raw + relative_base]
def get_write_parameters(intcode_list, raw, relative_base, mode):
if mode == 0:
return raw
if mode == 1:
return None
if mode == 2:
return raw + relative_base
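# Note (added for clarity): the three Intcode parameter modes are
#   0 = position mode  (the raw value is an address),
#   1 = immediate mode (the raw value is the operand itself),
#   2 = relative mode  (the raw value is an offset from the relative base).
# Write targets are never immediate, hence get_write_parameters() returns
# None for mode 1.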
def full_intcode_computer(ram, pointer, rb):
relative_base = rb
my_input = []
counter = pointer
if len(ram) < 100000:
pre_buffer = [0 for x in range(10000)]
ram.extend(pre_buffer)
while True:
try:
item = ram[counter]
ones = int(str(item)[-1])
tens = int(str(item // 10)[-1])
hundreds = int(str(item // 100)[-1])
thousands = int(str(item // 1000)[-1])
ten_thousands = int(str(item // 10000)[-1])
if ones == 1:
raw1 = ram[counter + 1]
raw2 = ram[counter + 2]
raw3 = ram[counter + 3]
param1 = get_parameters(ram, raw1, relative_base, hundreds)
param2 = get_parameters(ram, raw2, relative_base, thousands)
param3 = get_write_parameters(ram, raw3, relative_base, ten_thousands)
position = param3
ram[position] = param1 + param2
counter += 4
continue
if ones == 2:
raw1 = ram[counter + 1]
raw2 = ram[counter + 2]
raw3 = ram[counter + 3]
param1 = get_parameters(ram, raw1, relative_base, hundreds)
param2 = get_parameters(ram, raw2, relative_base, thousands)
param3 = get_write_parameters(ram, raw3, relative_base, ten_thousands)
position = param3
ram[position] = param1 * param2
counter += 4
continue
if ones == 3:
raw1 = ram[counter + 1]
param1 = get_write_parameters(ram, raw1, relative_base, hundreds)
if not my_input:
my_input = yield
# if isinstance(my_input, list):
# my_input = my_input.pop(0)
ram[param1] = my_input.pop(0)
counter += 2
continue
if ones == 4:
raw1 = ram[counter + 1]
param1 = get_parameters(ram, raw1, relative_base, hundreds)
counter += 2
yield param1
continue
if ones == 5:
raw1 = ram[counter + 1]
raw2 = ram[counter + 2]
param1 = get_parameters(ram, raw1, relative_base, hundreds)
param2 = get_parameters(ram, raw2, relative_base, thousands)
if param1 == 0:
counter += 3
continue
else:
counter = param2
continue
if ones == 6:
raw1 = ram[counter + 1]
raw2 = ram[counter + 2]
param1 = get_parameters(ram, raw1, relative_base, hundreds)
param2 = get_parameters(ram, raw2, relative_base, thousands)
if param1 != 0:
counter += 3
continue
else:
counter = param2
continue
if ones == 7:
raw1 = ram[counter + 1]
raw2 = ram[counter + 2]
raw3 = ram[counter + 3]
param1 = get_parameters(ram, raw1, relative_base, hundreds)
param2 = get_parameters(ram, raw2, relative_base, thousands)
param3 = get_write_parameters(ram, raw3, relative_base, ten_thousands)
position = param3
if param1 < param2:
ram[position] = 1
else:
ram[position] = 0
counter += 4
continue
if ones == 8:
raw1 = ram[counter + 1]
raw2 = ram[counter + 2]
raw3 = ram[counter + 3]
param1 = get_parameters(ram, raw1, relative_base, hundreds)
param2 = get_parameters(ram, raw2, relative_base, thousands)
param3 = get_write_parameters(ram, raw3, relative_base, ten_thousands)
position = param3
if param1 == param2:
ram[position] = 1
else:
ram[position] = 0
counter += 4
continue
if ones == 9 and tens == 9:
return "Complete"
if ones == 9:
raw1 = ram[counter + 1]
param1 = get_parameters(ram, raw1, relative_base, hundreds)
relative_base += param1
counter += 2
continue
        except Exception as e:
            print('intcode error at position %s: %s' % (counter, e))
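# --- Illustrative driver (added for clarity; the tiny echo program below is a
# hand-written example, not part of any puzzle input):
def _run_echo_example():
    program = [3, 9, 4, 9, 99, 0, 0, 0, 0, 0]  # read input, write it back, halt
    computer = full_intcode_computer(program, 0, 0)
    next(computer)                # run until the program requests input
    output = computer.send([42])  # feed the input; the generator yields output
    assert output == 42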
def get_intcode(test, i, j):
test[1] = i
test[2] = j
for tranche in [(test[x:x + 4]) for x in range(0, len(test) - 3, 4)]:
if tranche[0] == 1:
test[tranche[3]] = test[tranche[1]] + test[tranche[2]]
if tranche[0] == 2:
test[tranche[3]] = test[tranche[1]] * test[tranche[2]]
if tranche[0] == 99:
return test[0]
def intcode_computer(intcode_list, program_input):
counter = 0
try:
while True:
item = intcode_list[counter]
ones = int(str(item)[-1])
tens = int(str(item // 10)[-1])
hundreds = int(str(item // 100)[-1])
thousands = int(str(item // 1000)[-1])
if ones == 1:
if hundreds == 1:
param1 = intcode_list[counter + 1]
else:
position = intcode_list[counter + 1]
param1 = intcode_list[position]
if thousands == 1:
param2 = intcode_list[counter + 2]
else:
param2 = intcode_list[intcode_list[counter + 2]]
intcode_list[intcode_list[counter + 3]] = param1 + param2
counter += 4
if ones == 2:
if hundreds == 1:
param1 = intcode_list[counter + 1]
else:
param1 = intcode_list[intcode_list[counter + 1]]
if thousands == 1:
param2 = intcode_list[counter + 2]
else:
param2 = intcode_list[intcode_list[counter + 2]]
intcode_list[intcode_list[counter + 3]] = param1 * param2
counter += 4
if ones == 3:
param1 = intcode_list[counter + 1]
                intcode_list[param1] = program_input
counter += 2
if ones == 4:
if hundreds == 1:
param1 = intcode_list[counter + 1]
else:
param1 = intcode_list[intcode_list[counter + 1]]
print(param1)
counter += 2
if ones == 5:
if hundreds == 1:
param1 = intcode_list[counter + 1]
else:
param1 = intcode_list[intcode_list[counter + 1]]
if thousands == 1:
param2 = intcode_list[counter + 2]
else:
param2 = intcode_list[intcode_list[counter + 2]]
if param1 == 0:
counter += 3
else:
counter = param2
if ones == 6:
if hundreds == 1:
param1 = intcode_list[counter + 1]
else:
param1 = intcode_list[intcode_list[counter + 1]]
if thousands == 1:
param2 = intcode_list[counter + 2]
else:
param2 = intcode_list[intcode_list[counter + 2]]
if param1 != 0:
counter += 3
else:
counter = param2
if ones == 7:
if hundreds == 1:
param1 = intcode_list[counter + 1]
else:
param1 = intcode_list[intcode_list[counter + 1]]
if thousands == 1:
param2 = intcode_list[counter + 2]
else:
param2 = intcode_list[intcode_list[counter + 2]]
if param1 < param2:
intcode_list[intcode_list[counter + 3]] = 1
else:
intcode_list[intcode_list[counter + 3]] = 0
counter += 4
if ones == 8:
if hundreds == 1:
param1 = intcode_list[counter + 1]
else:
param1 = intcode_list[intcode_list[counter + 1]]
if thousands == 1:
param2 = intcode_list[counter + 2]
else:
param2 = intcode_list[intcode_list[counter + 2]]
if param1 == param2:
intcode_list[intcode_list[counter + 3]] = 1
else:
intcode_list[intcode_list[counter + 3]] = 0
counter += 4
if ones == 9 and tens == 9:
print("Complete")
break
    except Exception as e:
        print('intcode error at position %s: %s' % (counter, e)) |
py | b416fa24c188a9eb2601d1ad0b0cdd2df73bc51b | from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import os
import pandas as pd
from mimic3benchmark.util import dataframe_from_csv
def read_stays(subject_path):
stays = dataframe_from_csv(os.path.join(subject_path, 'stays.csv'), index_col=None)
stays.INTIME = pd.to_datetime(stays.INTIME)
stays.OUTTIME = pd.to_datetime(stays.OUTTIME)
stays.DOB = pd.to_datetime(stays.DOB)
stays.DOD = pd.to_datetime(stays.DOD)
stays.DEATHTIME = pd.to_datetime(stays.DEATHTIME)
stays.sort_values(by=['INTIME', 'OUTTIME'], inplace=True)
return stays
def read_diagnoses(subject_path):
return dataframe_from_csv(os.path.join(subject_path, 'diagnoses.csv'), index_col=None)
def read_events(subject_path, remove_null=True):
events = dataframe_from_csv(os.path.join(subject_path, 'events.csv'), index_col=None)
if remove_null:
events = events[events.VALUE.notnull()]
events.CHARTTIME = pd.to_datetime(events.CHARTTIME)
events.HADM_ID = events.HADM_ID.fillna(value=-1).astype(int)
events.ICUSTAY_ID = events.ICUSTAY_ID.fillna(value=-1).astype(int)
events.VALUEUOM = events.VALUEUOM.fillna('').astype(str)
# events.sort_values(by=['CHARTTIME', 'ITEMID', 'ICUSTAY_ID'], inplace=True)
return events
def get_events_for_stay(events, icustayid, intime=None, outtime=None):
idx = (events.ICUSTAY_ID == icustayid)
if intime is not None and outtime is not None:
idx = idx | ((events.CHARTTIME >= intime) & (events.CHARTTIME <= outtime))
events = events[idx]
del events['ICUSTAY_ID']
return events
def add_hours_elpased_to_events(events, dt, remove_charttime=True):
events = events.copy()
events['HOURS'] = (events.CHARTTIME - dt).apply(lambda s: s / np.timedelta64(1, 's')) / 60./60
if remove_charttime:
del events['CHARTTIME']
return events
def convert_events_to_timeseries(events, variable_column='VARIABLE', variables=[]):
metadata = events[['CHARTTIME', 'ICUSTAY_ID']].sort_values(by=['CHARTTIME', 'ICUSTAY_ID'])\
.drop_duplicates(keep='first').set_index('CHARTTIME')
timeseries = events[['CHARTTIME', variable_column, 'VALUE']]\
.sort_values(by=['CHARTTIME', variable_column, 'VALUE'], axis=0)\
.drop_duplicates(subset=['CHARTTIME', variable_column], keep='last')
timeseries = timeseries.pivot(index='CHARTTIME', columns=variable_column, values='VALUE')\
.merge(metadata, left_index=True, right_index=True)\
.sort_index(axis=0).reset_index()
for v in variables:
if v not in timeseries:
timeseries[v] = np.nan
return timeseries
def get_first_valid_from_timeseries(timeseries, variable):
if variable in timeseries:
idx = timeseries[variable].notnull()
if idx.any():
loc = np.where(idx)[0][0]
return timeseries[variable].iloc[loc]
return np.nan
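# --- Illustrative usage sketch (added for clarity): 'data/subjects/12345' is a
# hypothetical per-subject directory produced by the extraction scripts; the
# real pipeline also maps ITEMID codes to clinical variable names first.
def _example_stay_events(subject_path='data/subjects/12345'):
    stays = read_stays(subject_path)
    events = read_events(subject_path)
    stay = stays.iloc[0]
    stay_events = get_events_for_stay(events, stay.ICUSTAY_ID,
                                      stay.INTIME, stay.OUTTIME)
    # re-index event times as hours elapsed since ICU admission
    return add_hours_elpased_to_events(stay_events, stay.INTIME)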
|
py | b416fb001487bdd815ed8cde5118f6bcda9af4ea | from kraken.core.maths import Vec3, Vec3, Euler, Quat, Xfo
from kraken.core.objects.components.base_example_component import BaseExampleComponent
from kraken.core.objects.attributes.attribute_group import AttributeGroup
from kraken.core.objects.attributes.scalar_attribute import ScalarAttribute
from kraken.core.objects.attributes.bool_attribute import BoolAttribute
from kraken.core.objects.constraints.pose_constraint import PoseConstraint
from kraken.core.objects.component_group import ComponentGroup
from kraken.core.objects.hierarchy_group import HierarchyGroup
from kraken.core.objects.locator import Locator
from kraken.core.objects.joint import Joint
from kraken.core.objects.ctrlSpace import CtrlSpace
from kraken.core.objects.control import Control
from kraken.core.objects.operators.splice_operator import SpliceOperator
from kraken.core.profiler import Profiler
from kraken.helpers.utility_methods import logHierarchy
class MainSrtComponent(BaseExampleComponent):
"""MainSrt Component Base"""
def __init__(self, name='mainSrtBase', parent=None, data=None):
super(MainSrtComponent, self).__init__(name, parent)
# ===========
# Declare IO
# ===========
# Declare Inputs Xfos
# Declare Output Xfos
self.srtOutputTgt = self.createOutput('srt', dataType='Xfo', parent=self.outputHrcGrp).getTarget()
self.offsetOutputTgt = self.createOutput('offset', dataType='Xfo', parent=self.outputHrcGrp).getTarget()
# Declare Input Attrs
self.drawDebugInputAttr = self.createInput('drawDebug', dataType='Boolean', value=False, parent=self.cmpInputAttrGrp).getTarget()
# Declare Output Attrs
self.rigScaleOutputAttr = self.createOutput('rigScale', dataType='Float', value=1.0, parent=self.cmpOutputAttrGrp).getTarget()
class MainSrtComponentGuide(MainSrtComponent):
"""MainSrt Component Guide"""
def __init__(self, name='mainSrt', parent=None, data=None):
Profiler.getInstance().push("Construct MainSrt Guide Component:" + name)
super(MainSrtComponentGuide, self).__init__(name, parent)
# =========
# Attributes
# =========
# Add Component Params to IK control
guideSettingsAttrGrp = AttributeGroup("GuideSettings", parent=self)
self.mainSrtSizeInputAttr = ScalarAttribute('mainSrtSize', value=5.0, minValue=1.0, maxValue=50.0, parent=guideSettingsAttrGrp)
# =========
# Controls
# =========
# Guide Controls
self.mainSrtCtrl = Control('mainSrt', parent=self.ctrlCmpGrp, shape="circle")
if data is None:
data = {
"location": 'M',
"mainSrtSize": self.mainSrtSizeInputAttr.getValue(),
"mainSrtXfo": Xfo(tr=Vec3(0.0, 0.0, 0.0))
}
self.loadData(data)
Profiler.getInstance().pop()
# =============
# Data Methods
# =============
def saveData(self):
"""Save the data for the component to be persisted.
Return:
The JSON data object
"""
data = super(MainSrtComponentGuide, self).saveData()
data["mainSrtSize"] = self.mainSrtSizeInputAttr.getValue()
data["mainSrtXfo"] = self.mainSrtCtrl.xfo
return data
def loadData(self, data):
"""Load a saved guide representation from persisted data.
Arguments:
data -- object, The JSON data object.
Return:
True if successful.
"""
super(MainSrtComponentGuide, self).loadData( data )
self.mainSrtSizeInputAttr.setValue(data["mainSrtSize"])
self.mainSrtCtrl.xfo = data["mainSrtXfo"]
self.mainSrtCtrl.scalePoints(Vec3(data["mainSrtSize"], 1.0, data["mainSrtSize"]))
return True
def getRigBuildData(self):
"""Returns the Guide data used by the Rig Component to define the layout of the final rig.
Return:
The JSON rig data object.
"""
data = super(MainSrtComponentGuide, self).getRigBuildData()
data["mainSrtSize"] = self.mainSrtSizeInputAttr.getValue()
data["mainSrtXfo"] = self.mainSrtCtrl.xfo
return data
# ==============
# Class Methods
# ==============
@classmethod
def getComponentType(cls):
"""Enables introspection of the class prior to construction to determine if it is a guide component.
Return:
The true if this component is a guide component.
"""
return 'Guide'
@classmethod
def getRigComponentClass(cls):
"""Returns the corresponding rig component class for this guide component class
Return:
The rig component class.
"""
return MainSrtComponentRig
class MainSrtComponentRig(MainSrtComponent):
"""MainSrt Component Rig"""
def __init__(self, name='mainSrt', parent=None):
Profiler.getInstance().push("Construct MainSrt Rig Component:" + name)
super(MainSrtComponentRig, self).__init__(name, parent)
# =========
# Controls
# =========
# Add Controls
self.mainSRTCtrlSpace = CtrlSpace('SRT', parent=self.ctrlCmpGrp)
self.mainSRTCtrl = Control('SRT', shape='circle', parent=self.mainSRTCtrlSpace)
self.mainSRTCtrl.lockScale(x=True, y=True, z=True)
self.offsetCtrlSpace = CtrlSpace('Offset', parent=self.mainSRTCtrl)
self.offsetCtrl = Control('Offset', shape='circle', parent=self.offsetCtrlSpace)
self.offsetCtrl.setColor("orange")
self.offsetCtrl.lockScale(x=True, y=True, z=True)
# Add Component Params to IK control
mainSrtSettingsAttrGrp = AttributeGroup('DisplayInfo_MainSrtSettings', parent=self.mainSRTCtrl)
self.rigScaleAttr = ScalarAttribute('rigScale', value=1.0, parent=mainSrtSettingsAttrGrp, minValue=0.1, maxValue=100.0)
self.rigScaleOutputAttr.connect(self.rigScaleAttr)
# ==========
# Deformers
# ==========
# ==============
# Constrain I/O
# ==============
# Constraint inputs
# Constraint outputs
srtConstraint = PoseConstraint('_'.join([self.srtOutputTgt.getName(), 'To', self.mainSRTCtrl.getName()]))
srtConstraint.addConstrainer(self.mainSRTCtrl)
self.srtOutputTgt.addConstraint(srtConstraint)
offsetConstraint = PoseConstraint('_'.join([self.offsetOutputTgt.getName(), 'To', self.mainSRTCtrl.getName()]))
offsetConstraint.addConstrainer(self.offsetCtrl)
self.offsetOutputTgt.addConstraint(offsetConstraint)
# ===============
# Add Splice Ops
# ===============
#Add Rig Scale Splice Op
self.rigScaleSpliceOp = SpliceOperator('rigScaleSpliceOp', 'RigScaleSolver', 'Kraken')
self.addOperator(self.rigScaleSpliceOp)
# Add Att Inputs
self.rigScaleSpliceOp.setInput('drawDebug', self.drawDebugInputAttr)
self.rigScaleSpliceOp.setInput('rigScale', self.rigScaleOutputAttr)
# Add Xfo Inputs
# Add Xfo Outputs
self.rigScaleSpliceOp.setOutput('target', self.mainSRTCtrlSpace)
Profiler.getInstance().pop()
def loadData(self, data=None):
"""Load a saved guide representation from persisted data.
Arguments:
data -- object, The JSON data object.
Return:
True if successful.
"""
super(MainSrtComponentRig, self).loadData( data )
# ================
# Resize Controls
# ================
self.mainSRTCtrl.scalePoints(Vec3(data["mainSrtSize"], 1.0, data["mainSrtSize"]))
self.offsetCtrl.scalePoints(Vec3(data["mainSrtSize"] - 0.5, 1.0, data["mainSrtSize"] - 0.5))
# =======================
# Set Control Transforms
# =======================
self.mainSRTCtrlSpace.xfo = data["mainSrtXfo"]
self.mainSRTCtrl.xfo = data["mainSrtXfo"]
self.offsetCtrlSpace.xfo = data["mainSrtXfo"]
self.offsetCtrl.xfo = data["mainSrtXfo"]
# ============
# Set IO Xfos
# ============
        self.srtOutputTgt.xfo = data["mainSrtXfo"]
        self.offsetOutputTgt.xfo = data["mainSrtXfo"]
from kraken.core.kraken_system import KrakenSystem
ks = KrakenSystem.getInstance()
ks.registerComponent(MainSrtComponentGuide)
ks.registerComponent(MainSrtComponentRig)
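# --- Illustrative guide-to-rig round trip (added for clarity; this sketches
# how Kraken builders typically drive these classes and is not part of the
# original module):
def _example_build():
    guide = MainSrtComponentGuide('mainSrt')
    guide.loadData({'location': 'M',
                    'mainSrtSize': 6.0,
                    'mainSrtXfo': Xfo(tr=Vec3(0.0, 0.0, 0.0))})
    rig = MainSrtComponentRig('mainSrt')
    rig.loadData(guide.getRigBuildData())
    return rig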
|
py | b416fbb37e030f972d7ca7196373afaeaf77cfed | import socket
import ssl
import pytest
from six.moves import urllib
import httplib2
import tests
def test_get_via_https():
# Test that we can handle HTTPS
http = httplib2.Http(ca_certs=tests.CA_CERTS)
with tests.server_const_http(tls=True) as uri:
response, _ = http.request(uri, "GET")
assert response.status == 200
def test_get_301_via_https():
http = httplib2.Http(ca_certs=tests.CA_CERTS)
glocation = [""] # nonlocal kind of trick, maybe redundant
def handler(request):
if request.uri == "/final":
return tests.http_response_bytes(body=b"final")
return tests.http_response_bytes(status="301 goto", headers={"location": glocation[0]})
with tests.server_request(handler, request_count=2, tls=True) as uri:
glocation[0] = urllib.parse.urljoin(uri, "/final")
response, content = http.request(uri, "GET")
assert response.status == 200
assert content == b"final"
assert response.previous.status == 301
assert response.previous["location"] == glocation[0]
def test_get_301_via_https_spec_violation_on_location():
# Test that we follow redirects through HTTPS
# even if they violate the spec by including
# a relative Location: header instead of an absolute one.
http = httplib2.Http(ca_certs=tests.CA_CERTS)
def handler(request):
if request.uri == "/final":
return tests.http_response_bytes(body=b"final")
return tests.http_response_bytes(status="301 goto", headers={"location": "/final"})
with tests.server_request(handler, request_count=2, tls=True) as uri:
response, content = http.request(uri, "GET")
assert response.status == 200
assert content == b"final"
assert response.previous.status == 301
def test_invalid_ca_certs_path():
http = httplib2.Http(ca_certs="/nosuchfile")
with tests.server_const_http(request_count=0, tls=True) as uri:
with tests.assert_raises(IOError):
http.request(uri, "GET")
def test_not_trusted_ca():
# Test that we get a SSLHandshakeError if we try to access
# server using a CA cert file that doesn't contain server's CA.
http = httplib2.Http(ca_certs=tests.CA_UNUSED_CERTS)
with tests.server_const_http(tls=True) as uri:
try:
http.request(uri, "GET")
assert False, "expected CERTIFICATE_VERIFY_FAILED"
except ssl.SSLError as e:
assert e.reason == "CERTIFICATE_VERIFY_FAILED"
except httplib2.SSLHandshakeError: # Python2
pass
ssl_context_accept_version = hasattr(tests.ssl_context(), "maximum_version") and hasattr(
tests.ssl_context(), "minimum_version"
)
@pytest.mark.skipif(not ssl_context_accept_version, reason="ssl doesn't support TLS min/max")
@pytest.mark.parametrize("attr", ("maximum_version", "minimum_version"))
@pytest.mark.parametrize("version", (None, "TLSv1_2", ssl.TLSVersion.TLSv1_2) if ssl_context_accept_version else (None,))
def test_set_tls_version(attr, version):
# We expect failure on Python < 3.7 or OpenSSL < 1.1
expect_success = hasattr(ssl.SSLContext(), attr)
kwargs = {"tls_" + attr: version}
http = httplib2.Http(**kwargs)
try:
http.request(tests.DUMMY_HTTPS_URL)
except RuntimeError:
assert not expect_success
except socket.error:
assert expect_success
@pytest.mark.skipif(
not hasattr(tests.ssl_context(), "maximum_version"),
reason="ssl doesn't support TLS min/max",
)
def test_max_tls_version():
http = httplib2.Http(ca_certs=tests.CA_CERTS, tls_maximum_version="TLSv1_2")
with tests.server_const_http(tls=True) as uri:
http.request(uri)
_, tls_ver, _ = http.connections.popitem()[1].sock.cipher()
assert "TLSv1.0" <= tls_ver <= "TLSv1.2"
def test_client_cert_verified():
cert_log = []
def setup_tls(context, server, skip_errors):
context.load_verify_locations(cafile=tests.CA_CERTS)
context.verify_mode = ssl.CERT_REQUIRED
return context.wrap_socket(server, server_side=True)
def handler(request):
cert_log.append(request.client_sock.getpeercert())
return tests.http_response_bytes()
http = httplib2.Http(ca_certs=tests.CA_CERTS)
with tests.server_request(handler, tls=setup_tls) as uri:
uri_parsed = urllib.parse.urlparse(uri)
http.add_certificate(tests.CLIENT_PEM, tests.CLIENT_PEM, uri_parsed.netloc)
http.request(uri)
assert len(cert_log) == 1
expect_serial = tests.x509_serial(tests.CLIENT_PEM) if tests.x509 else 16332984194609126127
assert int(cert_log[0]["serialNumber"], base=16) == expect_serial
def test_client_cert_password_verified():
cert_log = []
def setup_tls(context, server, skip_errors):
context.load_verify_locations(cafile=tests.CA_CERTS)
context.verify_mode = ssl.CERT_REQUIRED
return context.wrap_socket(server, server_side=True)
def handler(request):
cert_log.append(request.client_sock.getpeercert())
return tests.http_response_bytes()
http = httplib2.Http(ca_certs=tests.CA_CERTS)
with tests.server_request(handler, tls=setup_tls) as uri:
uri_parsed = urllib.parse.urlparse(uri)
http.add_certificate(tests.CLIENT_ENCRYPTED_PEM, tests.CLIENT_ENCRYPTED_PEM, uri_parsed.netloc, password="12345")
http.request(uri)
assert len(cert_log) == 1
expect_serial = tests.x509_serial(tests.CLIENT_ENCRYPTED_PEM) if tests.x509 else 16332984194609126128
assert int(cert_log[0]["serialNumber"], base=16) == expect_serial
@pytest.mark.skipif(
not hasattr(tests.ssl_context(), "set_servername_callback"),
reason="SSLContext.set_servername_callback is not available",
)
def test_sni_set_servername_callback():
sni_log = []
def setup_tls(context, server, skip_errors):
context.set_servername_callback(lambda _sock, hostname, _context: sni_log.append(hostname))
return context.wrap_socket(server, server_side=True)
http = httplib2.Http(ca_certs=tests.CA_CERTS)
with tests.server_const_http(tls=setup_tls) as uri:
uri_parsed = urllib.parse.urlparse(uri)
http.request(uri)
assert sni_log == [uri_parsed.hostname]
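def test_get_via_https_without_cert_validation():
    # Illustrative companion to test_get_via_https (added for clarity): with
    # disable_ssl_certificate_validation=True httplib2 skips CA verification,
    # so the request succeeds without trusting the test CA.
    http = httplib2.Http(disable_ssl_certificate_validation=True)
    with tests.server_const_http(tls=True) as uri:
        response, _ = http.request(uri, "GET")
        assert response.status == 200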
|
py | b416fbf80962e3e5d2cd5310c181cc4aa00f42c6 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_parser
----------------------------------
Tests for parser module.
"""
# pylint: disable=locally-disabled,redefined-outer-name
import pytest
from mindmeld import markup
from mindmeld.components.parser import Parser
from mindmeld.exceptions import ParserTimeout
class TestBasicParser:
"""A set of tests for a basic parser with no constraints"""
CONFIG = {'head': ['dependent']}
@classmethod
def setup_class(cls):
"""Creates the parser for this group of tests"""
cls.parser = Parser(config=cls.CONFIG)
def test_no_entities(self):
"""Tests the parser returns no groups when there are no entities"""
query = markup.load_query('Hello there')
entities = self.parser.parse_entities(query.query, query.entities)
assert entities == ()
def test_singleton(self):
"""Tests the parser returns no groups when a head has no dependents"""
query = markup.load_query('Hello {there|head}')
entities = self.parser.parse_entities(query.query, query.entities, timeout=None)
assert entities == query.entities
def test_left(self):
"""Tests the parser attaches dependents from the left"""
query = markup.load_query('{Hello|dependent} {there|head}')
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 2
assert entities[0].parent == entities[1]
assert entities[1].children == (entities[0],)
def test_right(self):
"""Tests the parser attaches dependents from the right"""
query = markup.load_query('{Hello|head} {there|dependent}')
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 2
assert entities[0].children == (entities[1],)
assert entities[1].parent == entities[0]
def test_distance(self):
"""Tests the parser attaches dependents to their nearest head"""
query = markup.load_query('{Hello|head} {there|dependent} my {friend|head}')
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 3
assert entities[0].children == (entities[1],)
assert entities[1].parent == entities[0]
assert entities[2].children is None
def test_unconfigured(self):
"""Tests the parser functions when unconfigured entities are present"""
query = markup.load_query('{Hello|head} {there|other}')
entities = self.parser.parse_entities(query.query, query.entities)
assert entities
class TestRoleParser:
"""A set of tests for a parser which has nested groups"""
CONFIG = {
'dish|beverage': ['option|beverage', 'size'],
'dish': ['option', 'size']
}
@classmethod
def setup_class(cls):
"""Creates the parser for this group of tests"""
cls.parser = Parser(config=cls.CONFIG)
def test_generic(self):
"""Tests groups where no roles are specified in the config"""
query = markup.load_query('{noodles|dish|main_course} with {tofu|option}')
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 2
assert entities[0].children == (entities[1],)
assert entities[1].parent == entities[0]
def test_with_role(self):
"""Tests groups when roles are explicitly specified in the config"""
text = '{large|size} {latte|dish|beverage} {ice|option|beverage}'
query = markup.load_query(text)
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 3
assert entities[0].parent == entities[1]
assert entities[1].children == (entities[0], entities[2])
text = 'I’d like a {muffin|dish|baked_good} with {no sugar|option|beverage}'
query = markup.load_query(text)
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 2
assert entities[0].children is None
assert entities[1].parent is None
text = 'I’d like a {latte|dish|beverage} with {maple syrup|option|general}'
query = markup.load_query(text)
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 2
assert entities[0].children is None
assert entities[1].parent is None
class TestNestedParser:
"""A set of tests for a parser which has nested groups"""
CONFIG = {
'dish': ['option', 'size'],
'option': ['size']
}
@classmethod
def setup_class(cls):
"""Creates the parser for this group of tests"""
cls.parser = Parser(config=cls.CONFIG)
def test_standalone_option(self):
"""Tests that an option can exist as a standalone group"""
query = markup.load_query('{light|size} {ice|option}')
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 2
assert entities[0].parent == entities[1]
assert entities[1].children == (entities[0],)
def test_nested(self):
"""Tests that an option can exist as a standalone group"""
text = '{large|size} {latte|dish} {light|size} {ice|option}'
query = markup.load_query(text)
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 4
assert entities[0].parent == entities[1]
assert entities[1].children == (entities[0], entities[3])
assert entities[2].parent == entities[3]
assert entities[3].children == (entities[2],)
class TestMaxInstancesParser:
"""A set of tests for a parser which has max instance constraints on groups"""
CONFIG = {
'dish': {
'option': {},
'size': {'max_instances': 1} # only one size per dish
},
'option': {
'size': {'max_instances': 1} # only one size per option
}
}
@classmethod
def setup_class(cls):
"""Creates the parser for this group of tests"""
cls.parser = Parser(config=cls.CONFIG)
def test_max_instances(self):
"""Tests that parser respects the max instances constraint"""
text = '{light|size} {medium|size} {latte|dish}'
query = markup.load_query(text)
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 3
assert entities[0].parent is None
assert entities[1].parent == entities[2]
assert entities[2].children == (entities[1],)
def test_distance_override(self):
"""Tests that parser correctly allocates one size per dish,
overriding distance in the process.
"""
text = '{latte|dish} size {medium|size}, {mocha|dish} size {large|size}'
query = markup.load_query(text)
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 4
assert entities[0].children == (entities[1],)
assert entities[1].parent == entities[0]
assert entities[2].children == (entities[3],)
assert entities[3].parent == entities[2]
class TestParserLinkWords:
"""A set of tests for a parser with link words"""
CONFIG = {
'dish': {
'option': {'linking_words': {'with'}},
'size': {},
}
}
@classmethod
def setup_class(cls):
"""Creates the parser for this group of tests"""
cls.parser = Parser(config=cls.CONFIG)
def test_link_word(self):
"""Tests that parser considers link words, overriding default distance calculation."""
text = 'A {pizza|dish} with {olives|option}, {breadsticks|dish} and a {coke|dish}'
query = markup.load_query(text)
entities = self.parser.parse_entities(query.query, query.entities)
assert len(entities) == 4
assert entities[0].children == (entities[1],)
assert entities[1].parent == entities[0]
assert entities[2].children is None
assert entities[3].children is None
def test_link_word_negative(self):
"""Tests that parser does not apply link words for other dependent types."""
text = 'A {pepperoni pizza|dish} with {large|size} {coke|dish}'
query = markup.load_query(text)
entities = self.parser.parse_entities(query.query, query.entities)
assert len([e for e in entities if e.parent is None and e.children is not None]) == 1
assert entities[0].children is None
assert entities[1].parent == entities[2]
assert entities[2].children == (entities[1],)
def test_parser_timeout():
"""Tests that the parser throws a ParserTimeout exception on very ambiguous queries
which take long to entities.
"""
config = {
'name': {
'form': {'max_instances': 1},
'size': {'max_instances': 1},
'number': {'max_instances': 1, 'right': False},
'option': {'linking_words': ['with']}
}
}
parser = Parser(config=config)
text = ('{venti|size} {jade citrus|name} with {one|number} bag of '
'{peach tranquility|name} and {one|number} bag {jade citrus|name} '
'{2 pumps peppermint|option} {no hot water|option} sub {steamed|option} '
'{lemonade|option} {4|number} {honeys|option}')
query = markup.load_query(text)
with pytest.raises(ParserTimeout):
parser.parse_entities(query.query, query.entities, handle_timeout=False)
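# --- Illustrative standalone usage (added for clarity; the config mirrors the
# test fixtures above and is an example, not a library-provided default):
def _example_parse():
    parser = Parser(config={'dish': ['option']})
    query = markup.load_query('{pizza|dish} with {olives|option}')
    return parser.parse_entities(query.query, query.entities)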
|
py | b416fd0be6fbf14b6f7d9445191871e06a963283 | import re
import pexpect
from typing import IO, List, Optional, Tuple
from interfaces.AbstractDeviceIO import AbstractDeviceIO
class PexpectDeviceIO(AbstractDeviceIO):
def read_expect_regex(self, list_of_regex: List[str], timeout: int) -> Tuple[str, dict]:
try:
# pexpect compiles regexes with DOTALL option which makes the '.' symbol include a new line too,
# which will compare the regex with multiple lines. But we want our regexes to be compared with outputs
# only from one line. To do that we compile our pattern without the DOTALL option before giving it to the
# expect function.
compiled_list_of_regex = []
for pattern in list_of_regex:
pattern = pattern.encode("ascii")
compiled_list_of_regex.append(re.compile(pattern))
match = self._session.expect(compiled_list_of_regex, timeout=timeout)
# Captured regex data in dictionary
captured_variables_dict = self._session.match.groupdict()
# Format dictionary values from bytes to strings
captured_variables_dict = dict((k, v.decode('ascii')) for k, v in captured_variables_dict.items())
# return captured variables dictionary and the matched regex
return list_of_regex[match], captured_variables_dict
except (pexpect.exceptions.TIMEOUT, pexpect.exceptions.EOF) as e:
raise TimeoutError(e)
def write(self, output: str):
self._session.write(output)
def __init__(self, command: str, log_file: Optional[IO]):
"""
Implementation of the DeviceIO API using pexpect.
@param command: The command to communicate with.
@param log_file: Logfile to log the communications to.
"""
self._session = pexpect.spawn(command, echo=False, logfile=log_file)
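# --- Illustrative usage sketch (added for clarity; '/bin/cat' and the status
# regex are hypothetical stand-ins for a real device shell):
def _example_session():
    device = PexpectDeviceIO('/bin/cat', log_file=None)
    device.write('status 42\n')  # cat echoes the line back to us
    matched, captured = device.read_expect_regex(
        [r'status (?P<code>\d+)'], timeout=5)
    return matched, captured['code']  # -> matched pattern and '42'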
|
py | b416fda9f7456667b2dde67f1b2a289cdcf60288 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class GPUDevice(object):
"""Provides information about an individual GPU device.
On platforms which support them, the vendor_id and device_id are
PCI IDs. On other platforms, the vendor_string and device_string
are platform-dependent strings.
"""
_VENDOR_ID_MAP = {
0x1002: 'ATI',
0x8086: 'Intel',
0x10de: 'Nvidia',
}
def __init__(self, vendor_id, device_id, vendor_string, device_string):
self._vendor_id = vendor_id
self._device_id = device_id
self._vendor_string = vendor_string
self._device_string = device_string
def __str__(self):
vendor = 'VENDOR = 0x%x' % self._vendor_id
vendor_string = self._vendor_string
if not vendor_string and self._vendor_id in self._VENDOR_ID_MAP:
vendor_string = self._VENDOR_ID_MAP[self._vendor_id]
if vendor_string:
vendor += ' (%s)' % vendor_string
device = 'DEVICE = 0x%x' % self._device_id
if self._device_string:
device += ' (%s)' % self._device_string
return '%s, %s' % (vendor, device)
@classmethod
def FromDict(cls, attrs):
"""Constructs a GPUDevice from a dictionary. Requires the
following attributes to be present in the dictionary:
vendor_id
device_id
vendor_string
device_string
Raises an exception if any attributes are missing.
"""
return cls(attrs['vendor_id'], attrs['device_id'],
attrs['vendor_string'], attrs['device_string'])
@property
def vendor_id(self):
"""The GPU vendor's PCI ID as a number, or 0 if not available.
Most desktop machines supply this information rather than the
vendor and device strings."""
return self._vendor_id
@property
def device_id(self):
"""The GPU device's PCI ID as a number, or 0 if not available.
Most desktop machines supply this information rather than the
vendor and device strings."""
return self._device_id
@property
def vendor_string(self):
"""The GPU vendor's name as a string, or the empty string if not
available.
Most mobile devices supply this information rather than the PCI
IDs."""
return self._vendor_string
@property
def device_string(self):
"""The GPU device's name as a string, or the empty string if not
available.
Most mobile devices supply this information rather than the PCI
IDs."""
return self._device_string
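# --- Illustrative usage (added for clarity; the PCI IDs below are example
# values, not data from this module):
def _example_device():
    device = GPUDevice.FromDict({
        'vendor_id': 0x10de,
        'device_id': 0x1c02,
        'vendor_string': '',
        'device_string': '',
    })
    # the vendor string falls back to the PCI-ID map -> '... (Nvidia) ...'
    return str(device)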
|
py | b416fe5e5b1afdf5c33569b0aefaf13d5796f248 | # We shall say that an n-digit number is pandigital if it makes use of all the
# digits 1 to n exactly once. Find the sum of all products whose multiplicand/
# multiplier/product identity can be written as a 1 through 9 pandigital.
from functools import reduce
def is_pandigital_triplet(triplet):
triplet = map(str, triplet)
s = reduce(lambda x, y: x+y, triplet)
return ''.join(sorted(s)) == '123456789'
def pandigitals_sum():
pandigitals = []
    # digits(i) + digits(j) + digits(i*j) must total 9, so the only possible
    # factor combinations are (each yielding a 4-digit product):
    # 1-digit x 4-digit numbers
    # 2-digit x 3-digit numbers
for i in range(2, 100):
j_start = 1234 if i < 10 else 123
j_end = 10**4//i
for j in range(j_start, j_end+1):
if is_pandigital_triplet([i, j, i*j]):
pandigitals.append(i*j)
return sum(list(set(pandigitals)))
if __name__ == '__main__':
assert is_pandigital_triplet([39, 186, 7254])
print(pandigitals_sum())
|