ext (stringclasses, 9 values) | sha (stringlengths, 40) | content (stringlengths, 3–1.04M) |
---|---|---|
py | b416231a784536462177f67d6346f2d7037fa0b9 | from typing import Optional
import pytest
from itunesdb.cli import data
###################
# Track.set_value #
###################
def test_set_value_ignored_value():
track = data.Track()
track.set_value("xyz", 1)
with pytest.raises(AttributeError):
# noinspection PyUnresolvedReferences
_ = track.xyz
def test_set_value_supported_value():
track = data.Track()
track.set_value("TRack CoUNT", 10)
assert track.track_count == 10
@pytest.mark.parametrize(
"album_artist, compilation, artist, expected",
[
("album_artist", True, None, "album_artist"),
("album_artist", False, None, "album_artist"),
(None, True, None, "Compilation"),
(None, False, None, None),
("album_artist", True, "artist", "album_artist"),
("album_artist", False, "artist", "album_artist"),
(None, True, "artist", "Compilation"),
(None, False, "artist", "artist"),
],
)
def test_computed_album_artist(
album_artist: Optional[str], compilation: bool, artist: Optional[str], expected: Optional[str]
):
track = data.Track()
track.album_artist = album_artist
track.compilation = compilation
track.artist = artist
assert track.computed_album_artist == expected
#########
# Genre #
#########
def test_mismatched_name_and_full_name():
with pytest.raises(ValueError):
_ = data.Genre("abc", "abc: c")
def test_parent_full_name_for_root_genre():
genre = data.Genre("Folk")
assert genre.parent_full_name is None
def test_parent_full_name_for_nonroot_genre():
genre = data.Genre("Rock: Metal: Black Metal")
assert genre.parent_full_name == "Rock: Metal"
|
py | b416231b0fb42439f6dbeaa730b5f93a4c313441 | from django.db import models
# Create your models here.
class Category(models.Model):
category_name = models.CharField(max_length=100,null=False,blank=False)
def __str__(self):
return self.category_name
def save_category(self):
self.save()
def delete_category(self):
self.delete()
class Location(models.Model):
location_name=models.CharField(null=False,blank=False,max_length=100)
def __str__(self):
return self.location_name
def save_location(self):
self.save()
def delete_location(self):
self.delete()
@classmethod
def get_locations(cls):
location=Location.objects.all()
return location
class Image(models.Model):
image_name=models.CharField(max_length=50)
description=models.TextField()
category=models.ForeignKey(Category,on_delete=models.CASCADE, default='')
location=models.ForeignKey(Location,on_delete=models.CASCADE, default='0')
image=models.ImageField(upload_to='images/', default='0')
def __str__(self):
return self.image_name
def save_image(self):
self.save()
def delete_image(self):
self.delete()
@classmethod
def update_image(cls, id,image):
cls.objects.filter(id=id).update(image=image)
@classmethod
def search_category(cls,category):
image = cls.objects.filter(category__category_name__icontains=category)
return image
@classmethod
def fetch_by_location(cls,location_name):
location = cls.objects.filter(location__location_name = location_name).all()
return location
@classmethod
def get_image_by_id(cls, image_id):
image = cls.objects.get(id=image_id)
return image |
py | b416247463b11ce3a6f8b70f37087256086a5f9f | # -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2021 all rights reserved
#
# externals
import collections
import operator
import subprocess
# superclass
from .POSIX import POSIX
# the cpu info object
from .CPUInfo import CPUInfo
# declaration
class Linux(POSIX, family='pyre.platforms.linux'):
"""
Encapsulation of a generic linux host
"""
# public data
platform = 'linux'
distribution = 'generic'
prefix_library = 'lib'
extension_staticLibrary = '.a'
extension_dynamicLibrary = '.so'
template_staticLibrary = "{0.prefix_library}{1}{0.extension_staticLibrary}"
template_dynamicLibrary = "{0.prefix_library}{1}{0.extension_dynamicLibrary}"
# protocol obligations
@classmethod
def flavor(cls):
"""
Return a suitable default encapsulation of the runtime host
"""
# in python 3.8, {platform} doesn't have {linux_distribution} any more; the
# functionality has been delegated to the {distro} package
# so let's try
try:
# to get {distro}
import distro
# if that fails
except ImportError:
# fallback to the native python package; this is silly in the long term, but it's a
# reasonable workaround for current 3.7 users that don't have {distro}
import platform
# if it still has the deprecated function
try:
# identify the platform characteristics; careful not to set the {distribution}
# attribute here; the subclasses set the distribution name to the pyre
# canonical nickname
distribution, cls.release, cls.codename = platform.linux_distribution()
# just in case
distribution = distribution.lower()
# if this also fails
except AttributeError:
# there isn't much else to do; act like a generic linux system
return cls
# if {distro} is available
else:
# identify the platform characteristics; again, careful not to set the
# {distribution} attribute here; the subclasses set the distribution name to the
# pyre canonical nickname
distribution = distro.id()
cls.release = distro.version()
cls.codename = distro.codename()
# check for ubuntu
if distribution.startswith('ubuntu'):
# load the platform file
from .Ubuntu import Ubuntu
# and return it
return Ubuntu
# check for debian
if distribution.startswith('debian'):
# load the platform file
from .Debian import Debian
# and return it
return Debian
# check for red hat
if distribution.startswith('red hat') or distribution.startswith('rhel'):
# load the platform file
from .RedHat import RedHat
# and return it
return RedHat
# check for centos
if distribution.startswith('centos'):
# load the platform file
from .CentOS import CentOS
# and return it
return CentOS
# otherwise, act like a generic linux system
return cls
# implementation details: explorers
@classmethod
def cpuSurvey(cls):
"""
Collect information about the CPU resources on this host
"""
# first, let's try
try:
# to use {lscpu} to collect the information and return it
return cls.lscpu()
# if it's not available on this machine
except FileNotFoundError:
# no worries, we'll try something else
pass
# last resort, because it's heavily polluted by x86_64 peculiarities
return cls.procCPUInfo()
# implementation details: workhorses
@classmethod
def lscpu(cls):
"""
Invoke {lscpu} to gather CPU info
"""
# the name of the program that collates the cpu information
client = 'lscpu'
# the command line arguments
settings = {
'executable' : client,
'args': (
client,
),
'stdout': subprocess.PIPE, 'stderr': subprocess.PIPE,
'universal_newlines': True,
'shell': False
}
# initialize storage
sockets = 1
coresPerSocket = 1
threadsPerCore = 1
# make a pipe
with subprocess.Popen(**settings) as pipe:
# get the text source and tokenize it
tokens = cls.tokenizeCPUInfo(cpuinfo=pipe.stdout)
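# for reference (illustrative values), typical {lscpu} output contains lines such as
#   Thread(s) per core:  2
#   Core(s) per socket:  4
#   Socket(s):           1
# which the tokenizer yields as ("Thread(s) per core", "2") style pairs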
# parse
for key, value in tokens:
# number of sockets
if key == "Socket(s)":
# save
sockets = int(value)
# number of cores per socket
elif key == "Core(s) per socket":
# save
coresPerSocket = int(value)
# number of threads per core
elif key == "Thread(s) per core":
# save
threadsPerCore = int(value)
# make a cpu info object
info = CPUInfo()
# decorate
info.sockets = sockets
info.cores = sockets * coresPerSocket
info.cpus = info.cores * threadsPerCore
# and return it
return info
@classmethod
def procCPUInfo(cls):
"""
Interrogate /proc for CPU info
This was the original manner in which pyre discovered cpu information. It appears that
the gathering of information was inadvertently polluted by what is available for
{x86_64} architectures, and fails to be useful on {ppc64le}. As a result, it has been
replaced by the method {lscpu} above that seems to be slower but is much more reliable.
"""
# initialize the cpu store
ids = 0
cpus = collections.defaultdict(dict)
# the markers
physicalid = None
# prime the tokenizer
tokens = cls.tokenizeCPUInfo(cpuinfo=open(cls.cpuinfo))
# the keys we care about
targets = {'siblings', 'cpu cores'}
# parse
for key, value in tokens:
# if the key is blank
if not key:
# reset the marker
physicalid = None
# and move on
continue
# record the processor ids; that's all we have on single core machines
if key == 'processor':
# increment the count
ids += 1
# move on
continue
# the socket to which this core belongs
if key == 'physical id':
# harvest the cpu physical id
physicalid = value
# move on
continue
# harvest the interesting info
if physicalid and key in targets:
# attach it to the right socket
cpus[physicalid][key] = value
# and move on
continue
# initialize the counters
sockets = physical = logical = 0
# reduce
for sec in cpus.values():
# update the cpu count
sockets += 1
# update the number of physical cores
physical += int(sec['cpu cores'])
# update the number of logical cores
logical += int(sec['siblings'])
# create an info object
info = CPUInfo()
# if the reduction produced non-zero results
if physical and logical:
# decorate it
info.sockets = sockets
info.cores = physical
info.cpus = logical
# and return it
return info
@classmethod
def tokenizeCPUInfo(cls, cpuinfo):
"""
Split the CPU info file into (key, value) pairs
"""
# in order to tokenize each line
for line in cpuinfo:
# strip whitespace
line = line.strip()
# if this leaves us with nothing, we ran into a separator blank line
if not line:
# form a pair of blank tokens
key = value = ''
# otherwise
else:
# split apart and strip leading and trailing whitespace
key, value = map(operator.methodcaller('strip'), line.split(':', maxsplit=1))
# yield the tokens
yield key, value
# nothing more
return
# implementation constants
issue = '/etc/issue'
cpuinfo = '/proc/cpuinfo'
# end of file
|
py | b41626fc7c49c8ce3e8bb371677d3921e28f87e2 | from graphql import parse, build_ast_schema, GraphQLSchema
from ..fixtures import big_schema_sdl # noqa: F401
def test_build_schema_from_ast(benchmark, big_schema_sdl): # noqa: F811
schema_ast = parse(big_schema_sdl)
schema: GraphQLSchema = benchmark(
lambda: build_ast_schema(schema_ast, assume_valid=True)
)
assert schema.query_type is not None
|
py | b4162816ae6d3bcd26330288978f8f180f37ca27 | from __future__ import print_function
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===- lib/asan/scripts/asan_symbolize.py -----------------------------------===#
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
#===------------------------------------------------------------------------===#
#
# This file has local modifications needed for ClusterFuzz project.
#
# Disable all pylint warnings/errors as this is based on external code.
# pylint: disable-all
from builtins import object
from builtins import str
from past.builtins import cmp
import os
import re
import six
import subprocess
import sys
from base import utils
from google_cloud_utils import storage
from metrics import logs
from platforms.android import adb
from platforms.android import fetch_artifact
from platforms.android import settings
from system import archive
from system import environment
from system import shell
try:
import pty
import termios
except ImportError:
# Applies only on unix platforms.
pass
stack_inlining = 'false'
llvm_symbolizer_path = ''
pipes = []
symbolizers = {}
class LineBuffered(object):
"""Disable buffering on a file object."""
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
if '\n' in data:
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
# Construct a path to the .dSYM bundle for the given binary.
# There are three possible cases for binary location in Chromium:
# 1. The binary is a standalone executable or dynamic library in the product
# dir, the debug info is in "binary.dSYM" in the product dir.
# 2. The binary is a standalone framework or .app bundle, the debug info is in
# "Framework.dSYM" or "App.dSYM" in the product dir.
# 3. The binary is a framework or an .app bundle within another .app bundle
# (e.g. Outer.app/Contents/Versions/1.2.3.4/Inner.app), and the debug info
# is in Inner.dSYM in the product dir.
# The first case is handled by llvm-symbolizer, so we only need to construct
# .dSYM paths for .app bundles and frameworks.
# We're assuming that there're no more than two nested bundles in the binary
# path. Only one of these bundles may be a framework and frameworks cannot
# contain other bundles.
def chrome_dsym_hints(binary):
"""Construct a path to the .dSYM bundle for the given binary.
There are three possible cases for binary location in Chromium:
1. The binary is a standalone executable or dynamic library in the product
dir, the debug info is in "binary.dSYM" in the product dir.
2. The binary is a standalone framework or .app bundle, the debug info is in
"Framework.framework.dSYM" or "App.app.dSYM" in the product dir.
3. The binary is a framework or an .app bundle within another .app bundle
(e.g. Outer.app/Contents/Versions/1.2.3.4/Inner.app), and the debug info
is in Inner.app.dSYM in the product dir.
The first case is handled by llvm-symbolizer, so we only need to construct
.dSYM paths for .app bundles and frameworks."""
path_parts = binary.split(os.path.sep)
app_positions = []
framework_positions = []
for index, part in enumerate(path_parts):
if part.endswith('.app'):
app_positions.append(index)
elif part.endswith('.framework'):
framework_positions.append(index)
bundle_positions = app_positions + framework_positions
if len(bundle_positions) == 0:
# Case 1: this is a standalone executable or dylib.
return []
# Cases 2 and 3. The outermost bundle (which is the only bundle in the case 2)
# is located in the product dir.
bundle_positions.sort()
outermost_bundle = bundle_positions[0]
product_dir = path_parts[:outermost_bundle]
# In case 2 this is the same as |outermost_bundle|.
innermost_bundle = bundle_positions[-1]
innermost_bundle_dir = path_parts[innermost_bundle]
innermost_bundle_dir = utils.strip_from_right(innermost_bundle_dir, '.app')
innermost_bundle_dir = utils.strip_from_right(innermost_bundle_dir,
'.framework')
dsym_path = product_dir + [innermost_bundle_dir]
result = '%s.dSYM' % os.path.sep.join(dsym_path)
return [result]
def disable_buffering():
"""Make this process and child processes stdout unbuffered."""
os.environ['PYTHONUNBUFFERED'] = '1'
if not isinstance(sys.stdout, LineBuffered):
# Don't wrap sys.stdout if it is already wrapped.
# See https://github.com/google/clusterfuzz/issues/234 for why.
# Since sys.stdout is a C++ object, it's impossible to do sys.stdout.write =
# lambda...
sys.stdout = LineBuffered(sys.stdout)
def fix_filename(file_name):
"""Clean up the filename, nulls out tool specific ones."""
file_name = re.sub('.*asan_[a-z_]*.cc:[0-9]*', '_asan_rtl_', file_name)
file_name = re.sub('.*crtstuff.c:0', '', file_name)
file_name = re.sub(':0$', '', file_name)
# If we don't have a file name, just bail out.
if not file_name or file_name.startswith('??'):
return ''
return os.path.normpath(file_name)
def fix_function_name(function_name):
"""Clean up function name."""
if function_name.startswith('??'):
return ''
return function_name
def get_stack_frame(binary, addr, function_name, file_name):
"""Return a stack frame entry."""
# Cleanup file and function name.
file_name = fix_filename(file_name)
function_name = fix_function_name(function_name)
# Check if we don't have any symbols at all. If yes, this is probably
# a system library. In this case, just return the binary name.
if not function_name and not file_name:
return '%s in %s' % (addr, os.path.basename(binary))
# We just have a file name. Probably running in global context.
if not function_name:
# Filter the filename to act as a function name.
filtered_file_name = os.path.basename(file_name)
return '%s in %s %s' % (addr, filtered_file_name, file_name)
# Regular stack frame.
return '%s in %s %s' % (addr, function_name, file_name)
def is_valid_arch(s):
"""Check if this is a valid supported architecture."""
return s in [
"i386", "x86_64", "x86_64h", "arm", "armv6", "armv7", "armv7s", "armv7k",
"arm64", "powerpc64", "powerpc64le", "s390x", "s390"
]
def guess_arch(address):
"""Guess which architecture we're running on (32/64).
10 = len('0x') + 8 hex digits."""
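# e.g. '0x7f6e35cf2e45' (14 chars) -> 'x86_64'; '0xbfff1234' (10 chars) -> 'i386'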
if len(address) > 10:
return 'x86_64'
else:
return 'i386'
class Symbolizer(object):
def __init__(self):
pass
def symbolize(self, addr, binary, offset):
"""Symbolize the given address (pair of binary and offset).
Overridden in subclasses.
Args:
addr: virtual address of an instruction.
binary: path to executable/shared object containing this instruction.
offset: instruction offset in the @binary.
Returns:
list of strings (one string for each inlined frame) describing
the code locations for this instruction (that is, function name, file
name, line and column numbers).
"""
return None
class LLVMSymbolizer(Symbolizer):
def __init__(self, symbolizer_path, default_arch, system, dsym_hints=[]):
super(LLVMSymbolizer, self).__init__()
self.symbolizer_path = symbolizer_path
self.default_arch = default_arch
self.system = system
self.dsym_hints = dsym_hints
self.pipe = self.open_llvm_symbolizer()
def open_llvm_symbolizer(self):
if not os.path.exists(self.symbolizer_path):
return None
# Setup symbolizer command line.
cmd = [
self.symbolizer_path,
'--default-arch=%s' % self.default_arch, '--demangle=true',
'--functions=linkage',
'--inlining=%s' % stack_inlining, '--use-symbol-table=true'
]
if self.system == 'darwin':
for hint in self.dsym_hints:
cmd.append('--dsym-hint=%s' % hint)
# Set LD_LIBRARY_PATH to use the right libstdc++.
env_copy = environment.copy()
env_copy['LD_LIBRARY_PATH'] = os.path.dirname(self.symbolizer_path)
# FIXME: Since we are not using process_handler.run_process here, we can run
# into issues with unicode environment variables and values. Add this
# explicit hack to convert these into strings.
env_copy = {str(key): str(value) for key, value in six.iteritems(env_copy)}
# Run the symbolizer.
pipe = subprocess.Popen(
cmd, env=env_copy, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
global pipes
pipes.append(pipe)
return pipe
def symbolize(self, addr, binary, offset):
"""Overrides Symbolizer.symbolize."""
if not binary.strip():
return ['%s in' % addr]
result = []
try:
symbolizer_input = '"%s" %s' % (binary, offset)
self.pipe.stdin.write(symbolizer_input.encode('utf-8') + b'\n')
self.pipe.stdin.flush()
while True:
function_name = self.pipe.stdout.readline().rstrip().decode('utf-8')
if not function_name:
break
file_name = self.pipe.stdout.readline().rstrip().decode('utf-8')
result.append(get_stack_frame(binary, addr, function_name, file_name))
except Exception:
logs.log_error('Symbolization using llvm-symbolizer failed for: "%s".' %
symbolizer_input)
result = []
if not result:
result = None
return result
def LLVMSymbolizerFactory(system, default_arch, dsym_hints=[]):
return LLVMSymbolizer(llvm_symbolizer_path, default_arch, system, dsym_hints)
class Addr2LineSymbolizer(Symbolizer):
def __init__(self, binary):
super(Addr2LineSymbolizer, self).__init__()
self.binary = binary
self.pipe = self.open_addr2line()
def open_addr2line(self):
cmd = ['addr2line', '--demangle', '-f', '-e', self.binary]
pipe = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
global pipes
pipes.append(pipe)
return pipe
def symbolize(self, addr, binary, offset):
"""Overrides Symbolizer.symbolize."""
if self.binary != binary:
return None
if not binary.strip():
return ['%s in' % addr]
try:
symbolizer_input = str(offset).encode('utf-8')
self.pipe.stdin.write(symbolizer_input + b'\n')
self.pipe.stdin.flush()
function_name = self.pipe.stdout.readline().rstrip().decode('utf-8')
file_name = self.pipe.stdout.readline().rstrip().decode('utf-8')
except Exception:
logs.log_error('Symbolization using addr2line failed for: "%s %s".' %
(binary, str(offset)))
function_name = ''
file_name = ''
return [get_stack_frame(binary, addr, function_name, file_name)]
class UnbufferedLineConverter(object):
"""Wrap a child process that responds to each line of input with one line of output.
Uses pty to trick the child into providing unbuffered output.
"""
def __init__(self, args, close_stderr=False):
pid, fd = pty.fork()
if pid == 0:
# We're the child. Transfer control to command.
if close_stderr:
dev_null = os.open('/dev/null', 0)
os.dup2(dev_null, 2)
os.execvp(args[0], args)
else:
# Disable echoing.
attr = termios.tcgetattr(fd)
attr[3] = attr[3] & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSANOW, attr)
# Set up a file()-like interface to the child process
self.r = os.fdopen(fd, 'r', 1)
self.w = os.fdopen(os.dup(fd), 'w', 1)
def convert(self, line):
self.w.write(line + '\n')
return self.readline()
def readline(self):
return self.r.readline().rstrip()
class DarwinSymbolizer(Symbolizer):
def __init__(self, addr, binary, arch):
super(DarwinSymbolizer, self).__init__()
self.binary = binary
self.arch = arch
self.open_atos()
def open_atos(self):
cmdline = ['atos', '-o', self.binary, '-arch', self.arch]
self.atos = UnbufferedLineConverter(cmdline, close_stderr=True)
def symbolize(self, addr, binary, offset):
"""Overrides Symbolizer.symbolize."""
if self.binary != binary:
return None
try:
atos_line = self.atos.convert('0x%x' % int(offset, 16))
while 'got symbolicator for' in atos_line:
atos_line = self.atos.readline()
# A well-formed atos response looks like this:
# foo(type1, type2) (in object.name) (filename.cc:80)
match = re.match('^(.*) \(in (.*)\) \((.*:\d*)\)$', atos_line)
if match:
function_name = match.group(1)
function_name = re.sub('\(.*?\)', '', function_name)
file_name = match.group(3)
return [get_stack_frame(binary, addr, function_name, file_name)]
else:
return ['%s in %s' % (addr, atos_line)]
except Exception:
logs.log_error('Symbolization using atos failed for: "%s %s".' %
(binary, str(offset)))
return ['{} ({}:{}+{})'.format(addr, binary, self.arch, offset)]
# Chain several symbolizers so that if one symbolizer fails, we fall back
# to the next symbolizer in chain.
class ChainSymbolizer(Symbolizer):
def __init__(self, symbolizer_list):
super(ChainSymbolizer, self).__init__()
self.symbolizer_list = symbolizer_list
def symbolize(self, addr, binary, offset):
"""Overrides Symbolizer.symbolize."""
for symbolizer in self.symbolizer_list:
if symbolizer:
result = symbolizer.symbolize(addr, binary, offset)
if result:
return result
return None
def append_symbolizer(self, symbolizer):
self.symbolizer_list.append(symbolizer)
def SystemSymbolizerFactory(system, addr, binary, arch):
if system == 'darwin':
return DarwinSymbolizer(addr, binary, arch)
elif system.startswith('linux'):
return Addr2LineSymbolizer(binary)
class SymbolizationLoop(object):
def __init__(self, binary_path_filter=None, dsym_hint_producer=None):
# Used by clients who may want to supply a different binary name.
# E.g. in Chrome several binaries may share a single .dSYM.
self.binary_path_filter = binary_path_filter
self.dsym_hint_producer = dsym_hint_producer
self.system = sys.platform
self.llvm_symbolizers = {}
self.last_llvm_symbolizer = None
self.dsym_hints = set([])
def symbolize_address(self, addr, binary, offset, arch):
# On non-Darwin (i.e. on platforms without .dSYM debug info) always use
# a single symbolizer binary.
# On Darwin, if the dsym hint producer is present:
# 1. check whether we've seen this binary already; if so,
# use |llvm_symbolizers[binary]|, which has already loaded the debug
# info for this binary (might not be the case for
# |last_llvm_symbolizer|);
# 2. otherwise check if we've seen all the hints for this binary already;
# if so, reuse |last_llvm_symbolizer| which has the full set of hints;
# 3. otherwise create a new symbolizer and pass all currently known
# .dSYM hints to it.
if not binary in self.llvm_symbolizers:
use_new_symbolizer = True
if self.system == 'darwin' and self.dsym_hint_producer:
dsym_hints_for_binary = set(self.dsym_hint_producer(binary))
use_new_symbolizer = bool(dsym_hints_for_binary - self.dsym_hints)
self.dsym_hints |= dsym_hints_for_binary
if self.last_llvm_symbolizer and not use_new_symbolizer:
self.llvm_symbolizers[binary] = self.last_llvm_symbolizer
else:
self.last_llvm_symbolizer = LLVMSymbolizerFactory(
self.system, arch, self.dsym_hints)
self.llvm_symbolizers[binary] = self.last_llvm_symbolizer
# Use the chain of symbolizers:
# LLVM symbolizer -> addr2line/atos
# (fall back to next symbolizer if the previous one fails).
if not binary in symbolizers:
symbolizers[binary] = ChainSymbolizer([self.llvm_symbolizers[binary]])
result = symbolizers[binary].symbolize(addr, binary, offset)
if result is None:
# Initialize system symbolizer only if other symbolizers failed.
symbolizers[binary].append_symbolizer(
SystemSymbolizerFactory(self.system, addr, binary, arch))
result = symbolizers[binary].symbolize(addr, binary, offset)
# The system symbolizer must produce some result.
assert result
return result
def process_stacktrace(self, unsymbolized_crash_stacktrace):
self.frame_no = 0
symbolized_crash_stacktrace = u''
for line in unsymbolized_crash_stacktrace.splitlines():
self.current_line = utils.decode_to_unicode(line.rstrip())
# 0 0x7f6e35cf2e45 (/blah/foo.so+0x11fe45)
stack_trace_line_format = (
'^( *#([0-9]+) *)(0x[0-9a-f]+) *\(([^+]*)\+(0x[0-9a-f]+)\)')
match = re.match(stack_trace_line_format, line)
if not match:
symbolized_crash_stacktrace += u'%s\n' % self.current_line
continue
_, frameno_str, addr, binary, offset = match.groups()
arch = ""
# Arch can be embedded in the filename, e.g.: "libabc.dylib:x86_64h"
colon_pos = binary.rfind(":")
if colon_pos != -1:
maybe_arch = binary[colon_pos + 1:]
if is_valid_arch(maybe_arch):
arch = maybe_arch
binary = binary[0:colon_pos]
if arch == "":
arch = guess_arch(addr)
if frameno_str == '0':
# Assume that frame #0 is the first frame of new stack trace.
self.frame_no = 0
original_binary = binary
if self.binary_path_filter:
binary = self.binary_path_filter(binary)
symbolized_line = self.symbolize_address(addr, binary, offset, arch)
if not symbolized_line:
if original_binary != binary:
symbolized_line = self.symbolize_address(addr, original_binary,
offset, arch)
if not symbolized_line:
symbolized_crash_stacktrace += u'%s\n' % self.current_line
else:
for symbolized_frame in symbolized_line:
symbolized_crash_stacktrace += u'%s\n' % (
' #' + str(self.frame_no) + ' ' + symbolized_frame.rstrip())
self.frame_no += 1
# Close any left-over open pipes.
for pipe in pipes:
pipe.stdin.close()
pipe.stdout.close()
pipe.kill()
return symbolized_crash_stacktrace
def filter_binary_path(binary_path):
"""Filters binary path to provide a local copy."""
platform = environment.platform()
if platform == 'ANDROID':
# Skip symbolization when running it on bad entries like [stack:XYZ].
if not binary_path.startswith('/') or '(deleted)' in binary_path:
return ''
# Initialize some helper variables.
binary_filename = os.path.basename(binary_path)
build_directory = environment.get_value('BUILD_DIR')
symbols_directory = environment.get_value('SYMBOLS_DIR')
# Try to find the library in the build directory first.
local_binary_path = utils.find_binary_path(build_directory, binary_path)
if local_binary_path:
return local_binary_path
# We didn't find the library locally in the build directory.
# Try finding the library in the local system library cache.
download_system_symbols_if_needed(symbols_directory)
local_binary_path = utils.find_binary_path(symbols_directory, binary_path)
if local_binary_path:
return local_binary_path
# Try pulling in the binary directly from the device into the
# system library cache directory.
local_binary_path = os.path.join(symbols_directory, binary_filename)
adb.run_command('pull %s %s' % (binary_path, local_binary_path))
if os.path.exists(local_binary_path):
return local_binary_path
# Unable to find library.
logs.log_error('Unable to find library %s for symbolization.' % binary_path)
return ''
if platform == 'CHROMEOS':
# FIXME: Add code to pull binaries from ChromeOS device.
return binary_path
if environment.is_chromeos_system_job():
# This conditional is True for ChromeOS system fuzzers that are running on
# Linux. Ensure that the binary is always looked for in the chroot and not
# in system directories.
build_dir = environment.get_value('BUILD_DIR')
if not binary_path.startswith(build_dir):
# Fixup path so |binary_path| points to a binary in the chroot (probably
# a system library).
return os.path.join(build_dir, binary_path[1:])
# For Linux and Mac, the binary exists locally. No work to do,
# just return the same binary path.
return binary_path
def symbolize_stacktrace(unsymbolized_crash_stacktrace,
enable_inline_frames=True):
"""Symbolize a crash stacktrace."""
if environment.is_trusted_host():
from bot.untrusted_runner import symbolize_host
return symbolize_host.symbolize_stacktrace(unsymbolized_crash_stacktrace,
enable_inline_frames)
platform = environment.platform()
if platform == 'WINDOWS':
# Windows Clang ASAN provides symbolized stacktraces anyway.
return unsymbolized_crash_stacktrace
if platform == 'FUCHSIA':
# Fuchsia Clang ASAN provides symbolized stacktraces anyway.
return unsymbolized_crash_stacktrace
# FIXME: Support symbolization on ChromeOS device.
if platform == 'CHROMEOS':
return unsymbolized_crash_stacktrace
# Initialize variables.
global llvm_symbolizer_path
global pipes
global stack_inlining
global symbolizers
pipes = []
stack_inlining = str(enable_inline_frames).lower()
symbolizers = {}
# Make sure we have a llvm symbolizer for this platform.
llvm_symbolizer_path = environment.get_llvm_symbolizer_path()
if not llvm_symbolizer_path:
return unsymbolized_crash_stacktrace
# Disable buffering for stdout.
disable_buffering()
loop = SymbolizationLoop(
binary_path_filter=filter_binary_path,
dsym_hint_producer=chrome_dsym_hints)
symbolized_crash_stacktrace = loop.process_stacktrace(
unsymbolized_crash_stacktrace)
return symbolized_crash_stacktrace
def download_system_symbols_if_needed(symbols_directory):
"""Download system libraries from |SYMBOLS_URL| and cache locally."""
# For local testing, we do not have access to the cloud storage bucket with
# the symbols. In this case, just bail out.
if environment.get_value('LOCAL_DEVELOPMENT'):
return
# When running reproduce tool locally, we do not have access to the cloud
# storage bucket with the symbols. In this case, just bail out.
if environment.get_value('REPRODUCE_TOOL'):
return
# We have archived symbols for google builds only.
if not settings.is_google_device():
return
# Get the build fingerprint parameters.
build_params = settings.get_build_parameters()
if not build_params:
logs.log_error('Unable to determine build parameters.')
return
build_id = build_params.get('build_id')
target = build_params.get('target')
type = build_params.get('type')
if not build_id or not target or not type:
logs.log_error('Null build parameters found, exiting.')
return
# Check if we already have the symbols in cache.
build_params_check_path = os.path.join(symbols_directory,
'.cached_build_params')
cached_build_params = utils.read_data_from_file(
build_params_check_path, eval_data=True)
if cached_build_params and cached_build_params == build_params:
# No work to do, same system symbols already in cache.
return
symbols_archive_filename = '%s-symbols-%s.zip' % (target, build_id)
symbols_archive_path = os.path.join(symbols_directory,
symbols_archive_filename)
# Delete existing symbols directory first.
shell.remove_directory(symbols_directory, recreate=True)
# Fetch symbol file from cloud storage cache (if available).
found_in_cache = storage.get_file_from_cache_if_exists(
symbols_archive_path, update_modification_time_on_access=False)
if not found_in_cache:
# Include type and sanitizer information in the target.
target_with_type_and_san = '%s-%s' % (target, type)
tool_suffix = environment.get_value('SANITIZER_TOOL_NAME')
if tool_suffix and not tool_suffix in target_with_type_and_san:
target_with_type_and_san += '_%s' % tool_suffix
# Fetch the artifact now.
fetch_artifact.get(build_id, target_with_type_and_san,
symbols_archive_filename, symbols_directory)
if not os.path.exists(symbols_archive_path):
logs.log_error(
'Unable to locate symbols archive %s.' % symbols_archive_path)
return
# Store the artifact for later use or for use by other bots.
storage.store_file_in_cache(symbols_archive_path)
archive.unpack(symbols_archive_path, symbols_directory, trusted=True)
shell.remove_file(symbols_archive_path)
utils.write_data_to_file(build_params, build_params_check_path)
|
py | b41628a0e85645e59fca27bd33e6b9f089a7ff07 | import asyncio
import cProfile
import logging
import pathlib
from beet.util.path import mkdir, path_from_root
# to use the profiler, enable it in the config file, "enable_profiler"
# the output will be written to your beet root path, e.g. ~/.beet/mainnet/profile/
# to analyze the profile, run:
# python beet/utils/profiler.py ~/.beet/mainnet/profile | less -r
# this will print CPU usage of the beet full node main thread in 1 second increments.
# find a time window of interest and analyze the profile files (which are in pstats format).
# for example:
# python beet/utils/profiler.py ~/.beet/mainnet/profile 10 20
async def profile_task(root_path: pathlib.Path, service: str, log: logging.Logger) -> None:
profile_dir = path_from_root(root_path, f"profile-{service}")
log.info("Starting profiler. saving to %s" % profile_dir)
mkdir(profile_dir)
counter = 0
while True:
pr = cProfile.Profile()
pr.enable()
# this will throw CancelledError when we're exiting
await asyncio.sleep(1)
pr.create_stats()
pr.dump_stats(profile_dir / ("slot-%05d.profile" % counter))
log.debug("saving profile %05d" % counter)
counter += 1
if __name__ == "__main__":
import sys
import pstats
import io
from colorama import init, Fore, Back, Style
from subprocess import check_call
profile_dir = pathlib.Path(sys.argv[1])
init(strip=False)
def analyze_cpu_usage(profile_dir: pathlib.Path):
counter = 0
try:
while True:
f = io.StringIO()
st = pstats.Stats(str(profile_dir / ("slot-%05d.profile" % counter)), stream=f)
st.strip_dirs()
st.sort_stats(pstats.SortKey.CUMULATIVE)
st.print_stats()
f.seek(0)
total = 0.0
sleep = 0.0
# output looks like this:
# ncalls tottime percall cumtime percall filename:lineno(function)
# 1 0.000 0.000 0.000 0.000 <function>
for line in f:
if " function calls " in line and " in " in line and " seconds":
# 304307 function calls (291692 primitive calls) in 1.031 seconds
assert total == 0
total = float(line.split()[-2])
continue
columns = line.split(None, 5)
if len(columns) < 6 or columns[0] == "ncalls":
continue
# TODO: to support Windows and macOS, extend this to a list of functions known to sleep the process
# e.g. WaitForMultipleObjects or kqueue
if "{method 'poll' of 'select.epoll' objects}" in columns[5]:
# cumulative time
sleep += float(columns[3])
if sleep < 0.000001:
percent = 100.0
else:
percent = 100.0 * (total - sleep) / total
if percent > 90:
color = Fore.RED + Style.BRIGHT
elif percent > 80:
color = Fore.MAGENTA + Style.BRIGHT
elif percent > 70:
color = Fore.YELLOW + Style.BRIGHT
elif percent > 60:
color = Style.BRIGHT
elif percent < 10:
color = Fore.GREEN
else:
color = ""
quantized = int(percent // 2)
print(
("%05d: " + color + "%3.0f%% CPU " + Back.WHITE + "%s" + Style.RESET_ALL + "%s|")
% (counter, percent, " " * quantized, " " * (50 - quantized))
)
counter += 1
except Exception as e:
print(e)
def analyze_slot_range(profile_dir: pathlib.Path, first: int, last: int):
if last < first:
print("ERROR: first must be <= last when specifying slot range")
return
files = []
for i in range(first, last + 1):
files.append(str(profile_dir / ("slot-%05d.profile" % i)))
output_file = "beet-hotspot-%d" % first
if first < last:
output_file += "-%d" % last
print("generating call tree for slot(s) [%d, %d]" % (first, last))
check_call(["gprof2dot", "-f", "pstats", "-o", output_file + ".dot"] + files)
with open(output_file + ".png", "w+") as f:
check_call(["dot", "-T", "png", output_file + ".dot"], stdout=f)
print("output written to: %s.png" % output_file)
if len(sys.argv) == 2:
# this analyzes the CPU usage at all slots saved to the profiler directory
analyze_cpu_usage(profile_dir)
elif len(sys.argv) in [3, 4]:
# the additional arguments are interpreted as either one slot, or a
# slot range (first and last) to analyze
first = int(sys.argv[2])
last = int(sys.argv[3]) if len(sys.argv) == 4 else first
analyze_slot_range(profile_dir, first, last)
else:
print(
"""USAGE:
profiler.py <profile-directory>
Analyze CPU usage at each 1 second interval from the profiles in the specified
directory. Print colored timeline to stdout
profiler.py <profile-directory> <slot>
profiler.py <profile-directory> <first-slot> <last-slot>
Analyze a single slot, or a range of time slots, from the profile directory
"""
)
|
py | b41629c1d020804d6c8aff4cb0e12956c81a39a5 | #!/usr/bin/env python
#
# ___INFO__MARK_BEGIN__
#######################################################################################
# Copyright 2016-2021 Univa Corporation (acquired and owned by Altair Engineering Inc.)
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################################
# ___INFO__MARK_END__
#
import re
from uge.exceptions.object_not_found import ObjectNotFound
from uge.exceptions.invalid_request import InvalidRequest
from uge.exceptions.object_already_exists import ObjectAlreadyExists
from .list_based_object_manager import ListBasedObjectManager
class OperatorManager(ListBasedObjectManager):
QCONF_ERROR_REGEX_LIST = [
(re.compile('.*already exists.*'), ObjectAlreadyExists),
(re.compile('.*does not exist.*'), ObjectNotFound),
(re.compile('.*may not remove.*'), InvalidRequest),
]
OBJECT_NAME = 'operator'
OBJECT_CLASS_UGE_NAME = 'o'
def __init__(self, qconf_executor):
ListBasedObjectManager.__init__(self, qconf_executor)
#############################################################################
# Testing.
if __name__ == '__main__':
pass
|
py | b4162a0ca01e9ac83b641cb9d000b9c7790dc730 | class Solution:
def maxCount(self, m, n, ops):
"""
:type m: int
:type n: int
:type ops: List[List[int]]
:rtype: int
"""
if not ops:
return m * n
return min(op[0] for op in ops) * min(op[1] for op in ops)
|
py | b4162ac39dacfccdd55b041dd156a4ebc43907ba | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'eugene'
'''
MIT License
Copyright (c) 2015 Eugene Grobbelaar (email : [email protected])
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
'''
Step 1) Load template files to memory
Step 2) Search and replace these tags in memory (including filenames).
<<<NAMESPACE>>>
<<<STATEMACHINENAME>>> or <<<CLASSNAME>>>
<<<AUTHOR>>>
Step 3) Search for the following pairs of tags
<<<PER_STATE_BEGIN>>>
<<<PER_STATE_END>>>
<<<PER_EVENT_BEGIN>>>
<<<PER_EVENT_END>>>
<<<PER_ACTION_BEGIN>>>
<<<PER_ACTION_END>>>
<<<PER_ACTION_SIGNATURE_BEGIN>>>
<<<PER_ACTION_SIGNATURE_END>>>
<<<PER_GUARD_BEGIN>>>
<<<PER_GUARD_END>>>
and duplicate the following for each item, replacing each tag with the item name
<<<STATENAME>>>
<<<EVENTNAME>>>
<<<ACTIONNAME>>>
<<<GUARDNAME>>>
These need to be expanded for event structs
<<<EVENTSIGNATURE>>>
<<<EVENTMEMBERSINSTANTIATE>>>
<<<EVENTMEMBERSDECLARE>>>
When looping <<<ALPH>>> should increment from a through Z.
When looping <<<NUM>>> should increment from 1 through 10000.
When reading the transition table, first state name (top, left) should be set to the value for this tag : <<<STATE_0>>>
Then, the transition table needs to go here, following the rules.
<<<TTT_BEGIN>>>
<<<TTT_END>>>
or
<<<TTT_LITE_BEGIN>>>
<<<TTT_LITE_END>>>
or
<<<TTT_LITE_SML_BEGIN>>>
<<<TTT_LITE_SML_END>>>
# EMBEDDED SM SUPPORT.
Step 4) In each <<PER_XXX tag, there might be more expansion required. The following tags apply in this pass
<<<PER_EVENT_CURRENT_NEXT_STATE_BEGIN>>>
<<<PER_EVENT_NEXT_STATE_END>>>
and the following replacement tags will be correctly set
<<<EVENTSTATECURRENT>>>
<<<EVENTSTATENEXT>>>
Also, the original SM only allows a single state-based action to happen.
I want there to be several actions allowed in a State, based on several events valid in that state.
These tags provide for that.
<<<PER_STATE_ACTION_EVENT_BEGIN>>>
<<<PER_STATE_ACTION_EVENT_END>>>
and the following replacement tags will be correctly set
<<<PER_STATE_ACTION>>>
<<<PER_STATE_EVENT>>>
# END EMBEDDED SM SUPPORT.
'''
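# A minimal illustrative sketch (hypothetical helper, not used by this generator) of
# the per-item expansion described in Step 3 above: every template line captured
# between a BEGIN/END marker pair is duplicated once per item, with the item tag
# replaced by the item's name.
#
#     def __expansion_sketch__(snippet_lines, tag, item_names):
#         expanded = []
#         for name in item_names:
#             for line in snippet_lines:
#                 expanded.append(line.replace(tag, name))
#         return expanded
#
# e.g. __expansion_sketch__(["void On<<<EVENTNAME>>>();"], "<<<EVENTNAME>>>", ["Start", "Stop"])
# yields ["void OnStart();", "void OnStop();"]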
__TAG_AUTHOR__ = '<<<AUTHOR>>>'
__TAG_GROUP__ = '<<<GROUP>>>'
__TAG_BRIEF__ = '<<<BRIEF>>>'
__TAG_NAMESPACE__ = '<<<NAMESPACE>>>'
__TAG_SM_NAME__ = '<<<STATEMACHINENAME>>>'
__TAG_SM_NAME_UPPER__ = '<<<STATEMACHINENAMEUPPER>>>'
__TAG_CLASS_NAME__ = '<<<CLASSNAME>>>'
__TAG_PyIFGen_NAME__ = '<<<PYIFGENNAME>>>'
__TAG_PS_BEGIN__ = "<<<PER_STATE_BEGIN>>>"
__TAG_PS_END__ = "<<<PER_STATE_END>>>"
__TAG_PE_BEGIN__ = "<<<PER_EVENT_BEGIN>>>"
__TAG_PE_END__ = "<<<PER_EVENT_END>>>"
__TAG_PA_BEGIN__ = "<<<PER_ACTION_BEGIN>>>"
__TAG_PA_END__ = "<<<PER_ACTION_END>>>"
__TAG_PASIG_BEGIN__ = "<<<PER_ACTION_SIGNATURE_BEGIN>>>"
__TAG_PASIG_END__ = "<<<PER_ACTION_SIGNATURE_END>>>"
__TAG_PG_BEGIN__ = "<<<PER_GUARD_BEGIN>>>"
__TAG_PG_END__ = "<<<PER_GUARD_END>>>"
__TAG_EVENT_SIGNATURE__ = "<<<EVENTSIGNATURE>>>"
__TAG_EVENT_MEMBERINST__ = "<<<EVENTMEMBERSINSTANTIATE>>>"
__TAG_LITE_EVENT_MEMBERINST__ = "<<<EVENTMEMBERSLITEINSTANTIATE>>>"
__TAG_EVENT_MEMBERDECL__ = "<<<EVENTMEMBERSDECLARE>>>"
__TAG_STATENAME__ = '<<<STATENAME>>>'
__TAG_EVENTNAME__ = '<<<EVENTNAME>>>'
__TAG_EVENTNAME_SMALL_CAMEL__ = '<<<EVENTNAMESMALLCAMEL>>>'
__TAG_ACTIONNAME__ = '<<<ACTIONNAME>>>'
__TAG_GUARDNAME__ = '<<<GUARDNAME>>>'
__TAG_ABC__ = '<<<ALPH>>>'
__TAG_123__ = '<<<NUM>>>'
__TAG_INIT_STATE__ = '<<<STATE_0>>>'
__TAG_TTT_BEGIN__ = '<<<TTT_BEGIN>>>'
__TAG_TTT_END___ = '<<<TTT_END>>>'
__TAG_TTT_LITE_BEGIN__ = '<<<TTT_LITE_BEGIN>>>'
__TAG_TTT_LITE_END__ = '<<<TTT_LITE_END>>>'
__TAG_TTT_LITE_SML_BEGIN__ = '<<<TTT_LITE_SML_BEGIN>>>'
__TAG_TTT_LITE_SML_END__ = '<<<TTT_LITE_SML_END>>>'
__TAG_DECLSPEC_DLL_EXPORT__ = "<<<DLL_EXPORT>>>"
# EMBEDDED SM SUPPORT.
__TAG_EVENT_CURNEX_ST_BEG__ = "<<<PER_EVENT_CURRENT_NEXT_STATE_BEGIN>>>"
__TAG_EVENT_CURNEX_ST_END__ = "<<<PER_EVENT_NEXT_STATE_END>>>"
__TAG_EVENT_ST_CUR__ = "<<<EVENTSTATECURRENT>>>"
__TAG_EVENT_ST_NXT__ = "<<<EVENTSTATENEXT>>>"
__TAG_PSAE_BEGIN__ = "<<<PER_STATE_ACTION_EVENT_BEGIN>>>"
__TAG_PSAE_END__ = "<<<PER_STATE_ACTION_EVENT_END>>>"
__TAG_PSAE_ACTION__ = "<<<PER_STATE_ACTION>>>"
__TAG_PSAE_EVENT__ = "<<<PER_STATE_EVENT>>>"
# END EMBEDDED SM SUPPORT.
# Python2 -> 3 shennanigans...try support both
try:
from interface_base import * # py2
except (ModuleNotFoundError, ImportError) as e:
from .interface_base import * # py3
try:
from .preservative import *
except (ModuleNotFoundError, ImportError) as e:
from preservative import *
try:
from .cgen import CBASEGenerator, CCodeModel, alpha, __getnextalphabet__, __resetalphabet__, even_space, FileCopyUtil, caps, camel_case_small, camel_case
except (ModuleNotFoundError, ImportError) as e:
from cgen import CBASEGenerator, CCodeModel, alpha, __getnextalphabet__, __resetalphabet__, even_space, FileCopyUtil, caps, camel_case_small, camel_case
try:
from LanguageCPP import LanguageCPP
except (ModuleNotFoundError, ImportError) as e:
from .LanguageCPP import LanguageCPP
# Model that describes a state machine.
class CStateMachineModel:
def __init__(self):
self.statemachinename = ""
self.namespacename = ""
self.declspecdllexport = ""
self.pythoninterfacegeneratorfilename = ""
self.states = []
self.actions = []
self.events = []
self.guards = []
# EMBEDDED SM SUPPORT.
self.event_transitions_per_state = {} # ['event', ['next state,current state' , ...]]
self.actionevents_per_state = {} # ['state', [['event', 'action'] , ...]
# END EMBEDDED SM SUPPORT.
self.actionsignatures = OrderedDict()
# Transition Table Model uses State Machine Model to generate all code required for a working state machine.
class CTransitionTableModel(CStateMachineModel):
START_STATE = 0
EVENT = 1
NEXT_STATE = 2
ACTION = 3
GUARD = 4
def __init__(self, tt, nn, smn, dclspc = ""):
CStateMachineModel.__init__(self)
self.transition_table = tt
self.statemachinename = smn
self.namespacename = nn
self.declspecdllexport = dclspc
tstate = OrderedDict()
taction = OrderedDict()
tevent = OrderedDict()
tguard = OrderedDict()
# EMBEDDED SM SUPPORT. ['current state, event', 'next state']
tevent_transitions_tmp = {}
# END EMBEDDED SM SUPPORT.
# Filter
for tableline in self.transition_table:
if tableline[self.START_STATE] != "" and tableline[self.START_STATE].lower() != "none":
tstate[tableline[self.START_STATE]] = 0
if tableline[self.NEXT_STATE] != "" and tableline[self.NEXT_STATE].lower() != "none":
tstate[tableline[self.NEXT_STATE]] = 0
if tableline[self.EVENT] != "" and tableline[self.EVENT].lower() != "none":
tevent[tableline[self.EVENT]] = 0
# EMBEDDED SM SUPPORT. ['current state, event', 'next state']
'''
if tableline[self.NEXT_STATE] == "" or tableline[self.NEXT_STATE].lower() == "none":
raise Exception('Events that dont change state should re-enter the current state.\nPlease fix your transition table')
tevent_transitions_tmp[tableline[self.START_STATE] + ',' + tableline[self.EVENT]] = tableline[self.NEXT_STATE]
TODO : For the case below, how to support a different 'action' on the in-state-event???? Ie that event might have gotten the machine
to this state with a particular action, but perhaps the user has configured a different action for this event in-state???
'''
if tableline[self.NEXT_STATE] == "" or tableline[self.NEXT_STATE].lower() == "none":
tevent_transitions_tmp[tableline[self.START_STATE] + ',' + tableline[self.EVENT]] = tableline[self.START_STATE]
else:
tevent_transitions_tmp[tableline[self.START_STATE] + ',' + tableline[self.EVENT]] = tableline[self.NEXT_STATE]
# This is for in-state-actions based on events...
if tableline[self.ACTION] != "" and tableline[self.ACTION].lower() != "none":
if not (tableline[self.START_STATE] in self.actionevents_per_state):
self.actionevents_per_state[tableline[self.START_STATE]] = []
self.actionevents_per_state[tableline[self.START_STATE]].append([tableline[self.EVENT], tableline[self.ACTION]])
# END EMBEDDED SM SUPPORT.
if tableline[self.ACTION] != "" and tableline[self.ACTION].lower() != "none":
taction[tableline[self.ACTION]] = 0
if not ((tableline[self.ACTION] + tableline[self.EVENT]) in self.actionsignatures):
self.actionsignatures[tableline[self.ACTION] + tableline[self.EVENT]] = (tableline[self.ACTION], tableline[self.EVENT]) #, tableline[self.START_STATE],tableline[self.NEXT_STATE]))
if tableline[self.GUARD] != "" and tableline[self.GUARD].lower() != "none":
tguard[tableline[self.GUARD]] = 0
# Populate CStateMachineModel
for s in tstate:
self.states.append(s)
for e in tevent:
self.events.append(e)
for a in taction:
self.actions.append(a)
for g in tguard:
self.guards.append(g)
# EMBEDDED SM SUPPORT.
for e in tevent:
self.event_transitions_per_state[e] = []
for s in tstate:
key = s+','+e
if key in tevent_transitions_tmp:
self.event_transitions_per_state[e].append([tevent_transitions_tmp[key], s])
else:
self.event_transitions_per_state[e].append(['EVENT_IGNORED', s])
# END EMBEDDED SM SUPPORT.
def __getfirststate__(self):
if not self.transition_table:
return "NO TT PRESENT!"
return self.transition_table[0][0]
class CStateMachineGenerator(CBASEGenerator):
def __init__(self, inputfiledir, outputfiledir, events_interface=None, language=None, author='Anonymous', group='', brief=''):
CBASEGenerator.__init__(self,inputfiledir,outputfiledir,language, author, group, brief)
self.events_interface = events_interface
def __loadtemplates_firstfiltering__(self, smmodel):
"""
See baseclass implementation. This just prepares the dictionary of things to replace
for this type of codegeneration.
@param smmodel:
@return: cgen.CCodeModel, a dictionary -> {filename,[lines]}
"""
dict_to_replace_lines = {}
dict_to_replace_lines[__TAG_SM_NAME_UPPER__] = caps(smmodel.statemachinename)
dict_to_replace_lines[__TAG_SM_NAME__] = smmodel.statemachinename
dict_to_replace_lines[__TAG_CLASS_NAME__] = smmodel.statemachinename
dict_to_replace_lines[__TAG_PyIFGen_NAME__] = smmodel.pythoninterfacegeneratorfilename.replace('.py', '') # hack : for tcpgen simple templates,
if not dict_to_replace_lines[__TAG_PyIFGen_NAME__]:
dict_to_replace_lines[__TAG_PyIFGen_NAME__] = self.vpp_filename
dict_to_replace_lines[__TAG_NAMESPACE__] = smmodel.namespacename
dict_to_replace_lines[__TAG_AUTHOR__] = self.author
dict_to_replace_lines[__TAG_GROUP__] = self.group
dict_to_replace_lines[__TAG_BRIEF__] = self.brief
dict_to_replace_lines[__TAG_DECLSPEC_DLL_EXPORT__] = smmodel.declspecdllexport
dict_to_replace_filenames = {}
dict_to_replace_filenames["TEMPLATE_"] = smmodel.statemachinename
#dict_to_replace_filenames['.ty'] = '.py'
#dict_to_replace_filenames['.t#'] = '.cs'
#dict_to_replace_filenames['.t'] = '.h'
#dict_to_replace_filenames['.hpp'] = '.cpp' # there are no '.hpp' templates...but search and replace will apply '.t -> .h' first so '.tpp' becomes '.hpp'...grrr
return CBASEGenerator.__loadtemplates_firstfiltering__(self,dict_to_replace_lines,dict_to_replace_filenames)
def __get_event_signature__(self,name):
if self.events_interface is None or self.language is None:
return ""
for s in self.events_interface.Structs():
if s.Name == name:
return self.language.ParameterString(self.language.GetFactoryCreateParams(s, self.events_interface))
return ""
def __instantiate_event_struct_member(self, name, whitespace_cnt, is_ptr=True, instancename="data"):
if self.events_interface is None or self.language is None:
return ""
for s in self.events_interface.Structs():
if s.Name == name:
guts = self.language.InstantiateStructMembers(s, self.events_interface, '', instancename, self.language.Accessor(is_ptr))
result = ''
cnt = 0
for g in guts:
result = result + (whitespace_cnt*' ' if cnt > 0 else '') + g + '\n'
cnt = cnt + 1
return result
return ""
def __declare_event_struct_members(self, name, whitespace_cnt):
if self.events_interface is None or self.language is None:
return ""
for s in self.events_interface.Structs():
if s.Name == name:
guts = self.language.DeclareStructMembers(s, self.events_interface, '', False)
result = ''
cnt = 0
for g in guts:
result = result + ((whitespace_cnt+1)*' ' if cnt > 0 else ' ') + g + '\n'
cnt = cnt + 1
# remove last '\n'
result = result[:-1]
return result
return ""
def hasTag(self, line, tag):
return line.find(tag.replace("<<<", "").replace(">>>", "")) > 0
def hasMemberName(self, a):
return a.find("::") > 0
def extractMemberNameAndTag(self, a):
member = a[a.find("::"):a.find(">>>")].replace("::", "")
tag = a.strip()
return [tag, member]
def __innerexpand__secondfiltering__(self, names2x, lines2x, puthere):
global alpha
__resetalphabet__()
cnt = 0
for name in names2x:
for line in lines2x:
newline = line
newline = newline.replace(__TAG_STATENAME__, name)
newline = newline.replace(__TAG_EVENTNAME_SMALL_CAMEL__, camel_case_small(name))
newline = newline.replace(__TAG_EVENTNAME__, name)
newline = newline.replace(__TAG_ACTIONNAME__, name)
newline = newline.replace(__TAG_GUARDNAME__, name)
newline = newline.replace(__TAG_ABC__, chr(alpha))
newline = newline.replace(__TAG_123__, str(cnt))
# EMBEDDED SM SUPPORT.
newline = newline.replace(__TAG_EVENT_CURNEX_ST_BEG__, __TAG_EVENT_CURNEX_ST_BEG__ + '<<<' + name + '>>>') # put a marker (event name) for mapping
newline = newline.replace(__TAG_PSAE_BEGIN__, __TAG_PSAE_BEGIN__ + '<<<' + name + '>>>') # put a marker (state name) for mapping
# END EMBEDDED SM SUPPORT.
tabcnt = newline.count(' ')
newline = newline.replace(__TAG_EVENT_SIGNATURE__, self.__get_event_signature__(name))
# __TAG_EVENT_MEMBERINST__ -> PTR
if self.hasTag(newline,__TAG_EVENT_MEMBERINST__) and self.hasMemberName(newline):
line_member = self.extractMemberNameAndTag(newline)
newline = newline.replace(line_member[0],self.__instantiate_event_struct_member(name, tabcnt, True, line_member[1]))
else:
newline = newline.replace(__TAG_EVENT_MEMBERINST__, self.__instantiate_event_struct_member(name, tabcnt, True)) # PTR
# __TAG_LITE_EVENT_MEMBERINST__ -> NO PTR
if self.hasTag(newline,__TAG_LITE_EVENT_MEMBERINST__) and self.hasMemberName(newline):
line_member = self.extractMemberNameAndTag(newline)
newline = newline.replace(line_member[0],self.__instantiate_event_struct_member(name, tabcnt, False, line_member[1]))
else:
newline = newline.replace(__TAG_LITE_EVENT_MEMBERINST__, self.__instantiate_event_struct_member(name, tabcnt, False)) # NO PTR
newline = newline.replace(__TAG_EVENT_MEMBERDECL__, self.__declare_event_struct_members(name, tabcnt))
# END EMBEDDED SUPPORT
puthere.append(newline)
cnt = cnt + 1
__getnextalphabet__()
def __innerexpand_actionsignatures__(self, states2x, lines2x, puthere):
global alpha
__resetalphabet__()
cnt = 0
for key, (actionname, eventname) in states2x.items():
if eventname == "" or eventname.lower() == 'none':
eventname = "NONE"
elif eventname.lower() == 'any':
eventname = "ANY"
for line in lines2x:
puthere.append(line
.replace(__TAG_ACTIONNAME__, actionname)
.replace(__TAG_EVENTNAME_SMALL_CAMEL__, camel_case_small(eventname))
.replace(__TAG_EVENTNAME__, eventname)
.replace(__TAG_ABC__, chr(alpha))
.replace(__TAG_123__, str(cnt)))
cnt = cnt + 1
__getnextalphabet__()
def __transitiontable_replace_NONE__(self, val):
if val == "" or val.lower() == 'none':
val = "msmf::none"
return val
def __transitiontableLITE_guard_replace_NONE__(self, val):
tmp_val = val.replace('__', '')
if tmp_val == "" or tmp_val.lower() == 'none':
val = "boost::msm::gnone"
return val
def __transitiontableLITE_action_replace_NONE__(self, val):
tmp_val = val.replace('__', '')
if tmp_val == "" or tmp_val.lower() == 'none' or tmp_val.lower().find('::none<') > -1:
val = "boost::msm::none"
return val
''' This SM doesn't seem to allow 'none' transitions -> make it transition to the source state'''
def __transitiontableLITE_nextstate_replace_NONE__(self, val, source_state):
tmp_val = val.replace('__', '')
tmp_val = tmp_val.replace('msmf::', '')
if tmp_val == "" or tmp_val.lower() == 'none':
val = source_state
return val
def __expand_secondfiltering__(self, smmodel, cmmodel):
for file in cmmodel.filenames_to_lines:
ex_state = False
ex_event = False
ex_action = False
ex_actionsig = False
ex_guard = False
ex_tt = False
ex_tt_lite = False
ex_tt_lite_sml = False
snipped_to_expand = []
alllinesexpanded = []
for line in cmmodel.filenames_to_lines[file]:
begin = line.find(__TAG_PS_BEGIN__) > -1 or \
line.find(__TAG_PE_BEGIN__) > -1 or \
line.find(__TAG_PA_BEGIN__) > -1 or \
line.find(__TAG_PASIG_BEGIN__) > -1 or \
line.find(__TAG_PG_BEGIN__) > -1 or \
line.find(__TAG_TTT_BEGIN__) > -1 or \
line.find(__TAG_TTT_LITE_BEGIN__) > -1 or \
line.find(__TAG_TTT_LITE_SML_BEGIN__) > -1
ex_state = line.find(__TAG_PS_BEGIN__) > -1 or ex_state
ex_event = line.find(__TAG_PE_BEGIN__) > -1 or ex_event
ex_action = line.find(__TAG_PA_BEGIN__) > -1 or ex_action
ex_actionsig = line.find(__TAG_PASIG_BEGIN__) > -1 or ex_actionsig
ex_guard = line.find(__TAG_PG_BEGIN__) > -1 or ex_guard
ex_tt = line.find(__TAG_TTT_BEGIN__) > -1 or ex_tt
ex_tt_lite = line.find(__TAG_TTT_LITE_BEGIN__) > -1 or ex_tt_lite
ex_tt_lite_sml = line.find(__TAG_TTT_LITE_SML_BEGIN__) > -1 or ex_tt_lite_sml
if not ex_state and not ex_event and not ex_action and not ex_actionsig and not ex_guard and not ex_tt and not ex_tt_lite and not ex_tt_lite_sml:
alllinesexpanded.append(line.replace(__TAG_INIT_STATE__, smmodel.__getfirststate__()))
if ex_state and line.find(__TAG_PS_END__) > -1:
self.__innerexpand__secondfiltering__(smmodel.states, snipped_to_expand, alllinesexpanded)
snipped_to_expand = []
ex_state = False
if ex_event and line.find(__TAG_PE_END__) > -1:
self.__innerexpand__secondfiltering__(smmodel.events, snipped_to_expand, alllinesexpanded)
snipped_to_expand = []
ex_event = False
if ex_action and line.find(__TAG_PA_END__) > -1:
self.__innerexpand__secondfiltering__(smmodel.actions, snipped_to_expand, alllinesexpanded)
snipped_to_expand = []
ex_action = False
if ex_actionsig and line.find(__TAG_PASIG_END__) > -1:
self.__innerexpand_actionsignatures__(smmodel.actionsignatures, snipped_to_expand, alllinesexpanded)
snipped_to_expand = []
ex_actionsig = False
if ex_guard and line.find(__TAG_PG_END__) > -1:
self.__innerexpand__secondfiltering__(smmodel.guards, snipped_to_expand, alllinesexpanded)
snipped_to_expand = []
ex_guard = False
if ex_tt and line.find(__TAG_TTT_END___) > -1:
len_tt = len(smmodel.transition_table)
tt_out = " // " + len("msmf::Row < ") * ' ' + even_space("Start") + even_space("Event") + even_space("Next") + even_space("Action") + even_space("Guard") + '\n'
for i, ttline in enumerate(smmodel.transition_table):
tt_out += ' msmf::Row < '
tt_out += even_space(self.__transitiontable_replace_NONE__(ttline[smmodel.START_STATE])) + ','
tt_out += even_space(self.__transitiontable_replace_NONE__(ttline[smmodel.EVENT] )) + ','
tt_out += even_space(self.__transitiontable_replace_NONE__(ttline[smmodel.NEXT_STATE] )) + ','
tt_out += even_space(self.__transitiontable_replace_NONE__(ttline[smmodel.ACTION] )) + ','
tt_out += even_space(self.__transitiontable_replace_NONE__(ttline[smmodel.GUARD] )) + '> '
if i != len_tt-1:
tt_out += ","
tt_out += " // " + str(i) + '\n'
alllinesexpanded.append(tt_out)
tt_out = ""
ex_tt = False
if ex_tt_lite and line.find(__TAG_TTT_LITE_END__) > -1:
tt_out = " // " + even_space("Start + ") + even_space("Event") + even_space("[ Guard ] ") + even_space("/ Action") + even_space(" = Next") + '\n'
startStateHasEntryExit = {}
for i, ttline in enumerate(smmodel.transition_table):
if i == 0: # initial state
tt_out += " *"
else:
tt_out += " , "
tt_out += even_space(self.__transitiontable_replace_NONE__(ttline[smmodel.START_STATE])) + '+'
tt_out += even_space('event<' + self.__transitiontable_replace_NONE__(ttline[smmodel.EVENT]) + ">") + ' '
tt_out += even_space('['+self.__transitiontableLITE_guard_replace_NONE__('__'+ttline[smmodel.GUARD])+']') + ' / '
tt_out += even_space(self.__transitiontableLITE_action_replace_NONE__('__'+ttline[smmodel.ACTION]))
if ttline[smmodel.NEXT_STATE].lower() != 'none':  # avoid generating transitions into/out of a state for actions that don't change the state
tt_out += ' = ' + even_space(self.__transitiontableLITE_nextstate_replace_NONE__(ttline[smmodel.NEXT_STATE], ttline[smmodel.START_STATE]))
tt_out += '\n'
alllinesexpanded.append(tt_out)
tt_out = ""
# State entry/exit, once only
if not (ttline[smmodel.START_STATE] in startStateHasEntryExit):
startStateHasEntryExit[ttline[smmodel.START_STATE]] = True
tt_out += " , "+ttline[smmodel.START_STATE]+" + msm::on_entry / __" + ttline[smmodel.START_STATE] + 'OnEntry\n'
tt_out += " , "+ttline[smmodel.START_STATE]+" + msm::on_exit / __" + ttline[smmodel.START_STATE] + 'OnExit'
tt_out += '\n'
alllinesexpanded.append(tt_out)
tt_out = ""
ex_tt_lite = False
if ex_tt_lite_sml and line.find(__TAG_TTT_LITE_SML_END__) > -1:
tt_out = " // " + even_space("Start + ") + even_space("Event") + even_space("[ Guard ] ") + even_space("/ Action", 100) + even_space(" = Next") + '\n'
startStateHasEntryExit = {}
for i, ttline in enumerate(smmodel.transition_table):
if i == 0: # initial state
tt_out += " *"
else:
tt_out += " , "
tt_out += even_space(self.__transitiontable_replace_NONE__(ttline[smmodel.START_STATE])) + '+'
tt_out += even_space('event<' + self.__transitiontable_replace_NONE__(ttline[smmodel.EVENT]) + ">") + ' '
tt_out += even_space('['+self.__transitiontableLITE_guard_replace_NONE__('__'+ttline[smmodel.GUARD])+']') + ' / '
#tt_out += even_space(self.__transitiontableLITE_action_replace_NONE__('call(this,&CONCRETE::' + ttline[smmodel.ACTION] + '<' + ttline[smmodel.EVENT] + ">)"), 100)
tt_out += even_space(self.__transitiontableLITE_action_replace_NONE__('__' + ttline[smmodel.ACTION]), 100)
if ttline[smmodel.NEXT_STATE].lower() != 'none':  # avoid generating transitions into/out of a state for actions that don't change the state
tt_out += ' = ' + even_space(self.__transitiontableLITE_nextstate_replace_NONE__(ttline[smmodel.NEXT_STATE], ttline[smmodel.START_STATE]))
tt_out += '\n'
alllinesexpanded.append(tt_out)
tt_out = ""
# State entry/exit, once only
if not (ttline[smmodel.START_STATE] in startStateHasEntryExit):
startStateHasEntryExit[ttline[smmodel.START_STATE]] = True
tt_out += " , "+ttline[smmodel.START_STATE]+" + msm::on_entry<_> / __" + ttline[smmodel.START_STATE] + 'OnEntry\n'
tt_out += " , "+ttline[smmodel.START_STATE]+" + msm::on_exit<_> / __" + ttline[smmodel.START_STATE] + 'OnExit'
tt_out += '\n'
alllinesexpanded.append(tt_out)
tt_out = ""
ex_tt_lite_sml = False
if (ex_state or ex_event or ex_action or ex_actionsig or ex_guard or ex_tt or ex_tt_lite or ex_tt_lite_sml) and not begin:
snipped_to_expand.append(line)
cmmodel.filenames_to_lines[file] = alllinesexpanded
# EMBEDDED SM SUPPORT.
def __innerexpand__thirdfiltering__eventtransitionsperstate(self, namesmap3x, lines3x, puthere):
global alpha
__resetalphabet__()
cnt = 0
# First find the mapping marker
for _map in namesmap3x:
currentstate = _map[1]
nextstate = _map[0]
for line in lines3x:
#puthere.append(line.replace(__TAG_ABC__, chr(alpha)).replace(__TAG_123__, str(cnt)))
puthere.append(line.replace(__TAG_EVENT_ST_CUR__, currentstate).replace(__TAG_EVENT_ST_NXT__, nextstate).replace(__TAG_ABC__, chr(alpha)).replace(__TAG_123__, str(cnt)))
cnt = cnt + 1
__getnextalphabet__()
# this function is pretty much the same as the one above...
def __innerexpand__thirdfiltering__eventactionsperstate(self, namesmap3x, lines3x, puthere):
global alpha
__resetalphabet__()
cnt = 0
# First find the mapping marker
for _map in namesmap3x:
action = _map[1]
event = _map[0]
for line in lines3x:
# puthere.append(line.replace(__TAG_ABC__, chr(alpha)).replace(__TAG_123__, str(cnt)))
puthere.append(line.replace(__TAG_PSAE_ACTION__, action).replace(__TAG_PSAE_EVENT__, event).replace(__TAG_ABC__, chr(alpha)).replace(__TAG_123__, str(cnt)))
cnt = cnt + 1
__getnextalphabet__()
def __expand_thirdfiltering__(self, smmodel, cmmodel):
for file in cmmodel.filenames_to_lines:
ex_state = False
ex_event = False
#ex_action = False
#ex_guard = False
snippet_to_expand = []
alllinesexpanded = []
state_action_map = ''
event_map = ''
for line in cmmodel.filenames_to_lines[file]:
begin = line.find(__TAG_EVENT_CURNEX_ST_BEG__) > -1 or line.find(__TAG_PSAE_BEGIN__) > -1 #or line.find(__TAG_PA_BEGIN__) > -1 or line.find(__TAG_PG_BEGIN__) > -1
if begin:
event_map = line.replace(__TAG_EVENT_CURNEX_ST_BEG__, '').replace('<<<', '').replace('>>>', '').replace('\t', '').replace('\n', '').replace(" ","")
state_action_map = line.replace(__TAG_PSAE_BEGIN__, '').replace('<<<', '').replace('>>>', '').replace('\t', '').replace('\n', '').replace(" ","")
end_event = (line.find(__TAG_EVENT_CURNEX_ST_END__) > -1)
end_state = (line.find(__TAG_PSAE_END__) > -1)
ex_state = line.find(__TAG_PSAE_BEGIN__) > -1 or ex_state
ex_event = line.find(__TAG_EVENT_CURNEX_ST_BEG__) > -1 or ex_event
#ex_action = line.find(__TAG_PA_BEGIN__) > -1 or ex_action
#ex_guard = line.find(__TAG_PG_BEGIN__) > -1 or ex_guard
#if not ex_state and not ex_event and not ex_action and not ex_guard:
# alllinesexpanded.append(line.replace(__TAG_INIT_STATE__, smmodel.__getfirststate__()))
if ex_state and line.find(__TAG_PSAE_END__) > -1:
if state_action_map in smmodel.actionevents_per_state:
self.__innerexpand__thirdfiltering__eventactionsperstate(smmodel.actionevents_per_state[state_action_map], snippet_to_expand, alllinesexpanded)
snippet_to_expand = []
ex_state = False
if ex_event and line.find(__TAG_EVENT_CURNEX_ST_END__) > -1:
self.__innerexpand__thirdfiltering__eventtransitionsperstate(smmodel.event_transitions_per_state[event_map], snippet_to_expand, alllinesexpanded)
snippet_to_expand = []
ex_event = False
#if ex_action and line.find(__TAG_PA_END__) > -1:
# self.__innerexpand__thirdfiltering__(smmodel.actions, snippet_to_expand, alllinesexpanded)
# snippet_to_expand = []
# ex_action = False
#if ex_guard and line.find(__TAG_PG_END__) > -1:
# self.__innerexpand__thirdfiltering__(smmodel.guards, snippet_to_expand, alllinesexpanded)
# snippet_to_expand = []
# ex_guard = False
#if (ex_state or ex_event or ex_action or ex_guard) and not begin:
if (ex_event or ex_state) and not begin:
snippet_to_expand.append(line)
elif not begin and not end_event and not end_state: # Unlike the second pass, this needs to preserve what was done there...
alllinesexpanded.append(line)
cmmodel.filenames_to_lines[file] = alllinesexpanded
# END EMBEDDED SM SUPPORT.
''' Used for State Machine Generation
'''
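# Rough pipeline of Generate(), for orientation (sketch of the calls made below):
#   1. build a CTransitionTableModel from the caller's transition table
#   2. __loadtemplates_firstfiltering__  -> load template lines per output file
#   3. __expand_secondfiltering__        -> expand state/event/action/guard/transition-table tags
#   4. __expand_thirdfiltering__         -> embedded-SM expansions
#   5. __preserve_usertags_in_files__ + __createoutput__ -> keep user tags and write the files,
#      optionally copying the non-generated support files for C++ targets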
def Generate(self, transitiontable, namespacenname, statemachinename, dclspc="", copyotherfiles = True):
print("*************************************")
print("******* SMGen ***********************")
print("*************************************")
print(" Output Dir : " + self.output_gen_file_dir)
print(" State Machine: " + statemachinename)
print(" Executing in : " + os.path.realpath(__file__))
print("*************************************")
sm = CTransitionTableModel(transitiontable, namespacenname, statemachinename, dclspc)
cm = self.__loadtemplates_firstfiltering__(sm)
self.__expand_secondfiltering__(sm, cm)
# EMBEDDED SM SUPPORT.
self.__expand_thirdfiltering__(sm, cm)
# END EMBEDDED SM SUPPORT.
# Preserve user tags.
self.__preserve_usertags_in_files__(cm)
'''
# Round-trip Code Preservation. Will load the code to preserve upon creation (if the output dir is not-empty/the same as the one in the compile path).
preservation = Preservative(self.output_gen_file_dir)
preservation.Emplace(cm.filenames_to_lines)
'''
# Write output to file.
self.__createoutput__(cm.filenames_to_lines)
# Copy non-autogenerated required files to output.
if isinstance(self.language, LanguageCPP) and copyotherfiles:
# Files...
files_to_copy = []
files_to_copy.append("allocator.h")
files_to_copy.append("allocator.cpp")
files_to_copy.append("basetypes.h")
files_to_copy.append("CMakeLists.txt")
files_to_copy.append("Fault.h")
files_to_copy.append("Fault.cpp")
files_to_copy.append("stl_allocator.h")
files_to_copy.append("thread_FreeRTOS.h")
files_to_copy.append("thread_FreeRTOS.cpp")
files_to_copy.append("threaded_dispatcher.h")
files_to_copy.append("threaded_dispatcher_FreeRTOS.h")
files_to_copy.append("threadsafe_queue.h")
files_to_copy.append("threadsafe_queue_FreeRTOS.h")
files_to_copy.append("waitcondition.h")
files_to_copy.append("waitcondition.cpp")
files_to_copy.append("xallocator.h")
files_to_copy.append("xallocator.cpp")
files_to_copy.append("xlist.h")
files_to_copy.append("xmap.h")
files_to_copy.append("xqueue.h")
files_to_copy.append("xset.h")
files_to_copy.append("xsstream.h")
files_to_copy.append("xstring.h")
allplatformsfrom = os.path.join(os.path.abspath(os.path.dirname(__file__)), os.path.join("allplatforms", "CPP"))
allplatformsto = os.path.join(os.path.abspath(self.output_gen_file_dir), "allplatforms")
FileCopyUtil(allplatformsfrom, allplatformsto, files_to_copy)
# Boost SML ...
smlfrom = os.path.join(allplatformsfrom, os.path.join("sml", os.path.join("include","boost")))
smlto = os.path.join(allplatformsto, "boost")
smlfiles_to_copy = []
smlfiles_to_copy.append("sml.hpp")
FileCopyUtil(smlfrom, smlto, smlfiles_to_copy)
# Tests...
testfiles_to_copy = []
testfiles_to_copy.append("CMakeLists.txt")
testfiles_to_copy.append("Test.ThreadingConcepts.cpp")
testfiles_to_copy.append("test_main.cpp")
tests_allplatformsfrom = os.path.join(allplatformsfrom, "testsuite")
tests_allplatformsto = os.path.join(allplatformsto, "testsuite")
FileCopyUtil(tests_allplatformsfrom, tests_allplatformsto, testfiles_to_copy)
# Micro Unit Test Framework
microunit_files_to_copy = []
microunit_files_to_copy.append("minunit.h")
microunit_files_to_copy.append("minunit.cpp")
microunit_allplatformsfrom = os.path.join(tests_allplatformsfrom, "minunit")
microunit_allplatformsto = os.path.join(tests_allplatformsto, "minunit")
FileCopyUtil(microunit_allplatformsfrom, microunit_allplatformsto, microunit_files_to_copy)
''' Used for Protocol Generation
'''
def GenerateProtocol(self, pythoninterfacegeneratorfilename, namespacenname, classname, dclspc="", preserve_dir=""):
sm = CTransitionTableModel([], namespacenname, classname, dclspc)
sm.pythoninterfacegeneratorfilename = pythoninterfacegeneratorfilename
cm = self.__loadtemplates_firstfiltering__(sm)
self.__expand_secondfiltering__(sm, cm)
# Round-trip Code Preservation. Will load the code to preserve upon creation (if the output dir is not-empty/the same as the one in the compile path).
# TCP gen might have a different output directory (typically COG will put files into an intermediate dir, and then copy them elsewhere).
preservation = None
if preserve_dir == "":
preservation = Preservative(self.output_gen_file_dir)
else:
preservation = Preservative(preserve_dir)
preservation.Emplace(cm.filenames_to_lines)
# Write output to file.
self.__createoutput__(cm.filenames_to_lines)
# return the filenames
filenames = []
for filename in cm.filenames_to_lines.keys():
filenames.append(filename)
return filenames
|
py | b4162b365b25580d5b974b0a4f8f922fb143bcb7 | #!/usr/bin/env python
import asyncio
import logging
import time
import aiohttp
import traceback
import pandas as pd
import hummingbot.connector.exchange.digifinex.digifinex_constants as constants
from typing import Optional, List, Dict, Any
from hummingbot.core.data_type.order_book import OrderBook
from hummingbot.core.data_type.order_book_message import OrderBookMessage
from hummingbot.core.data_type.order_book_tracker_data_source import OrderBookTrackerDataSource
from hummingbot.core.utils.async_utils import safe_gather
from hummingbot.logger import HummingbotLogger
from . import digifinex_utils
from .digifinex_active_order_tracker import DigifinexActiveOrderTracker
from .digifinex_order_book import DigifinexOrderBook
from .digifinex_websocket import DigifinexWebsocket
# from .digifinex_utils import ms_timestamp_to_s
class DigifinexAPIOrderBookDataSource(OrderBookTrackerDataSource):
MAX_RETRIES = 20
MESSAGE_TIMEOUT = 30.0
SNAPSHOT_TIMEOUT = 10.0
_logger: Optional[HummingbotLogger] = None
@classmethod
def logger(cls) -> HummingbotLogger:
if cls._logger is None:
cls._logger = logging.getLogger(__name__)
return cls._logger
def __init__(self, trading_pairs: List[str] = None):
super().__init__(trading_pairs)
self._trading_pairs: List[str] = trading_pairs
self._snapshot_msg: Dict[str, any] = {}
@classmethod
async def get_last_traded_prices(cls, trading_pairs: List[str]) -> Dict[str, float]:
result = {}
async with aiohttp.ClientSession() as client:
resp = await client.get(f"{constants.REST_URL}/ticker")
resp_json = await resp.json()
for t_pair in trading_pairs:
last_trade = [o["last"] for o in resp_json["ticker"] if o["symbol"] ==
digifinex_utils.convert_to_exchange_trading_pair(t_pair)]
if last_trade and last_trade[0] is not None:
result[t_pair] = last_trade[0]
return result
@staticmethod
async def fetch_trading_pairs() -> List[str]:
async with aiohttp.ClientSession() as client:
async with client.get(f"{constants.REST_URL}/ticker", timeout=10) as response:
if response.status == 200:
from hummingbot.connector.exchange.digifinex.digifinex_utils import \
convert_from_exchange_trading_pair
try:
data: Dict[str, Any] = await response.json()
return [convert_from_exchange_trading_pair(item["symbol"]) for item in data["ticker"]]
except Exception:
pass
# Do nothing if the request fails -- there will be no autocomplete for Digifinex trading pairs
return []
@staticmethod
async def get_order_book_data(trading_pair: str) -> Dict[str, any]:
"""
Get whole orderbook
"""
async with aiohttp.ClientSession() as client:
orderbook_response = await client.get(
f"{constants.REST_URL}/order_book?limit=150&symbol="
f"{digifinex_utils.convert_to_exchange_trading_pair(trading_pair)}"
)
if orderbook_response.status != 200:
raise IOError(
f"Error fetching OrderBook for {trading_pair} at {constants.EXCHANGE_NAME}. "
f"HTTP status is {orderbook_response.status}."
)
orderbook_data: List[Dict[str, Any]] = await safe_gather(orderbook_response.json())
orderbook_data = orderbook_data[0]
return orderbook_data
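# Hedged usage sketch (not part of the connector): fetching a single snapshot directly, e.g.
# from a REPL or a unit test; the trading pair is converted to the exchange format inside
# get_order_book_data(). "BTC-USDT" is only an illustrative pair name.
#
#   import asyncio
#   snapshot = asyncio.get_event_loop().run_until_complete(
#       DigifinexAPIOrderBookDataSource.get_order_book_data("BTC-USDT"))
#   # snapshot is the raw /order_book payload used below (bids/asks plus a "date" timestamp)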
async def get_new_order_book(self, trading_pair: str) -> OrderBook:
snapshot: Dict[str, Any] = await self.get_order_book_data(trading_pair)
snapshot_timestamp: float = time.time()
snapshot_msg: OrderBookMessage = DigifinexOrderBook.snapshot_message_from_exchange(
snapshot,
snapshot_timestamp,
metadata={"trading_pair": trading_pair}
)
order_book = self.order_book_create_function()
active_order_tracker: DigifinexActiveOrderTracker = DigifinexActiveOrderTracker()
bids, asks = active_order_tracker.convert_snapshot_message_to_order_book_row(snapshot_msg)
order_book.apply_snapshot(bids, asks, snapshot_msg.update_id)
return order_book
async def listen_for_trades(self, ev_loop: asyncio.BaseEventLoop, output: asyncio.Queue):
"""
Listen for trades using websocket trade channel
"""
while True:
try:
ws = DigifinexWebsocket()
await ws.connect()
await ws.subscribe("trades", list(map(
lambda pair: f"{digifinex_utils.convert_to_ws_trading_pair(pair)}",
self._trading_pairs
)))
async for response in ws.on_message():
params = response["params"]
symbol = params[2]
for trade in params[1]:
trade_timestamp: int = trade["time"]
trade_msg: OrderBookMessage = DigifinexOrderBook.trade_message_from_exchange(
trade,
trade_timestamp,
metadata={"trading_pair": digifinex_utils.convert_from_ws_trading_pair(symbol)}
)
output.put_nowait(trade_msg)
except asyncio.CancelledError:
raise
except Exception:
self.logger().error("Unexpected error.", exc_info=True)
await asyncio.sleep(5.0)
finally:
await ws.disconnect()
async def listen_for_order_book_diffs(self, ev_loop: asyncio.BaseEventLoop, output: asyncio.Queue):
"""
Listen for orderbook diffs using websocket book channel
"""
while True:
try:
ws = DigifinexWebsocket()
await ws.connect()
await ws.subscribe("depth", list(map(
lambda pair: f"{digifinex_utils.convert_to_ws_trading_pair(pair)}",
self._trading_pairs
)))
async for response in ws.on_message():
if response is None or 'params' not in response:
continue
params = response["params"]
symbol = params[2]
order_book_data = params[1]
timestamp: int = int(time.time())
if params[0] is True:
orderbook_msg: OrderBookMessage = DigifinexOrderBook.snapshot_message_from_exchange(
order_book_data,
timestamp,
metadata={"trading_pair": digifinex_utils.convert_from_ws_trading_pair(symbol)}
)
else:
orderbook_msg: OrderBookMessage = DigifinexOrderBook.diff_message_from_exchange(
order_book_data,
timestamp,
metadata={"trading_pair": digifinex_utils.convert_from_ws_trading_pair(symbol)}
)
output.put_nowait(orderbook_msg)
except asyncio.CancelledError:
raise
except Exception:
self.logger().network(
"Unexpected error with WebSocket connection.",
exc_info=True,
app_warning_msg="Unexpected error with WebSocket connection. Retrying in 30 seconds. "
"Check network connection."
)
await asyncio.sleep(30.0)
finally:
await ws.disconnect()
async def listen_for_order_book_snapshots(self, ev_loop: asyncio.BaseEventLoop, output: asyncio.Queue):
"""
Listen for orderbook snapshots by fetching orderbook
"""
while True:
try:
for trading_pair in self._trading_pairs:
try:
snapshot: Dict[str, any] = await self.get_order_book_data(trading_pair)
snapshot_timestamp: int = snapshot["date"]
snapshot_msg: OrderBookMessage = DigifinexOrderBook.snapshot_message_from_exchange(
snapshot,
snapshot_timestamp,
metadata={"trading_pair": trading_pair}
)
output.put_nowait(snapshot_msg)
self.logger().debug(f"Saved order book snapshot for {trading_pair}")
# Be careful not to go above API rate limits.
await asyncio.sleep(5.0)
except asyncio.CancelledError:
raise
except Exception as e:
self.logger().network(
f"Unexpected error with WebSocket connection: {e}",
exc_info=True,
app_warning_msg="Unexpected error with WebSocket connection. Retrying in 5 seconds. "
"Check network connection.\n"
+ traceback.format_exc()
)
await asyncio.sleep(5.0)
this_hour: pd.Timestamp = pd.Timestamp.utcnow().replace(minute=0, second=0, microsecond=0)
next_hour: pd.Timestamp = this_hour + pd.Timedelta(hours=1)
delta: float = next_hour.timestamp() - time.time()
await asyncio.sleep(delta)
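# Illustration of the scheduling above: if it is 14:23:05 UTC now, this_hour is 14:00:00,
# next_hour is 15:00:00 and delta is roughly 2215 seconds, so the next snapshot pass
# starts on the hour.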
except asyncio.CancelledError:
raise
except Exception:
self.logger().error("Unexpected error.", exc_info=True)
await asyncio.sleep(5.0)
|
py | b4162ba08ccbb6692b1f05e319fd103914ff1337 | class FineTune_Funcs:
'''
This class stores all kinds of modifier functions.
'''
def __init__(self,dvh_stat_calc,IMRT_TABLE,Flag,OAR_preferences):
self.dvh_stat_calc = dvh_stat_calc # store the current calculation DVH statistics criteria and discrepancy between protocols
self.IMRT_Constraints = IMRT_TABLE # all the variables stored in IMRT_TABLE
self.Mark = Flag # mark current Monaco TPS state
self.OARs_prefer = OAR_preferences # OARs that should be especially protected, e.g. OAR_preferences = ['Spinal Cord','Brain Stem','Larynx'...]
def _NPC_Modifier_V1(self):
'''
This is home developed automatic optimization module
##
input: inf(iso_effect,iso_constraint,weight,rel_imp,step_size)
step_size(the step for adjusting dose parameters)
diff_result(the evaluation index)
level_OARs(the OARs with level for us)
theshold(theshold set for optimization)
##
output: updated inf(iso_effect,iso_constraint,weight,rel_imp)
flag to mark whether target is overdose or underdose
We hope to adopt an adaptive step-size method to further optimize toward a better result.
Moreover, this program currently only targets three target-volume structures.
As written, it only supports one evaluation index per organ; if an organ has two evaluation indices, the program will raise an error.
relative_impact :
1) +: 0-0.25
2) ++: 0.25-0.5
3) +++: 0.5-0.75
4) ++++: 0.75-1.0
##
Note: the input IMRT constraints and the evaluation indices are not assumed to correspond one-to-one when parameters are changed.
In most cases the relationship is many-to-one, or many-to-many.
'''
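# Illustration of the relative_impact buckets documented above (sketch only):
#   R = 0.10 -> '+',   R = 0.30 -> '++',   R = 0.60 -> '+++',   R = 0.80 -> '++++'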
import math
IE = self.IMRT_Constraints['ISE']
IC = self.IMRT_Constraints['ISC']   # isoconstraint (the repeated 'ISE' lookups here looked like a copy-paste bug; 'ISC'/'WGT'/'RLP' are the presumed keys, matching the names used elsewhere)
W = self.IMRT_Constraints['WGT']    # weight
R = self.IMRT_Constraints['RLP']    # relative impact
Thres_Dose = self.IMRT_Constraints['thresholddose'] # specific for quadratic overdose
diff_result = {} ## patch: rebuild the dict from the original `diff` mapping
for key in diff.keys():
diff_result[key] = diff[key][0][1]
target = [diff_result[i] for i in tar_res_nam]
tar_sum = sum([math.floor(item) for item in target])
if tar_sum == len(tar_res_nam):
flag = 2 # indicate this function is good
print('Original parameters\n')
for key in IC.keys():
print(key,IC[key])
#################################
## please write your code here ##
#################################
if key in tar_res_nam or key == 'patient':
for i,item in enumerate(IC[key]):
## e.g. item = ['type=o_q', 70]
if item[0] == 'type=o_q':
print(item)
if R[key][i][1] <= 0.5: ##(+,++)
IC[key][i][1] = IE[key][i][1]
elif R[key][i][1] <= 0.75: ##(+++)
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = min((IC[key][i][1]+R[key][i][1]),IE[key][i][1])
else: ##(++++)
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IC[key][i][1]
else:
IC[key][i][1] = max((IC[key][i][1]+R[key][i][1]),IE[key][i][1])
else:
IC[key][i][1] = IC[key][i][1]
if key in level_OARs and level_OARs[key] == 1: # this indicate the stem and cord
for i in range(len(IE[key])):
if 'PRV' in key.upper():
if diff_result[key] > 1 : # push hard on the PRV here
if R[key][i][1] >= 0.75 or W[key][i][1] >= 3: # this index is extremely hard to push down
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = (IC[key][i][1]+IE[key][i][1])/2
else:
IC[key][i][1] += R[key][i][1]
elif R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant ("zombie") index; push it down here
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = (IE[key][i][1] + IC[key][i][1])/2
else:
IC[key][i][1] -= round(diff_result[key],3)
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = (IC[key][i][1]+IE[key][i][1])/2
else:
IC[key][i][1] += round(diff_result[key],3)/3
elif diff_result[key] > 0.75: # secondary concern for the plan
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
## this can be tricky: between isoconstraint and isoeffect, take whichever is smaller
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] += round(diff_result[key],3)
elif R[key][i][1] <= 0.5 and W[key][i][1] <= 5 and R[key][i][1] > 0 and W[key][i][1] > 0: # this index is relatively easy to control
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] += round(diff_result[key],3)/3
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IC[key][i][1]
else:
IC[key][i][1] = IE[key][i][1]
elif diff_result[key] > 0: # basically satisfied
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
elif R[key][i][1] <= 0.5 and W[key][i][1] <= 3 and R[key][i][1] > 0 and W[key][i][1] > 0: # check the weight and sensitivity
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/3
else:
IC[key][i][1] = IC[key][i][1]
else:
if diff_result[key] > 1: # suppress dose to Stem & Cord
if R[key][i][1] >= 0.75 or W[key][i][1] >= 5: # this index is extremely hard to push down
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2
elif R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant ("zombie") index; push it down here
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
elif diff_result[key] >= 0.75: # suppress dose to Stem & Cord
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
## this can be tricky: between isoconstraint and isoeffect, take whichever is smaller
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2
elif R[key][i][1] < 0.75 and W[key][i][1] <= 3 and R[key][i][1] > 0 and W[key][i][1] > 0: # this index is relatively easy to control
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/3  # '/' (not '//'), matching the other branches; floor division would zero out small steps
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IC[key][i][1]
else:
IC[key][i][1] = IC[key][i][1] ## freeze (keep unchanged)
elif diff_result[key] > 0: # definitely satisfied
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
elif R[key][i][1] < 0.75 and W[key][i][1] <= 3 and R[key][i][1] > 0 and W[key][i][1] > 0: # check the weight and sensitivity
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IC[key][i][1]
else:
IC[key][i][1] = IE[key][i][1]
if key in level_OARs and level_OARs[key] == 2: # this indicates the optic (visual) system
for i in range(len(IE[key])):
if diff_result[key] > 1: # primary concern for the plan
if R[key][i][1] >= 0.75 or W[key][i][1] > 5: # this index is hard to push down
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2 # step isoconstraint down by diff_result/2
elif R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant ("zombie") index; push it down here
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2 # step isoconstraint down by diff_result/2
elif diff_result[key] >= 0.75: # secondary concern for the plan
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
## this can be tricky: between isoconstraint and isoeffect, take whichever is smaller
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2
elif R[key][i][1] <= 0.75 and W[key][i][1] <= 3 and R[key][i][1] > 0 and W[key][i][1] > 0: # this index is relatively easy to control
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IC[key][i][1]
else:
IC[key][i][1] += R[key][i][1]
elif diff_result[key] > 0: # definitely satisfied
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/3
elif R[key][i][1] <= 0.5 and W[key][i][1] <= 3 and R[key][i][1] > 0 and W[key][i][1] > 0: # check the weight and sensitivity
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IC[key][i][1]
else:
IC[key][i][1] += R[key][i][1]
if key in level_OARs and level_OARs[key] == 3: # this indicates parotids
for i in range(len(IE[key])):
# if i == 0: # this refers to the first serial cost function
# IC[key][i][1] = IC[key][i][1]
#
# else:
if diff_result[key] > 1: # primary concern for the plan
if R[key][i][1] >= 0.75 or W[key][i][1] > 5: # this index is hard to push down
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1] # keep the isoconstraint unchanged here
elif R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant ("zombie") index; push it down here
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/3
elif diff_result[key] > 0.85: # secondary concern for the plan
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
## this can be tricky: between isoconstraint and isoeffect, take whichever is smaller
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2
elif R[key][i][1] <= 0.75 and W[key][i][1] <= 3 and R[key][i][1] > 0 and W[key][i][1] > 0: # this index is relatively easy to control
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/3
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IC[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
elif diff_result[key] > 0: # definitely satisfied
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
elif R[key][i][1] <= 0.5 and W[key][i][1] <= 3 and R[key][i][1] > 0 and W[key][i][1] > 0: # check the weight and sensitivity
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2
else:
IC[key][i][1] += R[key][i][1]
if key in level_OARs and level_OARs[key] == 4: # the remaining organs
for i in range(len(IE[key])):
if diff_result[key] > 1: # primary concern for the plan
if key in prior_name:
if R[key][i][1] >= 0.5 or W[key][i][1] >= 3:
# this index is hard to push down
if IC[key][i][1] < IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] += R[key][i][1]
else:
## this means it is very hard to push down
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
else:
if R[key][i][1] >= 0.75 or W[key][i][1] > 5: # this index is hard to push down
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] = IC[key][i][1]
elif R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant ("zombie") index; push it down here
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2
elif diff_result[key] > 0.85: # secondary concern for the plan
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
## this can be tricky: between isoconstraint and isoeffect, take whichever is smaller
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)
elif R[key][i][1] <= 0.75 and W[key][i][1] <= 3 and R[key][i][1] > 0 and W[key][i][1] > 0: # this index is relatively easy to control
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IC[key][i][1]
else:
IC[key][i][1] += R[key][i][1]
elif diff_result[key] > 0: # definitely satisfied
if R[key][i][1] == 0 and W[key][i][1] == 0.01: # dormant index; isoeffect can be assigned directly
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)
elif R[key][i][1] <= 0.5 and W[key][i][1] <= 3: # check the weight and sensitivity
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IE[key][i][1]
else:
IC[key][i][1] -= round(diff_result[key],3)/2
else:
if IC[key][i][1] > IE[key][i][1]:
IC[key][i][1] = IC[key][i][1]
else:
IC[key][i][1] += R[key][i][1]
else:
flag = 3 # indicate this function is not good !
print('Original parameters\n')
for key in IC.keys():
print(key,IC[key])
#################################
## Please write your code here ##
#################################
for key in R.keys():
for i in range(len(R[key])):
### no matter how many '+' signs, the upward adjustment scales linearly with the relative impact;
### if R = 0 nothing changes
## patch
# if key == 'Parotids' and i == 0:
#
# pass
# else:
if diff_result[key] < 0.6: ## this index was pushed very low at the expense of the target; relax it more to compensate the target
## if the index is pushed very low but relative_impact is small, the increase will not be large
## conversely, if the index is low but relative_impact is large, the increase will be fairly large
IC[key][i][1] += R[key][i][1]
elif diff_result[key] < 0.85: ## the index is not pushed that low, but the target still needs some compensation
IC[key][i][1] += R[key][i][1]/2
else: ## the index is only just met; give back a small amount to compensate the target
IC[key][i][1] += R[key][i][1]/3
self.inf['ise'] = IE
self.inf['isc'] = IC
self.inf['wgt'] = W
self.inf['rlp'] = R
return self.inf,flag
def dvh_stat_ind_VeryBad_OARs(self,strt_name,dvh_index):
'''
strt_name: 'PGTVnd','Parotid L'
dvh_index: (0,['D50% < 3800 cGy', 23.7, 0.62368, '7'])
alpha = 0.05 is a small perturbation for dvh stat indices
DVH statistics indices evaluation criteria: OARs
1) VeryBad => jtem(diff_from_protocol) > 1.5
2) LittleBad => jtem(diff_from_protocol) > 1.0 - alpha & jtem(diff_from_protocol) <= 1.5
3) Ideal => jtem(diff_from_protocol) > 0.7 - alpha & jtem(diff_from_protocol) <= 1.0 - alpha
4) WellDone => jtem(diff_from_protocol) > 0.4 - alpha & jtem(diff_from_protocol) <= 0.7 - alpha
5) Perfect => jtem(diff_from_protocol) <= 0.4 - alpha
relative impact 0-0.25 +
0.25-0.5 ++
0.5-0.75 +++
0.75-1.0 ++++
######################################################################
######################################################################
if rlp <= 0.25 && weight <= 0.75:
1) isc > ise, isc_new = ise;
2) isc < ise, isc_new = isc - diff_ratio1
if rlp > 0.25 && rlp <= 0.5 && weight <= 0.75:
1) isc > ise, isc_new = ise;
2) isc < ise, isc_new = isc - diff_ratio1
if rlp > 0.5 && rlp <= 0.75 && weight <= 0.75:
1) isc > ise, isc_new = ise;
2) isc < ise, isc_new = isc - diff_ratio1
if rlp > 0.75 && rlp <= 1.00 && weight <= 0.75:
1) isc > ise, isc_new = ise;
2) isc < ise, isc_new = isc - diff_ratio1
#######################################################################
#######################################################################
#######################################################################
#######################################################################
if rlp <= 0.25 && weight <= 1.5 and weight > 0.75:
1) isc > ise, isc_new = isc*(1/diff_ratio);
2) isc < ise, isc_new = isc - diff_ratio1
if rlp > 0.25 && rlp <= 0.5 && weight <= 1.5 and weight > 0.75:
1) isc > ise, isc_new = isc*(1/diff_ratio);
2) isc < ise, isc_new = isc - diff_ratio1
if rlp > 0.5 && rlp <= 0.75 && weight <= 1.5 and weight > 0.75:
1) isc > ise, isc_new = isc*(1/diff_ratio);
2) isc < ise, isc_new = isc - diff_ratio1
if rlp > 0.75 && rlp <= 1.00 && weight <= 1.5 and weight > 0.75:
1) isc > ise, isc_new = isc*(1/diff_ratio);
2) isc < ise, isc_new = isc - diff_ratio1
#######################################################################
#######################################################################
#######################################################################
#######################################################################
if weight > 1.5:
1) isc > ise, isc_new = isc - diff_ratio1
#######################################################################
#######################################################################
'''
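# Worked example of the banding above (alpha = 0.05): diff_from_protocol = 1.7 -> VeryBad,
# 1.2 -> LittleBad, 0.8 -> Ideal, 0.5 -> WellDone, 0.3 -> Perfect. The update rules below then
# depend only on the (RLP, WGT) bucket and on whether ISC already exceeds ISE.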
delta_RLP1, delta_RLP2, delta_RLP3, delta_RLP4 = 0.25,0.5,0.75,1
delta_WGT1, delta_WGT2 = 0.75, 1.5
# it's a very bad index of DVH
if strt_name in self.OARs_prefer:
# if OARs in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect (scalar branch, so no .values)
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
# if OARs not in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect (scalar branch, so no .values)
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
return self.IMRT_Constraints_Updated
def dvh_stat_ind_LittleBad_OARs(self,strt_name,dvh_index):
'''
alpha = 0.05 is a small perturbation for dvh stat indices
DVH statistics indices evaluation criteria: OARs
1) VeryBad => jtem(diff_from_protocol) > 1.5
2) LittleBad => jtem(diff_from_protocol) > 1.0 - alpha & jtem(diff_from_protocol) <= 1.5
3) Ideal => jtem(diff_from_protocol) > 0.7 - alpha & jtem(diff_from_protocol) <= 1.0 - alpha
4) WellDone => jtem(diff_from_protocol) > 0.4 - alpha & jtem(diff_from_protocol) <= 0.7 - alpha
5) Perfect => jtem(diff_from_protocol) <= 0.4 - alpha
'''
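# The per-bucket rule shared by these dvh_stat_ind_* handlers reduces to the following sketch
# (assuming scalar isc/ise and a positive diff_ratio step):
#
#   def _tighten(isc, ise, diff_ratio):
#       # cap the isoconstraint at the current isoeffect, otherwise step it down
#       return ise if isc > ise else isc - diff_ratio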
delta_RLP1, delta_RLP2, delta_RLP3, delta_RLP4 = 0.25,0.5,0.75,1
delta_WGT1, delta_WGT2 = 0.75, 1.5
# it's a very bad index of DVH
if strt_name in self.OARs_prefer:
# if OARs in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect (scalar branch, so no .values)
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
# if OARs not in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
                    if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
return self.IMRT_Constraints_Updated
def dvh_stat_ind_Ideal_OARs(self,strt_name,dvh_index):
'''
        alpha = 0.05 is a small perturbation for dvh stat indices
DVH statistics indices evaluation criteria: OARs
1) VeryBad => jtem(diff_from_protocol) > 1.5
2) LittleBad => jtem(diff_from_protocol) > 1.0 - alpha & jtem(diff_from_protocol) <= 1.5
3) Ideal => jtem(diff_from_protocol) > 0.7 - alpha & jtem(diff_from_protocol) <= 1.0 - alpha
4) WellDone => jtem(diff_from_protocol) > 0.4 - alpha & jtem(diff_from_protocol) <= 0.7 - alpha
5) Perfect => jtem(diff_from_protocol) <= 0.4 - alpha
'''
delta_RLP1, delta_RLP2, delta_RLP3, delta_RLP4 = 0.25,0.5,0.75,1
delta_WGT1, delta_WGT2 = 0.75, 1.5
        # handle an index that falls in the 'Ideal' band of the DVH criteria
if strt_name in self.OARs_prefer:
# if OARs in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
                print('dvh_index:{}'.format(dvh_index))
print('relative impact:{}'.format(self.RLP[strt_name].values))
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
                    if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
# if OARs not in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
print('self.RLP[strt_name]:{}'.format(self.RLP[strt_name]))
print('self.RLP[strt_name].values:{}'.format(self.RLP[strt_name].values))
print('dvh_index:{}'.format(dvh_index))
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
                    if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
return self.IMRT_Constraints_Updated
def dvh_stat_ind_WellDone_OARs(self,strt_name,dvh_index):
'''
        alpha = 0.05 is a small perturbation for dvh stat indices
DVH statistics indices evaluation criteria: OARs
1) VeryBad => jtem(diff_from_protocol) > 1.5
2) LittleBad => jtem(diff_from_protocol) > 1.0 - alpha & jtem(diff_from_protocol) <= 1.5
3) Ideal => jtem(diff_from_protocol) > 0.7 - alpha & jtem(diff_from_protocol) <= 1.0 - alpha
4) WellDone => jtem(diff_from_protocol) > 0.4 - alpha & jtem(diff_from_protocol) <= 0.7 - alpha
5) Perfect => jtem(diff_from_protocol) <= 0.4 - alpha
'''
delta_RLP1, delta_RLP2, delta_RLP3, delta_RLP4 = 0.25,0.5,0.75,1
delta_WGT1, delta_WGT2 = 0.75, 1.5
        # handle an index that falls in the 'WellDone' band of the DVH criteria
if strt_name in self.OARs_prefer:
# if OARs in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
                    if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
# if OARs not in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
                    if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
return self.IMRT_Constraints_Updated
def dvh_stat_ind_Perfect_OARs(self,strt_name,dvh_index):
'''
        alpha = 0.05 is a small perturbation for dvh stat indices
DVH statistics indices evaluation criteria: OARs
        1) VeryBad => jtem(diff_from_protocol) > 1.5 - alpha
2) LittleBad => jtem(diff_from_protocol) > 1.0 - alpha & jtem(diff_from_protocol) <= 1.5 - alpha
3) Ideal => jtem(diff_from_protocol) > 0.7 - alpha & jtem(diff_from_protocol) <= 1.0 - alpha
4) WellDone => jtem(diff_from_protocol) > 0.4 - alpha & jtem(diff_from_protocol) <= 0.7 - alpha
5) Perfect => jtem(diff_from_protocol) <= 0.4 - alpha
'''
delta_RLP1, delta_RLP2, delta_RLP3, delta_RLP4 = 0.25,0.5,0.75,1
delta_WGT1, delta_WGT2 = 0.75, 1.5
        # handle an index that falls in the 'Perfect' band of the DVH criteria
if strt_name in self.OARs_prefer:
# if OARs in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
                    if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
# if OARs not in preference list means need more constraints
if self.RLP[strt_name].shape != ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name].values[dvh_index] <= delta_RLP1 and self.WGT[strt_name].values[dvh_index] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP2 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP3 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].values[dvh_index] <= delta_RLP4 and self.WGT[strt_name].values[dvh_index] < delta_WGT1:
if self.ISC[strt_name].values[dvh_index] > self.ISE[strt_name].values[dvh_index]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISE[strt_name].values[dvh_index] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value[dvh_index] = self.ISC[strt_name].values[dvh_index] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name].shape == ():
# separate two types. e.g type(IMRT_TABLE['RLP'].loc['Lung'])== <class 'numpy.float64'>
# e.g type(IMRT_TABLE['RLP'].loc['BODY'])== <class 'pandas.core.series.Series'>
if self.RLP[strt_name] <= delta_RLP1 and self.WGT[strt_name] < delta_WGT1: # relative impact < + and weight < 0.5
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP2 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP3 and self.WGT[strt_name] < delta_WGT1:
                    if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
elif self.RLP[strt_name] <= delta_RLP4 and self.WGT[strt_name] < delta_WGT1:
if self.ISC[strt_name] > self.ISE[strt_name]: # isoconstraint > isoeffect
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISE[strt_name] # replace Isoconstraint with isoeffect
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
else:
temp_value = self.IMRT_Constraints_Updated.at[strt_name,'ISC']
temp_value = self.ISC[strt_name] - self.dvh_diff_ratio_l1 # indicate it's the diff_ratio
self.IMRT_Constraints_Updated.at[strt_name,'ISC'] = temp_value
return self.IMRT_Constraints_Updated
def _NPC_Modifier_V2(self):
'''
        1) Check each structure's DVH statistics, e.g. Parotid L D50% < 30, diff = 0.7
'''
self.IMRT_Constraints_Updated = self.IMRT_Constraints
self.ISC = self.IMRT_Constraints['ISC']
self.ISE = self.IMRT_Constraints['ISE']
self.WGT = self.IMRT_Constraints['WGT']
self.RLP = self.IMRT_Constraints['RLP']
self.ThDose = self.IMRT_Constraints['thresholddose'] # specific for quadratic overdose
self.dvh_diff_ratio_l1, self.dvh_diff_ratio_l2, self.dvh_diff_ratio_l3, self.dvh_diff_ratio_l4 = 1.5, 1.0, 0.7, 0.4 # determine the boundary values
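        # Band boundaries used by the dispatch below (readable summary of the
        # values above): diff_ratio > 1.5 -> VeryBad, (1.0, 1.5] -> LittleBad,
        # (0.7, 1.0] -> Ideal, (0.4, 0.7] -> WellDone, <= 0.4 -> Perfect.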
for item in self.dvh_stat_calc.keys():
# go through all the structure name
if 'pgtv' not in item.lower() and 'pctv' not in item.lower(): # indicate the OARs
for j,jtem in enumerate(self.dvh_stat_calc[item]): # go through each dvh statistics index
print('j,jtem:{},{}'.format(j,jtem))
if jtem[2] > self.dvh_diff_ratio_l1: # very bad
self.IMRT_Constraints_Updated = self.dvh_stat_ind_VeryBad_OARs(item,(j,j))
elif jtem[2] > self.dvh_diff_ratio_l2: # Little bad
self.IMRT_Constraints_Updated = self.dvh_stat_ind_LittleBad_OARs(item,j)
elif jtem[2] > self.dvh_diff_ratio_l3: # Ideal
self.IMRT_Constraints_Updated = self.dvh_stat_ind_Ideal_OARs(item,j)
elif jtem[2] > self.dvh_diff_ratio_l4: # WellDone
self.IMRT_Constraints_Updated = self.dvh_stat_ind_WellDone_OARs(item,j)
else: # Perfect
self.IMRT_Constraints_Updated = self.dvh_stat_ind_Perfect_OARs(item,j)
else:
# indicate the target region
for j,jtem in enumerate(self.dvh_stat_calc[item]): # go through each dvh statistics index
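                    # NOTE: the placeholder branches below currently leave
                    # target (PGTV/PCTV) constraints unchanged.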
if jtem[2] > 1.0:
self.IMRT_Constraints_Updated = self.IMRT_Constraints_Updated
elif jtem[2] > 0.9:
self.IMRT_Constraints_Updated = self.IMRT_Constraints_Updated
mark = 2
return self.IMRT_Constraints_Updated,mark
|
py | b4162be5b3691d59451a97778f9cf0d55aa4d532 | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division
import unittest
import numpy as np
import paddle
class TestComplexCastOp(unittest.TestCase):
def test_complex_to_real(self):
r = np.random.random(size=[10, 10]) * 10
i = np.random.random(size=[10, 10])
c_t = paddle.to_tensor(r + i * 1J, dtype='complex64')
self.assertEqual(c_t.cast('int64').dtype, paddle.int64)
self.assertEqual(c_t.cast('int32').dtype, paddle.int32)
self.assertEqual(c_t.cast('float32').dtype, paddle.float32)
self.assertEqual(c_t.cast('float64').dtype, paddle.float64)
self.assertEqual(c_t.cast('bool').dtype, paddle.bool)
self.assertTrue(
np.allclose(c_t.cast('int64').numpy(), r.astype('int64')))
self.assertTrue(
np.allclose(c_t.cast('int32').numpy(), r.astype('int32')))
self.assertTrue(
np.allclose(c_t.cast('float32').numpy(), r.astype('float32')))
self.assertTrue(
np.allclose(c_t.cast('float64').numpy(), r.astype('float64')))
self.assertTrue(np.allclose(c_t.cast('bool').numpy(), r.astype('bool')))
def test_real_to_complex(self):
r = np.random.random(size=[10, 10]) * 10
r_t = paddle.to_tensor(r)
self.assertEqual(r_t.cast('complex64').dtype, paddle.complex64)
self.assertEqual(r_t.cast('complex128').dtype, paddle.complex128)
self.assertTrue(np.allclose(r_t.cast('complex64').real().numpy(), r))
self.assertTrue(np.allclose(r_t.cast('complex128').real().numpy(), r))
def test_complex64_complex128(self):
r = np.random.random(size=[10, 10])
i = np.random.random(size=[10, 10])
c = r + i * 1J
c_64 = paddle.to_tensor(c, dtype='complex64')
c_128 = paddle.to_tensor(c, dtype='complex128')
        self.assertEqual(c_64.cast('complex128').dtype, paddle.complex128)
        self.assertEqual(c_128.cast('complex64').dtype, paddle.complex64)
self.assertTrue(
np.allclose(c_64.cast('complex128').numpy(), c_128.numpy()))
self.assertTrue(
np.allclose(c_128.cast('complex128').numpy(), c_64.numpy()))
if __name__ == '__main__':
unittest.main()
|
py | b4162cecce8c26172557b1c5c5c06672b05e6be7 | from flask import Flask, render_template
from flask_cors import CORS
from flask_restful import Resource, Api
from controller.buy import Buy
from controller.inventory import Inventory
from controller.items import Items
from controller.personal_inventory import PersonalInventory
from controller.quality import Quality
from controller.sell_in import Sell_in
from controller.update_quality import UpdateQuality
from controller.users import Users
from repository.db_engine import init_app
from services.service import Service
app = Flask(__name__)
CORS(app)
api = Api(app)
init_app(app)
class WelcomeOllivanders(Resource):
def get(self):
return {"Welcome": "Ollivanders"}
@app.route("/home")
def show_inventory():
items = Service.get_inventory()
return render_template("inventory.html", inventory=items)
api.add_resource(WelcomeOllivanders, "/")
api.add_resource(Inventory, "/inventory")
api.add_resource(Items, "/item/name/<name>", "/item")
api.add_resource(Quality, "/item/quality/<int:quality>")
api.add_resource(Sell_in, "/item/sell_in/<int:sell_in>")
api.add_resource(UpdateQuality, "/update_quality")
api.add_resource(Users, "/user")
api.add_resource(Buy, "/buy")
api.add_resource(PersonalInventory, "/user/inventory")
if __name__ == "__main__":
app.run(host="0.0.0.0", port=4000, debug=True)
|
py | b4162cff7d751bc368384c75e95b1f06f3b3b60d | from datetime import datetime
from glob import iglob as glob
import json
import os
import re
try:
from unittest2 import TestCase
except ImportError:
from unittest import TestCase
import parsley
import tomsbestfriend
THIS_DIR = os.path.dirname(__file__)
TESTS_DIR = os.path.join(THIS_DIR, "toml-test", "tests")
def load_tests(tests_dir=TESTS_DIR):
"""
Load the TOML Suite tests from the given directory.
"""
def valid(toml, output):
def test(self):
self.assertEqual(tomsbestfriend.loads(toml), output)
return test
def invalid(toml, name):
def test(self):
exception, msg = self.errors.get(name, (parsley.ParseError, None))
with self.assertRaises(exception) as e:
tomsbestfriend.loads(toml)
if msg is not None:
self.assertEqual(str(e.exception).replace("u'", "'"), msg)
return test
def reconvert(thing):
"""
Properly reconvert the values in the output.
"""
types = {
"bool" : {"true" : True, "false" : False}.get,
"datetime" : lambda d : datetime.strptime(d, "%Y-%m-%dT%H:%M:%SZ"),
"float" : float,
"integer" : int,
}
if "type" in thing:
return types.get(thing["type"], lambda i : i)(thing["value"])
return thing
def add_test_methods(test_class):
for path in glob(os.path.join(tests_dir, "*", "*.toml")):
name = re.sub(r"[\W ]+", "_", os.path.basename(path)[:-5])
expect = os.path.basename(os.path.dirname(path))
test_name = "_".join(("test", expect, name))
with open(path) as test_file:
if expect == "valid":
with open(os.path.splitext(path)[0] + ".json") as out_file:
output = json.load(out_file, object_hook=reconvert)
test = valid(test_file.read(), output)
elif expect == "invalid":
test = invalid(test_file.read(), test_name)
else:
raise ValueError("Didn't expect: %r" % (expect,))
test.__name__ = test_name
setattr(test_class, test_name, test)
return test_class
return add_test_methods
@load_tests()
class TestTOMLSuite(TestCase):
errors = {
"test_invalid_duplicate_keys" : (
tomsbestfriend.Duplicated,
"'dupe' already appears in the document.",
),
"test_invalid_duplicate_keygroups" : (
tomsbestfriend.Duplicated, "'a' already appears in the document.",
),
"test_invalid_duplicate_key_keygroup" : (
tomsbestfriend.Duplicated, "'type' already appears in 'fruit'.",
),
"test_invalid_array_mixed_types_arrays_and_ints" : (
TypeError, "[1, ['Arrays are not integers.']] is not homogeneous.",
),
"test_invalid_array_mixed_types_ints_and_floats" : (
TypeError, "[1, 1.0] is not homogeneous.",
),
"test_invalid_array_mixed_types_strings_and_ints" : (
TypeError, "['hi', 42] is not homogeneous.",
),
}
|
py | b4162d785032c5c119f17a78510598166cc97d48 | ### IMPORTS ###
import numpy as np
import pandas as pd
from sklearn.metrics import precision_score
### FUNCTIONS ###
# def authorise_search_old(pipeline, X):
# '''Authorises a search whenever there is a probability
# greater than 10% that the search will be successful.
# '''
# authorise = pipeline.predict_proba(X)[:,1] > 0.1
# return authorise
def authorise_search(pipeline, X, bias_correction=True):
'''Authorises a search whenever there is a probability
greater than 10% that the search will be successful.
'''
model_probas = pipeline.predict_proba(X)[:,1]
if bias_correction:
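        # Bias correction as implemented below: every (ethnicity, sex) group's
        # scores are shifted by the gap between that group's mean probability
        # and the overall mean, so all groups share the same average score
        # before the 10% threshold is applied.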
group_mean = pd.DataFrame({'ethnicity': X['ethnicity_officer'].values,
'sex': X['sex'].values,
'station': X['station'].values,
'proba': model_probas})\
.groupby(['ethnicity', 'sex']).transform('mean')
model_probas -= (group_mean.values.squeeze() - model_probas.mean())
authorise = model_probas > 0.1
return authorise
def min_max_range(data):
    '''Returns the range between the maximum and minimum values.'''
mmr = data.max()-data.min()
return mmr
def calculate_subgroup_precisions(y_pred, y_true, X,
grouping=['station', 'ethnicity', 'gender']):
'''Returns a dataframe with precision scores within subgroups.'''
# merge data & drop gender 'other'
df = pd.DataFrame(data={'station': X.station.values,
'ethnicity': X.ethnicity_officer.values,
'gender': X.sex.values,
'pred': y_pred,
'true': y_true})
df = df[df.gender != 'Other']
def truncated_precision(y_true, y_pred, min_values=30):
        '''Returns the precision score if input data has at least 'min_values' rows.
Otherwise returns nan.
'''
if len(y_true) < min_values:
precision = np.nan
elif sum(y_pred) == 0:
precision = np.nan
else:
precision = precision_score(y_true, y_pred)
return precision
# calculate scores
df = df.groupby(grouping).apply(lambda x: truncated_precision(x.true, x.pred, min_values=30))
return df
def within_station_discrepancy(y_pred, y_true, X):
'''Returns a series with the maximum discrepancies within each police station.'''
subgroup_precisions = calculate_subgroup_precisions(y_pred, y_true, X,
grouping=['station', 'ethnicity', 'gender'])\
.unstack(['gender','ethnicity'])\
.T\
.apply(min_max_range)
return subgroup_precisions
def across_station_discrepancy(y_pred, y_true, X):
'''Returns the maximum discrepancy between stations.'''
station_precisions = calculate_subgroup_precisions(y_pred, y_true, X,
grouping=['station'])\
.to_frame()\
.apply(min_max_range)[0]
return station_precisions
def across_subgroup_discrepancy(y_pred, y_true, X):
'''Returns the maximum discrepancy between ['ethnicity', 'gender'] subgroups.'''
station_precisions = calculate_subgroup_precisions(y_pred, y_true, X,
grouping=['ethnicity', 'gender'])\
.to_frame()\
.apply(min_max_range)[0]
return station_precisions
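# --- Illustrative usage (a sketch added for clarity; `pipeline`, `X` and
# `y_true` are assumed to be a fitted sklearn estimator exposing predict_proba,
# the feature DataFrame with 'ethnicity_officer', 'sex' and 'station' columns,
# and the observed search outcomes -- none of these are defined in this module) ---
def _example_search_audit(pipeline, X, y_true):
    '''Sketch: authorise searches, then audit precision gaps across subgroups.'''
    y_pred = authorise_search(pipeline, X, bias_correction=True)
    within = within_station_discrepancy(y_pred, y_true, X)
    between_stations = across_station_discrepancy(y_pred, y_true, X)
    between_groups = across_subgroup_discrepancy(y_pred, y_true, X)
    return within, between_stations, between_groups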
|
py | b4162db2c160247b2829b6d5ce4db35f17586d56 | #!/usr/bin/env python3
#
# Copyright 2021 MultisampledNight
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Creates a CPU database for CPUs by intel and amd. They might not be exhaustive.
"""
import intel_ark
import helpers
import dogelog
if __name__ == "__main__":
dogelog.init()
# the old CPUs, using the product ID to avoid asking the server for an
# already known CPU
old_cpus = helpers.load_cpus()
old_cpus.sort(key=lambda cpu: cpu.product_id)
# parsing all needed CPUs... AAAAAAAA
cpus = intel_ark.parse(old_cpus)
# done, let's clean up and save
cpus.extend(old_cpus)
cpus.sort(key=lambda cpu: cpu.model)
helpers.save_cpus(cpus)
dogelog.info(f"Done with CPUs, saved to:\n{helpers.CPU_DATABASE}")
# vim:textwidth=80:
|
py | b4162ef1bc12fd1a809723a8ef0a3351b996577d | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Commands that can be used to fetch exploration related models.
All functions here should be agnostic of how ExplorationModel objects are
stored in the database. In particular, the various query methods should
delegate to the Exploration model class. This will enable the exploration
storage model to be changed without affecting this module and others above it.
"""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import copy
import logging
from core.domain import caching_services
from core.domain import exp_domain
from core.domain import subscription_services
from core.platform import models
import feconf
import python_utils
import utils
(exp_models,) = models.Registry.import_models([models.NAMES.exploration])
datastore_services = models.Registry.import_datastore_services()
def _migrate_states_schema(versioned_exploration_states, exploration_id):
"""Holds the responsibility of performing a step-by-step, sequential update
of an exploration states structure based on the schema version of the input
exploration dictionary. This is very similar to the YAML conversion process
found in exp_domain.py and, in fact, many of the conversion functions for
states are also used in the YAML conversion pipeline. If the current
exploration states schema version changes
(feconf.CURRENT_STATE_SCHEMA_VERSION), a new conversion
function must be added and some code appended to this function to account
for that new version.
Args:
versioned_exploration_states: dict. A dict with two keys:
- states_schema_version: int. the states schema version for the
exploration.
- states: the dict of states comprising the exploration. The keys in
this dict are state names.
exploration_id: str. ID of the exploration.
Raises:
Exception. The given states_schema_version is invalid.
"""
states_schema_version = versioned_exploration_states[
'states_schema_version']
if states_schema_version is None or states_schema_version < 1:
states_schema_version = 0
if not (0 <= states_schema_version
<= feconf.CURRENT_STATE_SCHEMA_VERSION):
raise Exception(
'Sorry, we can only process v1-v%d and unversioned exploration '
'state schemas at present.' %
feconf.CURRENT_STATE_SCHEMA_VERSION)
while (states_schema_version <
feconf.CURRENT_STATE_SCHEMA_VERSION):
exp_domain.Exploration.update_states_from_model(
versioned_exploration_states, states_schema_version,
exploration_id)
states_schema_version += 1
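# Illustrative shape of the dict mutated above (sketch only; the field values
# are hypothetical):
#
#     versioned_exploration_states = {
#         'states_schema_version': 41,
#         'states': {'Introduction': {...}, 'End': {...}},
#     }
#
# Each pass through the while-loop above rewrites 'states' in place and bumps
# 'states_schema_version' by one until it reaches
# feconf.CURRENT_STATE_SCHEMA_VERSION.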
def get_new_exploration_id():
"""Returns a new exploration id.
Returns:
str. A new exploration id.
"""
return exp_models.ExplorationModel.get_new_id('')
def get_multiple_explorations_by_version(
exp_id, version_numbers, run_conversion=True):
"""Returns a list of Exploration domain objects corresponding to the
specified versions.
Args:
exp_id: str. ID of the exploration.
version_numbers: list(int). List of version numbers.
run_conversion: bool. When True, updates each Exploration version to
the latest states_schema_version if necessary.
Returns:
list(Exploration). List of Exploration domain objects.
Raises:
Exception. One or more of the given versions of the exploration could
not be converted to the latest schema version.
"""
explorations = []
exploration_models = exp_models.ExplorationModel.get_multi_versions(
exp_id, version_numbers)
error_versions = []
for index, exploration_model in enumerate(exploration_models):
try:
explorations.append(
get_exploration_from_model(
exploration_model, run_conversion=run_conversion))
except utils.ExplorationConversionError:
error_versions.append(version_numbers[index])
if error_versions:
raise Exception(
'Exploration %s, versions [%s] could not be converted to latest '
'schema version.'
% (exp_id, ', '.join(python_utils.MAP(str, error_versions))))
return explorations
def get_exploration_from_model(exploration_model, run_conversion=True):
"""Returns an Exploration domain object given an exploration model loaded
from the datastore.
If run_conversion is True, then the exploration's states schema version
will be checked against the current states schema version. If they do not
match, the exploration will be automatically updated to the latest states
schema version.
IMPORTANT NOTE TO DEVELOPERS: In general, run_conversion should never be
False. This option is only used for testing that the states schema version
migration works correctly, and it should never be changed otherwise.
Args:
exploration_model: ExplorationModel. An exploration storage model.
run_conversion: bool. When True, updates the exploration to the latest
states_schema_version if necessary.
Returns:
Exploration. The exploration domain object corresponding to the given
exploration model.
"""
# Ensure the original exploration model does not get altered.
versioned_exploration_states = {
'states_schema_version': exploration_model.states_schema_version,
'states': copy.deepcopy(exploration_model.states)
}
# If the exploration uses the latest states schema version, no conversion
# is necessary.
if (run_conversion and exploration_model.states_schema_version !=
feconf.CURRENT_STATE_SCHEMA_VERSION):
_migrate_states_schema(
versioned_exploration_states, exploration_model.id)
return exp_domain.Exploration(
exploration_model.id, exploration_model.title,
exploration_model.category, exploration_model.objective,
exploration_model.language_code, exploration_model.tags,
exploration_model.blurb, exploration_model.author_notes,
versioned_exploration_states['states_schema_version'],
exploration_model.init_state_name,
versioned_exploration_states['states'],
exploration_model.param_specs, exploration_model.param_changes,
exploration_model.version, exploration_model.auto_tts_enabled,
exploration_model.correctness_feedback_enabled,
created_on=exploration_model.created_on,
last_updated=exploration_model.last_updated)
def get_exploration_summary_by_id(exploration_id):
"""Returns a domain object representing an exploration summary.
Args:
exploration_id: str. The id of the ExplorationSummary to be returned.
Returns:
ExplorationSummary. The summary domain object corresponding to the
given exploration.
"""
# TODO(msl): Maybe use memcache similarly to get_exploration_by_id.
exp_summary_model = exp_models.ExpSummaryModel.get(
exploration_id, strict=False)
if exp_summary_model:
exp_summary = get_exploration_summary_from_model(exp_summary_model)
return exp_summary
else:
return None
def get_exploration_summaries_from_models(exp_summary_models):
"""Returns a dict with ExplorationSummary domain objects as values,
keyed by their exploration id.
Args:
        exp_summary_models: list(ExpSummaryModel). List of ExpSummaryModel
            instances.
Returns:
dict. The keys are exploration ids and the values are the corresponding
ExplorationSummary domain objects.
"""
exploration_summaries = [
get_exploration_summary_from_model(exp_summary_model)
for exp_summary_model in exp_summary_models]
result = {}
for exp_summary in exploration_summaries:
result[exp_summary.id] = exp_summary
return result
def get_exploration_summary_from_model(exp_summary_model):
"""Returns an ExplorationSummary domain object.
Args:
        exp_summary_model: ExpSummaryModel. An ExpSummaryModel
            instance.
Returns:
        ExplorationSummary. The summary domain object corresponding to the
given exploration summary model.
"""
return exp_domain.ExplorationSummary(
exp_summary_model.id, exp_summary_model.title,
exp_summary_model.category, exp_summary_model.objective,
exp_summary_model.language_code, exp_summary_model.tags,
exp_summary_model.ratings, exp_summary_model.scaled_average_rating,
exp_summary_model.status, exp_summary_model.community_owned,
exp_summary_model.owner_ids, exp_summary_model.editor_ids,
exp_summary_model.voice_artist_ids, exp_summary_model.viewer_ids,
exp_summary_model.contributor_ids,
exp_summary_model.contributors_summary, exp_summary_model.version,
exp_summary_model.exploration_model_created_on,
exp_summary_model.exploration_model_last_updated,
exp_summary_model.first_published_msec
)
def get_exploration_summaries_matching_ids(exp_ids):
"""Returns a list of ExplorationSummary domain objects (or None if the
corresponding summary does not exist) corresponding to the given
list of exploration ids.
Args:
exp_ids: list(str). List of exploration ids.
Returns:
list(ExplorationSummary|None). List of ExplorationSummary domain objects
corresponding to the given exploration ids. If an ExplorationSummary
does not exist, the corresponding returned list element is None.
"""
return [get_exploration_summary_from_model(model) if model else None
for model in exp_models.ExpSummaryModel.get_multi(exp_ids)]
def get_exploration_summaries_subscribed_to(user_id):
"""Returns a list of ExplorationSummary domain objects that the user
subscribes to.
Args:
user_id: str. The id of the user.
Returns:
list(ExplorationSummary). List of ExplorationSummary domain objects that
the user subscribes to.
"""
return [
summary for summary in
get_exploration_summaries_matching_ids(
subscription_services.get_exploration_ids_subscribed_to(user_id)
) if summary is not None
]
def get_exploration_by_id(exploration_id, strict=True, version=None):
"""Returns an Exploration domain object.
Args:
exploration_id: str. The id of the exploration to be returned.
strict: bool. Whether to fail noisily if no exploration with a given id
exists.
version: int or None. The version of the exploration to be returned.
If None, the latest version of the exploration is returned.
Returns:
Exploration. The domain object corresponding to the given exploration.
"""
sub_namespace = python_utils.convert_to_bytes(version) if version else None
cached_exploration = caching_services.get_multi(
caching_services.CACHE_NAMESPACE_EXPLORATION,
sub_namespace,
[exploration_id]
).get(exploration_id)
if cached_exploration is not None:
return cached_exploration
else:
exploration_model = exp_models.ExplorationModel.get(
exploration_id, strict=strict, version=version)
if exploration_model:
exploration = get_exploration_from_model(exploration_model)
caching_services.set_multi(
caching_services.CACHE_NAMESPACE_EXPLORATION,
sub_namespace,
{
exploration_id: exploration
})
return exploration
else:
return None
def get_multiple_explorations_by_id(exp_ids, strict=True):
"""Returns a dict of domain objects representing explorations with the
given ids as keys. If an exp_id is not present, it is not included in the
return dict.
Args:
exp_ids: list(str). List of ids of the exploration to be returned.
strict: bool. If True, a ValueError is raised when any exploration id
is invalid.
Returns:
dict. Maps exploration ids to the corresponding Exploration domain
objects. Any invalid exploration ids are omitted.
Raises:
ValueError. When strict is True and at least one of the given exp_ids
is invalid.
"""
result = {}
uncached = []
cache_result = caching_services.get_multi(
caching_services.CACHE_NAMESPACE_EXPLORATION, None, exp_ids)
for exp_obj in cache_result.values():
result[exp_obj.id] = exp_obj
for _id in exp_ids:
if _id not in result:
uncached.append(_id)
db_exp_models = exp_models.ExplorationModel.get_multi(uncached)
db_results_dict = {}
not_found = []
for i, eid in enumerate(uncached):
model = db_exp_models[i]
if model:
exploration = get_exploration_from_model(model)
db_results_dict[eid] = exploration
else:
logging.info(
'Tried to fetch exploration with id %s, but no such '
'exploration exists in the datastore' % eid)
not_found.append(eid)
if strict and not_found:
raise ValueError(
'Couldn\'t find explorations with the following ids:\n%s'
% '\n'.join(not_found))
cache_update = {
eid: db_results_dict[eid] for eid in db_results_dict
if db_results_dict[eid] is not None
}
if cache_update:
caching_services.set_multi(
caching_services.CACHE_NAMESPACE_EXPLORATION, None, cache_update)
result.update(db_results_dict)
return result
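# Illustrative sketch (not part of the original module): the function above is
# a read-through cache -- serve what the cache already has, load the misses
# from the datastore, then backfill the cache. A stand-alone analogue with
# hypothetical cache_get/cache_set/db_get callables:
def _example_read_through_fetch(ids, cache_get, cache_set, db_get):
    result = {}
    for _id in ids:
        cached = cache_get(_id)
        if cached is not None:
            result[_id] = cached
    loaded = {}
    for _id in ids:
        if _id not in result:
            value = db_get(_id)
            if value is not None:
                loaded[_id] = value
    cache_set(loaded)
    result.update(loaded)
    return result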
def get_exploration_summaries_where_user_has_role(user_id):
"""Returns a list of ExplorationSummary domain objects where the user has
some role.
Args:
user_id: str. The id of the user.
Returns:
list(ExplorationSummary). List of ExplorationSummary domain objects
where the user has some role.
"""
exp_summary_models = exp_models.ExpSummaryModel.query(
datastore_services.any_of(
exp_models.ExpSummaryModel.owner_ids == user_id,
exp_models.ExpSummaryModel.editor_ids == user_id,
exp_models.ExpSummaryModel.voice_artist_ids == user_id,
exp_models.ExpSummaryModel.viewer_ids == user_id,
exp_models.ExpSummaryModel.contributor_ids == user_id
)
).fetch()
return [
get_exploration_summary_from_model(exp_summary_model)
for exp_summary_model in exp_summary_models
]
|
py | b4162f05c535355ef6c9b07309b36f737ea1e75b | # coding: utf-8
"""
Control-M Services
Provides access to BMC Control-M Services # noqa: E501
OpenAPI spec version: 9.20.215
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from clients.ctm_api_client.configuration import Configuration
class AlertStatusParam(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {"alert_ids": "list[int]", "status": "str"}
attribute_map = {"alert_ids": "alertIds", "status": "status"}
def __init__(self, alert_ids=None, status=None, _configuration=None): # noqa: E501
"""AlertStatusParam - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._alert_ids = None
self._status = None
self.discriminator = None
self.alert_ids = alert_ids
if status is not None:
self.status = status
@property
def alert_ids(self):
"""Gets the alert_ids of this AlertStatusParam. # noqa: E501
alertIds. HIDDEN. # noqa: E501
:return: The alert_ids of this AlertStatusParam. # noqa: E501
:rtype: list[int]
"""
return self._alert_ids
@alert_ids.setter
def alert_ids(self, alert_ids):
"""Sets the alert_ids of this AlertStatusParam.
alertIds. HIDDEN. # noqa: E501
:param alert_ids: The alert_ids of this AlertStatusParam. # noqa: E501
:type: list[int]
"""
if self._configuration.client_side_validation and alert_ids is None:
raise ValueError(
"Invalid value for `alert_ids`, must not be `None`"
) # noqa: E501
self._alert_ids = alert_ids
@property
def status(self):
"""Gets the status of this AlertStatusParam. # noqa: E501
modify status. HIDDEN. # noqa: E501
:return: The status of this AlertStatusParam. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this AlertStatusParam.
modify status. HIDDEN. # noqa: E501
:param status: The status of this AlertStatusParam. # noqa: E501
:type: str
"""
allowed_values = ["Undefined", "Reviewed", "Closed", "New"] # noqa: E501
if self._configuration.client_side_validation and status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}".format( # noqa: E501
status, allowed_values
)
)
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(
map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
)
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(
map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict")
else item,
value.items(),
)
)
else:
result[attr] = value
if issubclass(AlertStatusParam, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AlertStatusParam):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, AlertStatusParam):
return True
return self.to_dict() != other.to_dict()
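# Illustrative usage sketch (not part of the generated client); the alert ids
# and status below are made-up values. `status` must be one of "Undefined",
# "Reviewed", "Closed" or "New", otherwise the setter raises ValueError when
# client-side validation is enabled.
def _example_alert_status_param():
    param = AlertStatusParam(alert_ids=[101, 102], status="Reviewed")
    return param.to_dict()  # {'alert_ids': [101, 102], 'status': 'Reviewed'}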
|
py | b4162f4b24fca917010e338d8ab1a2e6cb2d7137 | # --------------------------------------------------------
# Tensorflow Faster R-CNN
# Licensed under The MIT License [see LICENSE for details]
# Written by Xinlei Chen
# --------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import math
import torchvision.models as models
from model.faster_rcnn.faster_rcnn_inc_bbox_distil import _fasterRCNN_inc_bbox_distil
import pdb
class vgg16_inc(_fasterRCNN_inc_bbox_distil):
def __init__(self, classes, pretrained=False, class_agnostic=False):
self.model_path = 'data/pretrained_model/vgg16_caffe.pth'
self.dout_base_model = 512
self.pretrained = pretrained
self.class_agnostic = class_agnostic
_fasterRCNN_inc_bbox_distil.__init__(self, classes, class_agnostic)
def _init_modules(self):
vgg = models.vgg16()
if self.pretrained:
print("Loading pretrained weights from %s" %(self.model_path))
state_dict = torch.load(self.model_path)
vgg.load_state_dict({k:v for k,v in state_dict.items() if k in vgg.state_dict()})
vgg.classifier = nn.Sequential(*list(vgg.classifier._modules.values())[:-1])
# not using the last maxpool layer
self.RCNN_base = nn.Sequential(*list(vgg.features._modules.values())[:-1])
# Fix the layers before conv3:
for layer in range(10):
for p in self.RCNN_base[layer].parameters(): p.requires_grad = False
# self.RCNN_base = _RCNN_base(vgg.features, self.classes, self.dout_base_model)
self.RCNN_top = vgg.classifier
# not using the last maxpool layer
    n_new_class = 1  # split the classification head into old and new classes
    self.RCNN_cls_score = nn.Linear(4096, self.n_classes - n_new_class)
    self.RCNN_cls_score_new = nn.Linear(4096, n_new_class)  # head for the new class
if self.class_agnostic:
self.RCNN_bbox_pred = nn.Linear(4096, 4)
else:
self.RCNN_bbox_pred = nn.Linear(4096, 4 * (self.n_classes-n_new_class))
self.RCNN_bbox_pred_new = nn.Linear(4096, 4 * n_new_class)
def _head_to_tail(self, pool5):
pool5_flat = pool5.view(pool5.size(0), -1)
fc7 = self.RCNN_top(pool5_flat)
return fc7
|
py | b4162f63d561b90720a8a017e046840f06a372a1 | from .base_sei import BaseModel
from .no_sei import NoSEI
from .reaction_limited import ReactionLimited
from .solvent_diffusion_limited import SolventDiffusionLimited
from .electron_migration_limited import ElectronMigrationLimited
from .interstitial_diffusion_limited import InterstitialDiffusionLimited
|
py | b4162fee918e45c534ed7afb9294706079464635 | # ==================================================================================================
# Copyright 2013 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
import os
import shutil
from twitter.pants.binary_util import profile_classpath, runjava_indivisible
from twitter.pants.tasks import Task, TaskError
from twitter.pants.tasks.jvm_task import JvmTask
class BenchmarkRun(JvmTask):
@classmethod
def setup_parser(cls, option_group, args, mkflag):
option_group.add_option(mkflag("target"), dest = "target_class", action="append",
help = "Name of the benchmark class.")
option_group.add_option(mkflag("memory"), mkflag("memory", negate=True),
dest="memory_profiling", default=False,
action="callback", callback=mkflag.set_bool,
help="[%default] Enable memory profiling.")
option_group.add_option(mkflag("debug"), mkflag("debug", negate=True),
dest="debug", default=False,
action="callback", callback=mkflag.set_bool,
help="[%default] Enable caliper debug mode.")
def __init__(self, context):
Task.__init__(self, context)
self.profile = context.config.get('benchmark-run', 'profile',
default="benchmark-caliper-0.5")
self.confs = context.config.getlist('benchmark-run', 'confs')
self.java_args = context.config.getlist('benchmark-run', 'args',
default=['-Xmx1g', '-XX:MaxPermSize=256m'])
self.agent_profile = context.config.get('benchmark-run', 'agent_profile',
default="benchmark-java-allocation-instrumenter-2.1")
# TODO(Steve Gury):
# Find all the target classes from the Benchmark target itself
# https://jira.twitter.biz/browse/AWESOME-1938
self.caliper_args = context.options.target_class
if context.options.memory_profiling:
self.caliper_args += ['--measureMemory']
# For rewriting JDK classes to work, the JAR file has to be listed specifically in
# the JAR manifest as something that goes in the bootclasspath.
    # The MANIFEST lists a jar 'allocation.jar'; this is why we have to rename it.
agent_jar = os.readlink(profile_classpath(self.agent_profile)[0])
allocation_jar = os.path.join(os.path.dirname(agent_jar), "allocation.jar")
    # TODO(Steve Gury): Find a solution to avoid copying the jar on every run and
    # to be resilient to version upgrades.
shutil.copyfile(agent_jar, allocation_jar)
os.environ['ALLOCATION_JAR'] = str(allocation_jar)
if context.options.debug:
self.java_args.extend(context.config.getlist('jvm', 'debug_args'))
self.caliper_args += ['--debug']
def execute(self, targets):
exit_code = runjava_indivisible(
jvmargs=self.java_args,
classpath=self.classpath(profile_classpath(self.profile), confs=self.confs),
main='com.google.caliper.Runner',
opts=self.caliper_args,
workunit_name='caliper'
)
if exit_code != 0:
raise TaskError()
|
py | b41630723d8468b671fc80f36602e0005663f60d | _base_ = [
'../_base_/models/ocrnet_hr18.py',
'../_base_/datasets/cityscapes.py',
'../_base_/default_runtime.py',
'../_base_/schedules/schedule_80k.py'
]
norm_cfg = dict(type='SyncBN', requires_grad=True)
model = dict(
backbone=dict(
extra=dict(
stage2=dict(num_channels=(48, 96)),
stage3=dict(num_channels=(48, 96, 192)),
stage4=dict(num_channels=(48, 96, 192, 384)))),
decode_head=[
dict(
type='FCNHead',
in_channels=[48, 96, 192, 384],
channels=sum([48, 96, 192, 384]),
in_index=(0, 1, 2, 3),
input_transform='resize_concat',
kernel_size=1,
num_convs=1,
concat_input=False,
dropout_ratio=-1,
num_classes=19,
norm_cfg=norm_cfg,
align_corners=True,
loss_decode=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.4)),
dict(
type='OCRHead',
in_channels=[48, 96, 192, 384],
in_index=(0, 1, 2, 3),
input_transform='resize_concat',
channels=512,
ocr_channels=256,
dropout_ratio=0.1,
num_classes=19,
norm_cfg=norm_cfg,
align_corners=True,
loss_decode=dict(
type='RMILoss', num_classes=19, loss_weight=1.0))
]
)
optimizer = dict(lr=0.002)
lr_config = dict(min_lr=2e-5)
data = dict(samples_per_gpu=2, workers_per_gpu=2)
|
py | b41633ef7c435036b50417e81573bda5fdeffbcf | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Binary for generating mean and stdev of all video level examples."""
import time
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import app
from tensorflow import flags
from tensorflow import gfile
from tensorflow import logging
import readers
import utils
#%%
FLAGS = flags.FLAGS
if __name__ == '__main__':
flags.DEFINE_string(
"input_data_pattern", "",
"File glob defining the evaluation dataset in tensorflow.SequenceExample "
"format. The SequenceExamples are expected to have an 'rgb' byte array "
"sequence feature as well as a 'labels' int64 context feature.")
flags.DEFINE_string("input_data_pattern2", "", "Additional data files.")
flags.DEFINE_string("input_data_pattern3", "", "More data files.")
flags.DEFINE_string("output_file", "",
"The file to save the l2 params to.")
# Model flags.
flags.DEFINE_bool(
"frame_features", False,
"If set, then --eval_data_pattern must be frame-level features. "
"Otherwise, --eval_data_pattern must be aggregated video-level "
"features. The model must also be set appropriately (i.e. to read 3D "
"batches VS 4D batches.")
flags.DEFINE_integer(
"batch_size", 8192,
"How many examples to process per batch.")
flags.DEFINE_string("feature_names", "mean_rgb,mean_audio", "Name of the feature "
"to use for training.")
flags.DEFINE_string("feature_sizes", "1024,128", "Length of the feature vectors.")
# Other flags.
flags.DEFINE_integer("num_readers", 8,
"How many threads to use for reading input files.")
def get_input_data_tensors(
reader,
data_pattern1,
data_pattern2,
data_pattern3,
batch_size,
num_readers=1):
"""Creates the section of the graph which reads the input data.
Args:
reader: A class which parses the input data.
data_pattern: A 'glob' style path to the data files.
batch_size: How many examples to process at a time.
num_readers: How many I/O threads to use.
Returns:
A tuple containing the features tensor, labels tensor, and optionally a
tensor containing the number of frames per video. The exact dimensions
depend on the reader being used.
Raises:
IOError: If no files matching the given pattern were found.
"""
with tf.name_scope("input"):
files1 = gfile.Glob(data_pattern1)
files2 = gfile.Glob(data_pattern2)
files3 = gfile.Glob(data_pattern3)
files = files1 + files2 + files3
if not files:
raise IOError("Unable to find input files. data_pattern='" +
data_pattern1 + "'")
logging.info("number of input files: " + str(len(files)))
filename_queue = tf.train.string_input_producer(
files, num_epochs=1, shuffle=False)
examples_and_labels = [reader.prepare_reader(filename_queue)
for _ in range(num_readers)]
video_id_batch, video_batch, unused_labels, num_frames_batch = (
tf.train.batch_join(examples_and_labels,
batch_size=batch_size,
allow_smaller_final_batch = True,
enqueue_many=True))
return video_id_batch, video_batch, num_frames_batch
def calculate_moments(
reader,
feature_names,
feature_sizes,
data_pattern1,
data_pattern2,
data_pattern3,
out_file_location,
batch_size):
with tf.Session() as sess:
video_id_batch, video_batch, num_frames_batch = get_input_data_tensors(
reader, data_pattern1, data_pattern2, data_pattern3, batch_size)
feat_sum = tf.Variable(tf.zeros([sum(feature_sizes)]), name="feat_sum", )
feat_sq_sum = tf.Variable(tf.zeros([sum(feature_sizes)]), name="feat_sq_sum")
num_examples = tf.Variable(0, dtype=tf.int32, name = "num_examples")
feat_sum += tf.reduce_sum(video_batch, axis=0)
feat_sq_sum += tf.reduce_sum( tf.square(video_batch), axis=0)
num_examples += tf.shape(video_batch)[0]
# Workaround for num_epochs issue.
def set_up_init_ops(variables):
init_op_list = []
for variable in list(variables):
if "train_input" in variable.name:
init_op_list.append(tf.assign(variable, 1))
variables.remove(variable)
init_op_list.append(tf.variables_initializer(variables))
return init_op_list
sess.run(set_up_init_ops(tf.get_collection_ref(
tf.GraphKeys.LOCAL_VARIABLES)))
sess.run(tf.global_variables_initializer() )
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
num_examples_processed = 0
start_time = time.time()
try:
fetches = [num_examples, feat_sum, feat_sq_sum, video_batch]
while not coord.should_stop():
num_examples_val, feat_sum_val, feat_sq_sum_val, video_batch_val = sess.run(fetches)
now = time.time()
num_examples_processed += len(video_batch_val)
logging.info("num examples processed: " + str(num_examples_processed)
+ " elapsed seconds: " + "{0:.2f}".format(now-start_time))
except tf.errors.OutOfRangeError:
logging.info('Done with summation. num_examples = {}.'.format(num_examples_processed))
finally:
coord.request_stop()
coord.join(threads)
sess.close()
global_mean = feat_sum_val / num_examples_val
global_std = np.sqrt(feat_sq_sum_val / num_examples_val - global_mean * global_mean)
res = pd.DataFrame({'global_mean':global_mean, 'global_std':global_std})
res.to_csv(out_file_location)
def main(unused_argv):
logging.set_verbosity(tf.logging.INFO)
# convert feature_names and feature_sizes to lists of values
feature_names, feature_sizes = utils.GetListOfFeatureNamesAndSizes(
FLAGS.feature_names, FLAGS.feature_sizes)
if FLAGS.frame_features:
reader = readers.YT8MFrameFeatureReader(feature_names=feature_names,
feature_sizes=feature_sizes)
else:
reader = readers.YT8MAggregatedFeatureReader(feature_names=feature_names,
feature_sizes=feature_sizes)
  if FLAGS.output_file == "":
raise ValueError("'output_file' was not specified. "
"Unable to continue with inference.")
  if FLAGS.input_data_pattern == "":
raise ValueError("'input_data_pattern' was not specified. "
"Unable to continue with inference.")
calculate_moments(reader,
feature_names,
feature_sizes,
FLAGS.input_data_pattern,
FLAGS.input_data_pattern2,
FLAGS.input_data_pattern3,
FLAGS.output_file,
FLAGS.batch_size)
if __name__ == "__main__":
app.run()
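# Illustrative sketch (not part of the original script): calculate_moments
# accumulates sum(x) and sum(x^2) over all batches, then recovers
# mean = sum(x) / n and std = sqrt(sum(x^2) / n - mean^2). A small NumPy check
# of that identity on random data (population std, ddof=0):
def _example_running_moments(num_rows=1000, num_cols=4, seed=0):
  rng = np.random.RandomState(seed)
  x = rng.rand(num_rows, num_cols)
  feat_sum = x.sum(axis=0)
  feat_sq_sum = (x ** 2).sum(axis=0)
  mean = feat_sum / num_rows
  std = np.sqrt(feat_sq_sum / num_rows - mean * mean)
  assert np.allclose(mean, x.mean(axis=0))
  assert np.allclose(std, x.std(axis=0))
  return mean, std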
|
py | b4163455738f9ae75e7ec433441e2edd3eb2f72a | from petisco import use_case_handler, UseCase, Petisco
from meiga import Result, Error, isSuccess
from petisco.event.bus.domain.interface_event_bus import IEventBus
from taskmanager.src.modules.tasks.domain.events import TaskRemoved
from taskmanager.src.modules.tasks.domain.interface_task_repository import (
ITaskRepository,
)
from taskmanager.src.modules.tasks.domain.task_id import TaskId
@use_case_handler(logging_parameters_whitelist=["task_id"])
class TaskRemover(UseCase):
@staticmethod
def build():
return TaskRemover(
repository=Petisco.get_repository("task"), bus=Petisco.get_event_bus()
)
def __init__(self, repository: ITaskRepository, bus: IEventBus):
self.repository = repository
self.bus = bus
def execute(self, task_id: TaskId) -> Result[bool, Error]:
self.repository.remove(task_id).unwrap_or_return()
self.bus.publish(TaskRemoved(task_id))
return isSuccess
|
py | b4163480f0e76077f5915e6ebe7127a76ce93499 | # from __future__ import division
# from __future__ import print_function
import os
import time
import tensorflow as tf
import numpy as np
from models import SampleAndAggregate, SAGEInfo, Node2VecModel
from minibatch import EdgeMinibatchIterator
from neigh_samplers import UniformNeighborSampler
from utils import load_data
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
# Set random seed
seed = 123
np.random.seed(seed)
tf.set_random_seed(seed)
# Settings
flags = tf.app.flags
FLAGS = flags.FLAGS
tf.app.flags.DEFINE_boolean('log_device_placement', False,
"""Whether to log device placement.""")
#core params..
flags.DEFINE_string('model', 'graphsage', 'model names. See README for possible values.')
flags.DEFINE_float('learning_rate', 0.00001, 'initial learning rate.')
flags.DEFINE_string("model_size", "small", "Can be big or small; model specific def'ns")
flags.DEFINE_string('train_prefix', '', 'name of the object file that stores the training data. must be specified.')
# left to default values in main experiments
flags.DEFINE_integer('epochs', 1, 'number of epochs to train.')
flags.DEFINE_float('dropout', 0.0, 'dropout rate (1 - keep probability).')
flags.DEFINE_float('weight_decay', 0.0, 'weight for l2 loss on embedding matrix.')
flags.DEFINE_integer('max_degree', 100, 'maximum node degree.')
flags.DEFINE_integer('samples_1', 25, 'number of samples in layer 1')
flags.DEFINE_integer('samples_2', 10, 'number of users samples in layer 2')
flags.DEFINE_integer('dim_1', 128, 'Size of output dim (final is 2x this, if using concat)')
flags.DEFINE_integer('dim_2', 128, 'Size of output dim (final is 2x this, if using concat)')
flags.DEFINE_boolean('random_context', True, 'Whether to use random context or direct edges')
flags.DEFINE_integer('neg_sample_size', 20, 'number of negative samples')
flags.DEFINE_integer('batch_size', 512, 'minibatch size.')
flags.DEFINE_integer('n2v_test_epochs', 1, 'Number of new SGD epochs for n2v.')
flags.DEFINE_integer('identity_dim', 0, 'Set to positive value to use identity embedding features of that dimension. Default 0.')
#logging, saving, validation settings etc.
flags.DEFINE_boolean('save_embeddings', True, 'whether to save embeddings for all nodes after training')
flags.DEFINE_string('base_log_dir', '.', 'base directory for logging and saving embeddings')
flags.DEFINE_integer('validate_iter', 5000, "how often to run a validation minibatch.")
flags.DEFINE_integer('validate_batch_size', 256, "how many nodes per validation sample.")
flags.DEFINE_integer('gpu', 1, "which gpu to use.")
flags.DEFINE_integer('print_every', 50, "How often to print training info.")
flags.DEFINE_integer('max_total_steps', 10**10, "Maximum total number of iterations")
os.environ["CUDA_VISIBLE_DEVICES"]=str(FLAGS.gpu)
GPU_MEM_FRACTION = 0.8
def log_dir():
log_dir = FLAGS.base_log_dir + "/unsup-" + FLAGS.train_prefix.split("/")[-2]
log_dir += "/{model:s}_{model_size:s}_{lr:0.6f}/".format(
model=FLAGS.model,
model_size=FLAGS.model_size,
lr=FLAGS.learning_rate)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
return log_dir
# Define model evaluation function
def evaluate(sess, model, minibatch_iter, size=None):
t_test = time.time()
feed_dict_val = minibatch_iter.val_feed_dict(size)
outs_val = sess.run([model.loss, model.ranks, model.mrr],
feed_dict=feed_dict_val)
return outs_val[0], outs_val[1], outs_val[2], (time.time() - t_test)
def incremental_evaluate(sess, model, minibatch_iter, size):
t_test = time.time()
finished = False
val_losses = []
val_mrrs = []
iter_num = 0
while not finished:
feed_dict_val, finished, _ = minibatch_iter.incremental_val_feed_dict(size, iter_num)
iter_num += 1
outs_val = sess.run([model.loss, model.ranks, model.mrr],
feed_dict=feed_dict_val)
val_losses.append(outs_val[0])
val_mrrs.append(outs_val[2])
return np.mean(val_losses), np.mean(val_mrrs), (time.time() - t_test)
def save_val_embeddings(sess, model, minibatch_iter, size, out_dir, mod=""):
val_embeddings = []
finished = False
seen = set([])
nodes = []
iter_num = 0
name = "val"
while not finished:
feed_dict_val, finished, edges = minibatch_iter.incremental_embed_feed_dict(size, iter_num)
iter_num += 1
outs_val = sess.run([model.loss, model.mrr, model.outputs1],
feed_dict=feed_dict_val)
#ONLY SAVE FOR embeds1 because of planetoid
for i, edge in enumerate(edges):
if not edge[0] in seen:
val_embeddings.append(outs_val[-1][i,:])
nodes.append(edge[0])
seen.add(edge[0])
if not os.path.exists(out_dir):
os.makedirs(out_dir)
val_embeddings = np.vstack(val_embeddings)
np.save(out_dir + name + mod + ".npy", val_embeddings)
with open(out_dir + name + mod + ".txt", "w") as fp:
fp.write("\n".join(map(str,nodes)))
def construct_placeholders():
# Define placeholders
placeholders = {
'batch1' : tf.placeholder(tf.int32, shape=(None), name='batch1'),
'batch2' : tf.placeholder(tf.int32, shape=(None), name='batch2'),
# negative samples for all nodes in the batch
'neg_samples': tf.placeholder(tf.int32, shape=(None,),
name='neg_sample_size'),
'dropout': tf.placeholder_with_default(0., shape=(), name='dropout'),
'batch_size' : tf.placeholder(tf.int32, name='batch_size'),
}
return placeholders
def train(train_data, test_data=None):
G = train_data[0]
features = train_data[1]
id_map = train_data[2]
    if features is not None:
# pad with dummy zero vector
features = np.vstack([features, np.zeros((features.shape[1],))])
context_pairs = train_data[3] if FLAGS.random_context else None
placeholders = construct_placeholders()
minibatch = EdgeMinibatchIterator(G,
id_map,
placeholders, batch_size=FLAGS.batch_size,
max_degree=FLAGS.max_degree,
num_neg_samples=FLAGS.neg_sample_size,
context_pairs = context_pairs)
adj_info_ph = tf.placeholder(tf.int32, shape=minibatch.adj.shape)
adj_info = tf.Variable(adj_info_ph, trainable=False, name="adj_info")
if FLAGS.model == 'graphsage_mean':
# Create model
sampler = UniformNeighborSampler(adj_info)
layer_infos = [SAGEInfo("node", sampler, FLAGS.samples_1, FLAGS.dim_1),
SAGEInfo("node", sampler, FLAGS.samples_2, FLAGS.dim_2)]
model = SampleAndAggregate(placeholders,
features,
adj_info,
minibatch.deg,
layer_infos=layer_infos,
model_size=FLAGS.model_size,
identity_dim = FLAGS.identity_dim,
logging=True)
elif FLAGS.model == 'gcn':
# Create model
sampler = UniformNeighborSampler(adj_info)
layer_infos = [SAGEInfo("node", sampler, FLAGS.samples_1, 2*FLAGS.dim_1),
SAGEInfo("node", sampler, FLAGS.samples_2, 2*FLAGS.dim_2)]
model = SampleAndAggregate(placeholders,
features,
adj_info,
minibatch.deg,
layer_infos=layer_infos,
aggregator_type="gcn",
model_size=FLAGS.model_size,
identity_dim = FLAGS.identity_dim,
concat=False,
logging=True)
elif FLAGS.model == 'graphsage_seq':
sampler = UniformNeighborSampler(adj_info)
layer_infos = [SAGEInfo("node", sampler, FLAGS.samples_1, FLAGS.dim_1),
SAGEInfo("node", sampler, FLAGS.samples_2, FLAGS.dim_2)]
model = SampleAndAggregate(placeholders,
features,
adj_info,
minibatch.deg,
layer_infos=layer_infos,
identity_dim = FLAGS.identity_dim,
aggregator_type="seq",
model_size=FLAGS.model_size,
logging=True)
elif FLAGS.model == 'graphsage_maxpool':
sampler = UniformNeighborSampler(adj_info)
layer_infos = [SAGEInfo("node", sampler, FLAGS.samples_1, FLAGS.dim_1),
SAGEInfo("node", sampler, FLAGS.samples_2, FLAGS.dim_2)]
model = SampleAndAggregate(placeholders,
features,
adj_info,
minibatch.deg,
layer_infos=layer_infos,
aggregator_type="maxpool",
model_size=FLAGS.model_size,
identity_dim = FLAGS.identity_dim,
logging=True)
elif FLAGS.model == 'graphsage_meanpool':
sampler = UniformNeighborSampler(adj_info)
layer_infos = [SAGEInfo("node", sampler, FLAGS.samples_1, FLAGS.dim_1),
SAGEInfo("node", sampler, FLAGS.samples_2, FLAGS.dim_2)]
model = SampleAndAggregate(placeholders,
features,
adj_info,
minibatch.deg,
layer_infos=layer_infos,
aggregator_type="meanpool",
model_size=FLAGS.model_size,
identity_dim = FLAGS.identity_dim,
logging=True)
elif FLAGS.model == 'n2v':
model = Node2VecModel(placeholders, features.shape[0],
minibatch.deg,
#2x because graphsage uses concat
nodevec_dim=2*FLAGS.dim_1,
lr=FLAGS.learning_rate)
else:
raise Exception('Error: model name unrecognized.')
config = tf.ConfigProto(log_device_placement=FLAGS.log_device_placement)
config.gpu_options.allow_growth = True
#config.gpu_options.per_process_gpu_memory_fraction = GPU_MEM_FRACTION
config.allow_soft_placement = True
# Initialize session
sess = tf.Session(config=config)
merged = tf.summary.merge_all()
summary_writer = tf.summary.FileWriter(log_dir(), sess.graph)
# Init variables
sess.run(tf.global_variables_initializer(), feed_dict={adj_info_ph: minibatch.adj})
# Train model
train_shadow_mrr = None
shadow_mrr = None
total_steps = 0
avg_time = 0.0
epoch_val_costs = []
train_adj_info = tf.assign(adj_info, minibatch.adj)
val_adj_info = tf.assign(adj_info, minibatch.test_adj)
for epoch in range(FLAGS.epochs):
minibatch.shuffle()
iter = 0
print('Epoch: %04d' % (epoch + 1))
epoch_val_costs.append(0)
while not minibatch.end():
# Construct feed dictionary
feed_dict = minibatch.next_minibatch_feed_dict()
feed_dict.update({placeholders['dropout']: FLAGS.dropout})
t = time.time()
# Training step
outs = sess.run([merged, model.opt_op, model.loss, model.ranks, model.aff_all,
model.mrr, model.outputs1], feed_dict=feed_dict)
train_cost = outs[2]
train_mrr = outs[5]
if train_shadow_mrr is None:
                train_shadow_mrr = train_mrr
else:
train_shadow_mrr -= (1-0.99) * (train_shadow_mrr - train_mrr)
if iter % FLAGS.validate_iter == 0:
# Validation
sess.run(val_adj_info.op)
val_cost, ranks, val_mrr, duration = evaluate(sess, model, minibatch, size=FLAGS.validate_batch_size)
sess.run(train_adj_info.op)
epoch_val_costs[-1] += val_cost
if shadow_mrr is None:
shadow_mrr = val_mrr
else:
shadow_mrr -= (1-0.99) * (shadow_mrr - val_mrr)
if total_steps % FLAGS.print_every == 0:
summary_writer.add_summary(outs[0], total_steps)
# Print results
avg_time = (avg_time * total_steps + time.time() - t) / (total_steps + 1)
if total_steps % FLAGS.print_every == 0:
print("Iter:", '%04d' % iter,
"train_loss=", "{:.5f}".format(train_cost),
"train_mrr=", "{:.5f}".format(train_mrr),
"train_mrr_ema=", "{:.5f}".format(train_shadow_mrr), # exponential moving average
"val_loss=", "{:.5f}".format(val_cost),
"val_mrr=", "{:.5f}".format(val_mrr),
"val_mrr_ema=", "{:.5f}".format(shadow_mrr), # exponential moving average
"time=", "{:.5f}".format(avg_time))
iter += 1
total_steps += 1
if total_steps > FLAGS.max_total_steps:
break
if total_steps > FLAGS.max_total_steps:
break
print("Optimization Finished!")
if FLAGS.save_embeddings:
sess.run(val_adj_info.op)
save_val_embeddings(sess, model, minibatch, FLAGS.validate_batch_size, log_dir())
if FLAGS.model == "n2v":
# stopping the gradient for the already trained nodes
train_ids = tf.constant([[id_map[n]] for n in G.nodes_iter() if not G.node[n]['val'] and not G.node[n]['test']],
dtype=tf.int32)
test_ids = tf.constant([[id_map[n]] for n in G.nodes_iter() if G.node[n]['val'] or G.node[n]['test']],
dtype=tf.int32)
update_nodes = tf.nn.embedding_lookup(model.context_embeds, tf.squeeze(test_ids))
no_update_nodes = tf.nn.embedding_lookup(model.context_embeds,tf.squeeze(train_ids))
update_nodes = tf.scatter_nd(test_ids, update_nodes, tf.shape(model.context_embeds))
no_update_nodes = tf.stop_gradient(tf.scatter_nd(train_ids, no_update_nodes, tf.shape(model.context_embeds)))
model.context_embeds = update_nodes + no_update_nodes
sess.run(model.context_embeds)
# run random walks
from graphsage.utils import run_random_walks
nodes = [n for n in G.nodes_iter() if G.node[n]["val"] or G.node[n]["test"]]
start_time = time.time()
pairs = run_random_walks(G, nodes, num_walks=50)
walk_time = time.time() - start_time
test_minibatch = EdgeMinibatchIterator(G,
id_map,
placeholders, batch_size=FLAGS.batch_size,
max_degree=FLAGS.max_degree,
num_neg_samples=FLAGS.neg_sample_size,
context_pairs = pairs,
n2v_retrain=True,
fixed_n2v=True)
start_time = time.time()
print("Doing test training for n2v.")
test_steps = 0
for epoch in range(FLAGS.n2v_test_epochs):
test_minibatch.shuffle()
while not test_minibatch.end():
feed_dict = test_minibatch.next_minibatch_feed_dict()
feed_dict.update({placeholders['dropout']: FLAGS.dropout})
outs = sess.run([model.opt_op, model.loss, model.ranks, model.aff_all,
model.mrr, model.outputs1], feed_dict=feed_dict)
if test_steps % FLAGS.print_every == 0:
print("Iter:", '%04d' % test_steps,
"train_loss=", "{:.5f}".format(outs[1]),
"train_mrr=", "{:.5f}".format(outs[-2]))
test_steps += 1
train_time = time.time() - start_time
save_val_embeddings(sess, model, minibatch, FLAGS.validate_batch_size, log_dir(), mod="-test")
print("Total time: ", train_time+walk_time)
print("Walk time: ", walk_time)
print("Train time: ", train_time)
def main(argv=None):
print("Loading training data..")
train_data = load_data(FLAGS.train_prefix, load_walks=True)
print("Done loading training data..")
train(train_data)
if __name__ == '__main__':
tf.app.run()
|
py | b41635152f9f259b7a931f238f5b2d207e19550a | import json
import os
import threading
from CTRegisterMicroserviceFlask.errors import NotFound
from flask import jsonify
from requests import post, Session, Request
AUTOREGISTER_MODE = 'AUTOREGISTER_MODE'
NORMAL_MODE = 'NORMAL_MODE'
CT_URL = os.getenv('CT_URL')
CT_TOKEN = os.getenv('CT_TOKEN')
API_VERSION = os.getenv('API_VERSION')
def ct_register(name, ct_url, url, active):
"""Autoregister method"""
payload = {'name': name, 'url': url, 'active': active}
try:
r = post(ct_url + '/api/v1/microservice', json=payload, timeout=10)
except Exception as error:
os._exit(1)
if r.status_code >= 400:
os._exit(1)
def register(app, name, info, swagger, mode, ct_url=False, url=False, active=True, delay=5.0):
"""Register method"""
if mode == AUTOREGISTER_MODE:
t = threading.Timer(delay, ct_register, [name, ct_url, url, active])
t.start()
@app.route('/info')
def get_info():
info['swagger'] = swagger
return jsonify(info)
@app.route('/ping')
def get_ping():
return 'pong'
def request_to_microservice(config):
"""Request to microservice method"""
try:
session = Session()
request = Request(
method=config.get('method'),
url=CT_URL + config.get('uri') if config.get(
'ignore_version') or not API_VERSION else CT_URL + '/' + API_VERSION + config.get('uri'),
headers={
'content-type': 'application/json',
'Authorization': 'Bearer ' + CT_TOKEN,
'APP_KEY': config.get('application', 'rw')
},
data=json.dumps(config.get('body'))
)
prepped = session.prepare_request(request)
response = session.send(prepped)
except Exception as error:
raise error
try:
return response.json()
except Exception:
raise NotFound(response.text)
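# Illustrative wiring sketch (not part of this module); the service name and
# URLs below are made-up values. register() always adds /info and /ping to the
# Flask app and, in AUTOREGISTER_MODE, schedules a delayed POST to ct_url.
# request_to_microservice() expects a config dict with 'uri' and 'method' keys
# (plus optional 'body', 'application', 'ignore_version') and requires the
# CT_URL and CT_TOKEN environment variables to be set.
def _example_register(app):
    register(
        app, name='my-service', info={'name': 'my-service'}, swagger={},
        mode=AUTOREGISTER_MODE, ct_url='http://control-tower',
        url='http://my-service:5000', active=True, delay=5.0)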
|
py | b41635232db94e7c855e6f74fda2fdbf8a724cb4 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
from __future__ import absolute_import
from __future__ import division
import atexit
import functools
import itertools
import logging
import threading
import time
from typing import TYPE_CHECKING
from typing import Any
from typing import Iterator
from typing import Optional
from typing import Tuple
import grpc
from apache_beam.metrics import metric
from apache_beam.metrics.execution import MetricResult
from apache_beam.options.pipeline_options import DebugOptions
from apache_beam.options.pipeline_options import PortableOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.portability import common_urns
from apache_beam.portability.api import beam_artifact_api_pb2_grpc
from apache_beam.portability.api import beam_job_api_pb2
from apache_beam.portability.api import beam_runner_api_pb2
from apache_beam.runners import runner
from apache_beam.runners.job import utils as job_utils
from apache_beam.runners.portability import artifact_service
from apache_beam.runners.portability import job_server
from apache_beam.runners.portability import portable_metrics
from apache_beam.runners.portability import portable_stager
from apache_beam.runners.portability.fn_api_runner.fn_runner import translations
from apache_beam.runners.worker import sdk_worker_main
from apache_beam.runners.worker import worker_pool_main
from apache_beam.transforms import environments
if TYPE_CHECKING:
from google.protobuf import struct_pb2 # pylint: disable=ungrouped-imports
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.pipeline import Pipeline
__all__ = ['PortableRunner']
MESSAGE_LOG_LEVELS = {
beam_job_api_pb2.JobMessage.MESSAGE_IMPORTANCE_UNSPECIFIED: logging.INFO,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_DEBUG: logging.DEBUG,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_DETAILED: logging.DEBUG,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_BASIC: logging.INFO,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_WARNING: logging.WARNING,
beam_job_api_pb2.JobMessage.JOB_MESSAGE_ERROR: logging.ERROR,
}
TERMINAL_STATES = [
beam_job_api_pb2.JobState.DONE,
beam_job_api_pb2.JobState.DRAINED,
beam_job_api_pb2.JobState.FAILED,
beam_job_api_pb2.JobState.CANCELLED,
]
ENV_TYPE_ALIASES = {'LOOPBACK': 'EXTERNAL'}
_LOGGER = logging.getLogger(__name__)
class JobServiceHandle(object):
"""
Encapsulates the interactions necessary to submit a pipeline to a job service.
The base set of interactions consists of 3 steps:
- prepare
- stage
- run
"""
def __init__(self, job_service, options, retain_unknown_options=False):
self.job_service = job_service
self.options = options
self.timeout = options.view_as(PortableOptions).job_server_timeout
self._retain_unknown_options = retain_unknown_options
def submit(self, proto_pipeline):
# type: (beam_runner_api_pb2.Pipeline) -> Tuple[str, Iterator[beam_job_api_pb2.JobStateEvent], Iterator[beam_job_api_pb2.JobMessagesResponse]]
"""
Submit and run the pipeline defined by `proto_pipeline`.
"""
prepare_response = self.prepare(proto_pipeline)
retrieval_token = self.stage(
proto_pipeline,
prepare_response.artifact_staging_endpoint.url,
prepare_response.staging_session_token)
return self.run(prepare_response.preparation_id, retrieval_token)
def get_pipeline_options(self):
# type: () -> struct_pb2.Struct
"""
Get `self.options` as a protobuf Struct
"""
# fetch runner options from job service
# retries in case the channel is not ready
def send_options_request(max_retries=5):
num_retries = 0
while True:
try:
# This reports channel is READY but connections may fail
# Seems to be only an issue on Mac with port forwardings
return self.job_service.DescribePipelineOptions(
beam_job_api_pb2.DescribePipelineOptionsRequest(),
timeout=self.timeout)
except grpc.FutureTimeoutError:
# no retry for timeout errors
raise
except grpc._channel._Rendezvous as e:
num_retries += 1
if num_retries > max_retries:
raise e
time.sleep(1)
options_response = send_options_request()
def add_runner_options(parser):
for option in options_response.options:
try:
# no default values - we don't want runner options
# added unless they were specified by the user
add_arg_args = {'action': 'store', 'help': option.description}
if option.type == beam_job_api_pb2.PipelineOptionType.BOOLEAN:
add_arg_args['action'] = 'store_true' \
if option.default_value != 'true' else 'store_false'
elif option.type == beam_job_api_pb2.PipelineOptionType.INTEGER:
add_arg_args['type'] = int
elif option.type == beam_job_api_pb2.PipelineOptionType.ARRAY:
add_arg_args['action'] = 'append'
parser.add_argument("--%s" % option.name, **add_arg_args)
except Exception as e:
          # ignore runner options that are already present;
          # only in this case is a duplicate not treated as an error
if 'conflicting option string' not in str(e):
raise
_LOGGER.debug("Runner option '%s' was already added" % option.name)
all_options = self.options.get_all_options(
add_extra_args_fn=add_runner_options,
retain_unknown_options=self._retain_unknown_options)
return job_utils.pipeline_options_dict_to_struct(all_options)
def prepare(self, proto_pipeline):
# type: (beam_runner_api_pb2.Pipeline) -> beam_job_api_pb2.PrepareJobResponse
"""Prepare the job on the job service"""
return self.job_service.Prepare(
beam_job_api_pb2.PrepareJobRequest(
job_name='job',
pipeline=proto_pipeline,
pipeline_options=self.get_pipeline_options()),
timeout=self.timeout)
def stage(self, pipeline, artifact_staging_endpoint, staging_session_token):
# type: (...) -> Optional[Any]
"""Stage artifacts"""
if artifact_staging_endpoint:
channel = grpc.insecure_channel(artifact_staging_endpoint)
try:
return self._stage_via_portable_service(channel, staging_session_token)
except grpc.RpcError as exn:
if exn.code() == grpc.StatusCode.UNIMPLEMENTED:
# This job server doesn't yet support the new protocol.
return self._stage_via_legacy_service(
pipeline, channel, staging_session_token)
else:
raise
else:
return None
def _stage_via_portable_service(
self, artifact_staging_channel, staging_session_token):
artifact_service.offer_artifacts(
beam_artifact_api_pb2_grpc.ArtifactStagingServiceStub(
channel=artifact_staging_channel),
artifact_service.ArtifactRetrievalService(
artifact_service.BeamFilesystemHandler(None).file_reader),
staging_session_token)
def _stage_via_legacy_service(
self, pipeline, artifact_staging_channel, staging_session_token):
stager = portable_stager.PortableStager(
artifact_staging_channel, staging_session_token)
resources = []
for _, env in pipeline.components.environments.items():
for dep in env.dependencies:
if dep.type_urn != common_urns.artifact_types.FILE.urn:
raise RuntimeError('unsupported artifact type %s' % dep.type_urn)
if dep.role_urn != common_urns.artifact_roles.STAGING_TO.urn:
raise RuntimeError('unsupported role type %s' % dep.role_urn)
type_payload = beam_runner_api_pb2.ArtifactFilePayload.FromString(
dep.type_payload)
role_payload = \
beam_runner_api_pb2.ArtifactStagingToRolePayload.FromString(
dep.role_payload)
resources.append((type_payload.path, role_payload.staged_name))
stager.stage_job_resources(resources, staging_location='')
return stager.commit_manifest()
def run(self, preparation_id, retrieval_token):
# type: (str, str) -> Tuple[str, Iterator[beam_job_api_pb2.JobStateEvent], Iterator[beam_job_api_pb2.JobMessagesResponse]]
"""Run the job"""
try:
state_stream = self.job_service.GetStateStream(
beam_job_api_pb2.GetJobStateRequest(job_id=preparation_id),
timeout=self.timeout)
# If there's an error, we don't always get it until we try to read.
# Fortunately, there's always an immediate current state published.
state_stream = itertools.chain([next(state_stream)], state_stream)
message_stream = self.job_service.GetMessageStream(
beam_job_api_pb2.JobMessagesRequest(job_id=preparation_id),
timeout=self.timeout)
except Exception:
# TODO(BEAM-6442): Unify preparation_id and job_id for all runners.
state_stream = message_stream = None
# Run the job and wait for a result, we don't set a timeout here because
# it may take a long time for a job to complete and streaming
# jobs currently never return a response.
run_response = self.job_service.Run(
beam_job_api_pb2.RunJobRequest(
preparation_id=preparation_id, retrieval_token=retrieval_token))
if state_stream is None:
state_stream = self.job_service.GetStateStream(
beam_job_api_pb2.GetJobStateRequest(job_id=run_response.job_id))
message_stream = self.job_service.GetMessageStream(
beam_job_api_pb2.JobMessagesRequest(job_id=run_response.job_id))
return run_response.job_id, message_stream, state_stream
class PortableRunner(runner.PipelineRunner):
"""
Experimental: No backward compatibility guaranteed.
A BeamRunner that executes Python pipelines via the Beam Job API.
This runner is a stub and does not run the actual job.
This runner schedules the job on a job service. The responsibility of
running and managing the job lies with the job service used.
"""
def __init__(self):
self._dockerized_job_server = None # type: Optional[job_server.JobServer]
@staticmethod
def _create_environment(options):
# type: (PipelineOptions) -> environments.Environment
portable_options = options.view_as(PortableOptions)
# Do not set a Runner. Otherwise this can cause problems in Java's
# PipelineOptions, i.e. ClassNotFoundException, if the corresponding Runner
# does not exist in the Java SDK. In portability, the entry point is clearly
# defined via the JobService.
portable_options.view_as(StandardOptions).runner = None
environment_type = portable_options.environment_type
if not environment_type:
environment_urn = common_urns.environments.DOCKER.urn
elif environment_type.startswith('beam:env:'):
environment_urn = environment_type
else:
# e.g. handle LOOPBACK -> EXTERNAL
environment_type = ENV_TYPE_ALIASES.get(
environment_type, environment_type)
try:
environment_urn = getattr(
common_urns.environments, environment_type).urn
except AttributeError:
raise ValueError('Unknown environment type: %s' % environment_type)
env_class = environments.Environment.get_env_cls_from_urn(environment_urn)
return env_class.from_options(portable_options)
def default_job_server(self, options):
raise NotImplementedError(
'You must specify a --job_endpoint when using --runner=PortableRunner. '
'Alternatively, you may specify which portable runner you intend to '
'use, such as --runner=FlinkRunner or --runner=SparkRunner.')
def create_job_service_handle(self, job_service, options):
return JobServiceHandle(job_service, options)
def create_job_service(self, options):
# type: (PipelineOptions) -> JobServiceHandle
"""
Start the job service and return a `JobServiceHandle`
"""
job_endpoint = options.view_as(PortableOptions).job_endpoint
if job_endpoint:
if job_endpoint == 'embed':
server = job_server.EmbeddedJobServer()
else:
job_server_timeout = options.view_as(PortableOptions).job_server_timeout
server = job_server.ExternalJobServer(job_endpoint, job_server_timeout)
else:
server = self.default_job_server(options)
return self.create_job_service_handle(server.start(), options)
@staticmethod
def get_proto_pipeline(pipeline, options):
# type: (Pipeline, PipelineOptions) -> beam_runner_api_pb2.Pipeline
portable_options = options.view_as(PortableOptions)
proto_pipeline = pipeline.to_runner_api(
default_environment=PortableRunner._create_environment(
portable_options))
# Some runners won't detect the GroupByKey transform unless it has no
# subtransforms. Remove all sub-transforms until BEAM-4605 is resolved.
for _, transform_proto in list(
proto_pipeline.components.transforms.items()):
if transform_proto.spec.urn == common_urns.primitives.GROUP_BY_KEY.urn:
for sub_transform in transform_proto.subtransforms:
del proto_pipeline.components.transforms[sub_transform]
del transform_proto.subtransforms[:]
# Preemptively apply combiner lifting, until all runners support it.
# These optimizations commute and are idempotent.
pre_optimize = options.view_as(DebugOptions).lookup_experiment(
'pre_optimize', 'lift_combiners').lower()
if not options.view_as(StandardOptions).streaming:
flink_known_urns = frozenset([
common_urns.composites.RESHUFFLE.urn,
common_urns.primitives.IMPULSE.urn,
common_urns.primitives.FLATTEN.urn,
common_urns.primitives.GROUP_BY_KEY.urn
])
if pre_optimize == 'none':
pass
elif pre_optimize == 'all':
proto_pipeline = translations.optimize_pipeline(
proto_pipeline,
phases=[
translations.annotate_downstream_side_inputs,
translations.annotate_stateful_dofns_as_roots,
translations.fix_side_input_pcoll_coders,
translations.lift_combiners,
translations.expand_sdf,
translations.fix_flatten_coders,
# fn_api_runner_transforms.sink_flattens,
translations.greedily_fuse,
translations.read_to_impulse,
translations.extract_impulse_stages,
translations.remove_data_plane_ops,
translations.sort_stages
],
known_runner_urns=flink_known_urns)
else:
phases = []
for phase_name in pre_optimize.split(','):
# For now, these are all we allow.
if phase_name in 'lift_combiners':
phases.append(getattr(translations, phase_name))
else:
raise ValueError(
'Unknown or inapplicable phase for pre_optimize: %s' %
phase_name)
proto_pipeline = translations.optimize_pipeline(
proto_pipeline,
phases=phases,
known_runner_urns=flink_known_urns,
partial=True)
return proto_pipeline
def run_pipeline(self, pipeline, options):
# type: (Pipeline, PipelineOptions) -> PipelineResult
portable_options = options.view_as(PortableOptions)
# TODO: https://issues.apache.org/jira/browse/BEAM-5525
# portable runner specific default
if options.view_as(SetupOptions).sdk_location == 'default':
options.view_as(SetupOptions).sdk_location = 'container'
# This is needed as we start a worker server if one is requested
# but none is provided.
if portable_options.environment_type == 'LOOPBACK':
use_loopback_process_worker = options.view_as(
DebugOptions).lookup_experiment('use_loopback_process_worker', False)
portable_options.environment_config, server = (
worker_pool_main.BeamFnExternalWorkerPoolServicer.start(
state_cache_size=sdk_worker_main._get_state_cache_size(options),
data_buffer_time_limit_ms=
sdk_worker_main._get_data_buffer_time_limit_ms(options),
use_process=use_loopback_process_worker))
cleanup_callbacks = [functools.partial(server.stop, 1)]
else:
cleanup_callbacks = []
proto_pipeline = self.get_proto_pipeline(pipeline, options)
job_service_handle = self.create_job_service(options)
job_id, message_stream, state_stream = \
job_service_handle.submit(proto_pipeline)
result = PipelineResult(
job_service_handle.job_service,
job_id,
message_stream,
state_stream,
cleanup_callbacks)
if cleanup_callbacks:
# Register an exit handler to ensure cleanup on exit.
atexit.register(functools.partial(result._cleanup, on_exit=True))
_LOGGER.info(
'Environment "%s" has started a component necessary for the '
'execution. Be sure to run the pipeline using\n'
' with Pipeline() as p:\n'
' p.apply(..)\n'
'This ensures that the pipeline finishes before this program exits.',
portable_options.environment_type)
return result
class PortableMetrics(metric.MetricResults):
def __init__(self, job_metrics_response):
metrics = job_metrics_response.metrics
self.attempted = portable_metrics.from_monitoring_infos(metrics.attempted)
self.committed = portable_metrics.from_monitoring_infos(metrics.committed)
@staticmethod
def _combine(committed, attempted, filter):
all_keys = set(committed.keys()) | set(attempted.keys())
return [
MetricResult(key, committed.get(key), attempted.get(key))
for key in all_keys if metric.MetricResults.matches(filter, key)
]
def query(self, filter=None):
counters, distributions, gauges = [
self._combine(x, y, filter)
for x, y in zip(self.committed, self.attempted)
]
return {
self.COUNTERS: counters,
self.DISTRIBUTIONS: distributions,
self.GAUGES: gauges
}
class PipelineResult(runner.PipelineResult):
def __init__(
self,
job_service,
job_id,
message_stream,
state_stream,
cleanup_callbacks=()):
super(PipelineResult, self).__init__(beam_job_api_pb2.JobState.UNSPECIFIED)
self._job_service = job_service
self._job_id = job_id
self._messages = []
self._message_stream = message_stream
self._state_stream = state_stream
self._cleanup_callbacks = cleanup_callbacks
self._metrics = None
self._runtime_exception = None
def cancel(self):
try:
self._job_service.Cancel(
beam_job_api_pb2.CancelJobRequest(job_id=self._job_id))
finally:
self._cleanup()
@property
def state(self):
runner_api_state = self._job_service.GetState(
beam_job_api_pb2.GetJobStateRequest(job_id=self._job_id)).state
self._state = self._runner_api_state_to_pipeline_state(runner_api_state)
return self._state
@staticmethod
def _runner_api_state_to_pipeline_state(runner_api_state):
return getattr(
runner.PipelineState,
beam_job_api_pb2.JobState.Enum.Name(runner_api_state))
@staticmethod
def _pipeline_state_to_runner_api_state(pipeline_state):
return beam_job_api_pb2.JobState.Enum.Value(pipeline_state)
def metrics(self):
if not self._metrics:
job_metrics_response = self._job_service.GetJobMetrics(
beam_job_api_pb2.GetJobMetricsRequest(job_id=self._job_id))
self._metrics = PortableMetrics(job_metrics_response)
return self._metrics
def _last_error_message(self):
    # Keep only messages that carry a message_response, then select the errors.
messages = [
m.message_response for m in self._messages
if m.HasField('message_response')
]
error_messages = [
m for m in messages
if m.importance == beam_job_api_pb2.JobMessage.JOB_MESSAGE_ERROR
]
if error_messages:
return error_messages[-1].message_text
else:
return 'unknown error'
def wait_until_finish(self, duration=None):
"""
:param duration: The maximum time in milliseconds to wait for the result of
the execution. If None or zero, will wait until the pipeline finishes.
    :return: The final state of the pipeline.
"""
def read_messages():
previous_state = -1
for message in self._message_stream:
if message.HasField('message_response'):
logging.log(
MESSAGE_LOG_LEVELS[message.message_response.importance],
"%s",
message.message_response.message_text)
else:
current_state = message.state_response.state
if current_state != previous_state:
_LOGGER.info(
"Job state changed to %s",
self._runner_api_state_to_pipeline_state(current_state))
previous_state = current_state
self._messages.append(message)
message_thread = threading.Thread(
target=read_messages, name='wait_until_finish_read')
message_thread.daemon = True
message_thread.start()
if duration:
state_thread = threading.Thread(
target=functools.partial(self._observe_state, message_thread),
name='wait_until_finish_state_observer')
state_thread.daemon = True
state_thread.start()
start_time = time.time()
duration_secs = duration / 1000
while (time.time() - start_time < duration_secs and
state_thread.is_alive()):
time.sleep(1)
else:
self._observe_state(message_thread)
if self._runtime_exception:
raise self._runtime_exception
return self._state
def _observe_state(self, message_thread):
try:
for state_response in self._state_stream:
self._state = self._runner_api_state_to_pipeline_state(
state_response.state)
if state_response.state in TERMINAL_STATES:
# Wait for any last messages.
message_thread.join(10)
break
if self._state != runner.PipelineState.DONE:
self._runtime_exception = RuntimeError(
'Pipeline %s failed in state %s: %s' %
(self._job_id, self._state, self._last_error_message()))
except Exception as e:
self._runtime_exception = e
finally:
self._cleanup()
def _cleanup(self, on_exit=False):
if on_exit and self._cleanup_callbacks:
_LOGGER.info(
'Running cleanup on exit. If your pipeline should continue running, '
'be sure to use the following syntax:\n'
' with Pipeline() as p:\n'
' p.apply(..)\n'
'This ensures that the pipeline finishes before this program exits.')
    saved_exception = None
    for callback in self._cleanup_callbacks:
      try:
        callback()
      except Exception as exc:
        # Run all remaining callbacks; re-raise the last failure afterwards.
        saved_exception = exc
    self._cleanup_callbacks = ()
    if saved_exception:
      raise saved_exception
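# --- Illustrative usage sketch (not part of the runner implementation) ---
# A minimal example of driving a portable runner and bounding the wait, in the
# spirit of PipelineResult.wait_until_finish above. The job endpoint and the
# 60-second timeout are assumptions for illustration only.
#
#   import apache_beam as beam
#   from apache_beam.options.pipeline_options import PipelineOptions
#
#   options = PipelineOptions([
#       '--runner=PortableRunner',
#       '--job_endpoint=localhost:8099',
#       '--environment_type=LOOPBACK',
#   ])
#   with beam.Pipeline(options=options) as p:
#     p | beam.Create([1, 2, 3]) | beam.Map(print)
#   # Exiting the `with` block runs the pipeline and waits for completion.
#   # With an explicit run() the wait can be bounded instead:
#   #   result = p.run()
#   #   result.wait_until_finish(duration=60 * 1000)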
|
py | b4163539155b3824c0cb047c5aeecaab05760219 | import unittest
from joker.util.significant_bits import count_significant_bits, truncate_to_significant_bits
class TestSignificantBits(unittest.TestCase):
def test_truncate_to_significant_bits(self):
a = -0b001101
assert truncate_to_significant_bits(a, 2) == -0b1100
a = -0b001111
assert truncate_to_significant_bits(a, 2) == -0b1100
a = 0b1111
assert truncate_to_significant_bits(a, 2) == 0b1100
a = 0b1000000111
assert truncate_to_significant_bits(a, 8) == 0b1000000100
a = 0b1000000111
assert truncate_to_significant_bits(a, 0) == 0b0
a = 0b1000000111
assert truncate_to_significant_bits(a, 500) == a
a = -0b1000000111
assert truncate_to_significant_bits(a, 500) == a
a = 0b10101
assert truncate_to_significant_bits(a, 5) == a
a = 0b10101
assert truncate_to_significant_bits(a, 4) == 0b10100
def test_count_significant_bits(self):
assert count_significant_bits(0b0001) == 1
assert count_significant_bits(0b00010) == 1
assert count_significant_bits(0b01010) == 3
assert count_significant_bits(-0b01010) == 3
assert count_significant_bits(0b0) == 0
assert count_significant_bits(0b1) == 1
assert count_significant_bits(0b1000010101010000) == 12
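# --- Reference sketch (illustration only, not the actual joker.util code) ---
# Functions consistent with the assertions above, assuming "significant bits"
# means the span from the highest set bit down to the lowest set bit of the
# magnitude, with the sign preserved on truncation:
#
#   def count_significant_bits(n):
#       m = abs(n)
#       if m == 0:
#           return 0
#       lowest = (m & -m).bit_length() - 1  # index of the lowest set bit
#       return m.bit_length() - lowest
#
#   def truncate_to_significant_bits(n, bits):
#       sign = -1 if n < 0 else 1
#       m = abs(n)
#       if bits >= m.bit_length():
#           return n
#       mask = ((1 << bits) - 1) << (m.bit_length() - bits)
#       return sign * (m & mask)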
if __name__ == "__main__":
unittest.main()
|
py | b4163774a99012393053bed67a857bf8119a1781 | # -*- coding: utf-8 -*-
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016 MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from nose.tools import eq_, timed
from unittest import TestCase
from minio import Minio
from minio.api import _DEFAULT_USER_AGENT
from .minio_mocks import MockResponse, MockConnection
class ListObjectsTest(TestCase):
@mock.patch('urllib3.PoolManager')
def test_empty_list_objects_works(self, mock_connection):
mock_data = '''<?xml version="1.0"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>bucket</Name>
<Prefix/>
<Marker/>
<IsTruncated>false</IsTruncated>
<MaxKeys>1000</MaxKeys>
<Delimiter/>
</ListBucketResult>
'''
mock_server = MockConnection()
mock_connection.return_value = mock_server
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data))
client = Minio('localhost:9000')
bucket_iter = client.list_objects('bucket', recursive=True)
buckets = []
for bucket in bucket_iter:
buckets.append(bucket)
eq_(0, len(buckets))
@timed(1)
@mock.patch('urllib3.PoolManager')
def test_list_objects_works(self, mock_connection):
mock_data = '''<?xml version="1.0"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>bucket</Name>
<Prefix/>
<Marker/>
<MaxKeys>1000</MaxKeys>
<Delimiter/>
<IsTruncated>false</IsTruncated>
<Contents>
<Key>key1</Key>
<LastModified>2015-05-05T02:21:15.716Z</LastModified>
<ETag>5eb63bbbe01eeed093cb22bb8f5acdc3</ETag>
<Size>11</Size>
<StorageClass>STANDARD</StorageClass>
<Owner>
<ID>minio</ID>
<DisplayName>minio</DisplayName>
</Owner>
</Contents>
<Contents>
<Key>key2</Key>
<LastModified>2015-05-05T20:36:17.498Z</LastModified>
<ETag>2a60eaffa7a82804bdc682ce1df6c2d4</ETag>
<Size>1661</Size>
<StorageClass>STANDARD</StorageClass>
<Owner>
<ID>minio</ID>
<DisplayName>minio</DisplayName>
</Owner>
</Contents>
</ListBucketResult>
'''
mock_server = MockConnection()
mock_connection.return_value = mock_server
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?delimiter=%2F&prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data))
client = Minio('localhost:9000')
bucket_iter = client.list_objects('bucket')
buckets = []
for bucket in bucket_iter:
# cause an xml exception and fail if we try retrieving again
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?delimiter=%2F&prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=''))
buckets.append(bucket)
eq_(2, len(buckets))
@timed(1)
@mock.patch('urllib3.PoolManager')
def test_list_objects_works_well(self, mock_connection):
mock_data1 = '''<?xml version="1.0"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>bucket</Name>
<Prefix/>
<Marker />
<NextMarker>marker</NextMarker>
<MaxKeys>1000</MaxKeys>
<Delimiter/>
<IsTruncated>true</IsTruncated>
<Contents>
<Key>key1</Key>
<LastModified>2015-05-05T02:21:15.716Z</LastModified>
<ETag>5eb63bbbe01eeed093cb22bb8f5acdc3</ETag>
<Size>11</Size>
<StorageClass>STANDARD</StorageClass>
<Owner>
<ID>minio</ID>
<DisplayName>minio</DisplayName>
</Owner>
</Contents>
<Contents>
<Key>key2</Key>
<LastModified>2015-05-05T20:36:17.498Z</LastModified>
<ETag>2a60eaffa7a82804bdc682ce1df6c2d4</ETag>
<Size>1661</Size>
<StorageClass>STANDARD</StorageClass>
<Owner>
<ID>minio</ID>
<DisplayName>minio</DisplayName>
</Owner>
</Contents>
</ListBucketResult>
'''
mock_data2 = '''<?xml version="1.0"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>bucket</Name>
<Prefix/>
<Marker/>
<MaxKeys>1000</MaxKeys>
<Delimiter/>
<IsTruncated>false</IsTruncated>
<Contents>
<Key>key3</Key>
<LastModified>2015-05-05T02:21:15.716Z</LastModified>
<ETag>5eb63bbbe01eeed093cb22bb8f5acdc3</ETag>
<Size>11</Size>
<StorageClass>STANDARD</StorageClass>
<Owner>
<ID>minio</ID>
<DisplayName>minio</DisplayName>
</Owner>
</Contents>
<Contents>
<Key>key4</Key>
<LastModified>2015-05-05T20:36:17.498Z</LastModified>
<ETag>2a60eaffa7a82804bdc682ce1df6c2d4</ETag>
<Size>1661</Size>
<StorageClass>STANDARD</StorageClass>
<Owner>
<ID>minio</ID>
<DisplayName>minio</DisplayName>
</Owner>
</Contents>
</ListBucketResult>
'''
mock_server = MockConnection()
mock_connection.return_value = mock_server
mock_server.mock_add_request(MockResponse('GET',
'https://localhost:9000/bucket/?prefix=',
{'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data1))
client = Minio('localhost:9000')
bucket_iter = client.list_objects('bucket', recursive=True)
buckets = []
for bucket in bucket_iter:
url = 'https://localhost:9000/bucket/?marker=marker&prefix='
mock_server.mock_add_request(MockResponse('GET', url,
{'User-Agent': _DEFAULT_USER_AGENT}, 200,
content=mock_data2))
buckets.append(bucket)
eq_(4, len(buckets))
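# --- Illustrative usage sketch (not part of the test suite) ---
# Against a live server the same listing API looks roughly like the following;
# the endpoint and credentials are placeholders, not working values.
#
#   client = Minio('play.min.io:9000',
#                  access_key='<ACCESS_KEY>', secret_key='<SECRET_KEY>')
#   for obj in client.list_objects('bucket', prefix='key', recursive=True):
#       print(obj.object_name, obj.size)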
|
py | b41637c43f467ec652247da08a951f9811808630 | # -*- coding: utf-8 -*-
from openprocurement.tender.belowthreshold.tests.base import (
test_organization
)
# TenderQuestionResourceTest
def create_tender_question(self):
response = self.app.post_json('/tenders/{}/questions'.format(
self.tender_id),
{'data': {'title': 'question title', 'description': 'question description', 'author': test_organization}})
self.assertEqual(response.status, '201 Created')
self.assertEqual(response.content_type, 'application/json')
question = response.json['data']
self.assertEqual(question['author']['name'], test_organization['name'])
self.assertIn('id', question)
self.assertIn(question['id'], response.headers['Location'])
self.go_to_enquiryPeriod_end()
response = self.app.post_json('/tenders/{}/questions'.format(
self.tender_id),
{'data': {'title': 'question title', 'description': 'question description', 'author': test_organization}},
status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can add question only in enquiryPeriod")
self.set_status('active.auction')
response = self.app.post_json('/tenders/{}/questions'.format(
self.tender_id),
{'data': {'title': 'question title', 'description': 'question description', 'author': test_organization}},
status=403)
self.assertEqual(response.status, '403 Forbidden')
self.assertEqual(response.content_type, 'application/json')
self.assertEqual(response.json['errors'][0]["description"], "Can add question only in enquiryPeriod")
# TenderLotQuestionResourceTest
def tender_has_unanswered_questions(self):
question_id = self.create_question_for("tender", self.tender_id)
self.set_status('active.auction', {'status': 'active.tendering'})
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'active.tendering')
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(self.tender_id, self.tender_token), {'data': {
'reason': 'cancellation reason',
'status': 'active',
}})
self.assertEqual(response.status, '201 Created')
response = self.app.get('/tenders/{}'.format(self.tender_id))
self.assertEqual(response.json['data']['status'], 'cancelled')
def lot_has_unanswered_questions(self):
question_id = self.create_question_for("lot", self.initial_lots[0]['id'])
self.set_status('active.auction', {'status': 'active.tendering'})
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'active.tendering')
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(self.tender_id, self.tender_token), {'data': {
'reason': 'cancellation reason',
'status': 'active',
"cancellationOf": "lot",
"relatedLot": self.initial_lots[0]['id']
}})
self.assertEqual(response.status, '201 Created')
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'unsuccessful')
def item_has_unanswered_questions(self):
items = self.app.get('/tenders/{}'.format(self.tender_id)).json['data']['items']
question_id = self.create_question_for("item", items[0]['id'])
self.set_status('active.auction', {'status': 'active.tendering'})
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'active.tendering')
self.app.authorization = ('Basic', ('broker', ''))
response = self.app.post_json('/tenders/{}/cancellations?acc_token={}'.format(self.tender_id, self.tender_token), {'data': {
'reason': 'cancellation reason',
'status': 'active',
"cancellationOf": "lot",
"relatedLot": self.initial_lots[0]['id']
}})
self.assertEqual(response.status, '201 Created')
self.app.authorization = ('Basic', ('chronograph', ''))
response = self.app.patch_json('/tenders/{}'.format(self.tender_id), {"data": {"id": self.tender_id}})
self.assertEqual(response.json['data']['status'], 'unsuccessful')
|
py | b41637f6ba644b7dc4e052d62b3896de5086b7fa | #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='my_django_extension',
version='1.0',
description='Useful extensions for django',
long_description=open('README.md').read(),
author='aamishbaloch',
url='https://github.com/aamishbaloch/my-django-extensions',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==2.1.5',
'djangorestframework==3.9.1',
'pytz==2018.9',
],
)
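# Illustrative install commands (assuming they are run from the project root):
#   pip install .      # regular install
#   pip install -e .   # editable install for development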
|
py | b416386ec6cf8d6402fe77f284a9e7521905f377 | # -*- coding:utf-8 -*-
#
# Copyright (C) 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for the project.py module."""
from __future__ import print_function
import contextlib
import os
import shutil
import subprocess
import tempfile
import unittest
import error
import git_config
import project
@contextlib.contextmanager
def TempGitTree():
"""Create a new empty git checkout for testing."""
# TODO(vapier): Convert this to tempfile.TemporaryDirectory once we drop
# Python 2 support entirely.
try:
tempdir = tempfile.mkdtemp(prefix='repo-tests')
subprocess.check_call(['git', 'init'], cwd=tempdir)
yield tempdir
finally:
shutil.rmtree(tempdir)
class RepoHookShebang(unittest.TestCase):
"""Check shebang parsing in RepoHook."""
def test_no_shebang(self):
"""Lines w/out shebangs should be rejected."""
DATA = (
'',
'# -*- coding:utf-8 -*-\n',
'#\n# foo\n',
'# Bad shebang in script\n#!/foo\n'
)
for data in DATA:
self.assertIsNone(project.RepoHook._ExtractInterpFromShebang(data))
def test_direct_interp(self):
"""Lines whose shebang points directly to the interpreter."""
DATA = (
('#!/foo', '/foo'),
('#! /foo', '/foo'),
('#!/bin/foo ', '/bin/foo'),
('#! /usr/foo ', '/usr/foo'),
('#! /usr/foo -args', '/usr/foo'),
)
for shebang, interp in DATA:
self.assertEqual(project.RepoHook._ExtractInterpFromShebang(shebang),
interp)
def test_env_interp(self):
"""Lines whose shebang launches through `env`."""
DATA = (
('#!/usr/bin/env foo', 'foo'),
('#!/bin/env foo', 'foo'),
('#! /bin/env /bin/foo ', '/bin/foo'),
)
for shebang, interp in DATA:
self.assertEqual(project.RepoHook._ExtractInterpFromShebang(shebang),
interp)
class FakeProject(object):
"""A fake for Project for basic functionality."""
def __init__(self, worktree):
self.worktree = worktree
self.gitdir = os.path.join(worktree, '.git')
self.name = 'fakeproject'
self.work_git = project.Project._GitGetByExec(
self, bare=False, gitdir=self.gitdir)
self.bare_git = project.Project._GitGetByExec(
self, bare=True, gitdir=self.gitdir)
self.config = git_config.GitConfig.ForRepository(gitdir=self.gitdir)
class ReviewableBranchTests(unittest.TestCase):
"""Check ReviewableBranch behavior."""
def test_smoke(self):
"""A quick run through everything."""
with TempGitTree() as tempdir:
fakeproj = FakeProject(tempdir)
# Generate some commits.
with open(os.path.join(tempdir, 'readme'), 'w') as fp:
fp.write('txt')
fakeproj.work_git.add('readme')
fakeproj.work_git.commit('-mAdd file')
fakeproj.work_git.checkout('-b', 'work')
fakeproj.work_git.rm('-f', 'readme')
fakeproj.work_git.commit('-mDel file')
# Start off with the normal details.
rb = project.ReviewableBranch(
fakeproj, fakeproj.config.GetBranch('work'), 'master')
self.assertEqual('work', rb.name)
self.assertEqual(1, len(rb.commits))
self.assertIn('Del file', rb.commits[0])
d = rb.unabbrev_commits
self.assertEqual(1, len(d))
short, long = next(iter(d.items()))
self.assertTrue(long.startswith(short))
self.assertTrue(rb.base_exists)
# Hard to assert anything useful about this.
self.assertTrue(rb.date)
# Now delete the tracking branch!
fakeproj.work_git.branch('-D', 'master')
rb = project.ReviewableBranch(
fakeproj, fakeproj.config.GetBranch('work'), 'master')
self.assertEqual(0, len(rb.commits))
self.assertFalse(rb.base_exists)
# Hard to assert anything useful about this.
self.assertTrue(rb.date)
class CopyLinkTestCase(unittest.TestCase):
"""TestCase for stub repo client checkouts.
It'll have a layout like:
tempdir/ # self.tempdir
checkout/ # self.topdir
git-project/ # self.worktree
Attributes:
tempdir: A dedicated temporary directory.
    worktree: The top of a single project checkout (the git-project dir).
    topdir: The top of the repo client checkout.
"""
def setUp(self):
self.tempdir = tempfile.mkdtemp(prefix='repo_tests')
self.topdir = os.path.join(self.tempdir, 'checkout')
self.worktree = os.path.join(self.topdir, 'git-project')
os.makedirs(self.topdir)
os.makedirs(self.worktree)
def tearDown(self):
shutil.rmtree(self.tempdir, ignore_errors=True)
@staticmethod
def touch(path):
with open(path, 'w'):
pass
def assertExists(self, path, msg=None):
"""Make sure |path| exists."""
if os.path.exists(path):
return
if msg is None:
msg = ['path is missing: %s' % path]
while path != '/':
path = os.path.dirname(path)
if not path:
# If we're given something like "foo", abort once we get to "".
break
result = os.path.exists(path)
msg.append('\tos.path.exists(%s): %s' % (path, result))
if result:
msg.append('\tcontents: %r' % os.listdir(path))
break
msg = '\n'.join(msg)
raise self.failureException(msg)
class CopyFile(CopyLinkTestCase):
"""Check _CopyFile handling."""
def CopyFile(self, src, dest):
return project._CopyFile(self.worktree, src, self.topdir, dest)
def test_basic(self):
"""Basic test of copying a file from a project to the toplevel."""
src = os.path.join(self.worktree, 'foo.txt')
self.touch(src)
cf = self.CopyFile('foo.txt', 'foo')
cf._Copy()
self.assertExists(os.path.join(self.topdir, 'foo'))
def test_src_subdir(self):
"""Copy a file from a subdir of a project."""
src = os.path.join(self.worktree, 'bar', 'foo.txt')
os.makedirs(os.path.dirname(src))
self.touch(src)
cf = self.CopyFile('bar/foo.txt', 'new.txt')
cf._Copy()
self.assertExists(os.path.join(self.topdir, 'new.txt'))
def test_dest_subdir(self):
"""Copy a file to a subdir of a checkout."""
src = os.path.join(self.worktree, 'foo.txt')
self.touch(src)
cf = self.CopyFile('foo.txt', 'sub/dir/new.txt')
self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
cf._Copy()
self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'new.txt'))
def test_update(self):
"""Make sure changed files get copied again."""
src = os.path.join(self.worktree, 'foo.txt')
dest = os.path.join(self.topdir, 'bar')
with open(src, 'w') as f:
f.write('1st')
cf = self.CopyFile('foo.txt', 'bar')
cf._Copy()
self.assertExists(dest)
with open(dest) as f:
self.assertEqual(f.read(), '1st')
with open(src, 'w') as f:
f.write('2nd!')
cf._Copy()
with open(dest) as f:
self.assertEqual(f.read(), '2nd!')
def test_src_block_symlink(self):
"""Do not allow reading from a symlinked path."""
src = os.path.join(self.worktree, 'foo.txt')
sym = os.path.join(self.worktree, 'sym')
self.touch(src)
os.symlink('foo.txt', sym)
self.assertExists(sym)
cf = self.CopyFile('sym', 'foo')
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
def test_src_block_symlink_traversal(self):
"""Do not allow reading through a symlink dir."""
src = os.path.join(self.worktree, 'bar', 'passwd')
os.symlink('/etc', os.path.join(self.worktree, 'bar'))
self.assertExists(src)
cf = self.CopyFile('bar/foo.txt', 'foo')
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
def test_src_block_copy_from_dir(self):
"""Do not allow copying from a directory."""
src = os.path.join(self.worktree, 'dir')
os.makedirs(src)
cf = self.CopyFile('dir', 'foo')
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
def test_dest_block_symlink(self):
"""Do not allow writing to a symlink."""
src = os.path.join(self.worktree, 'foo.txt')
self.touch(src)
os.symlink('dest', os.path.join(self.topdir, 'sym'))
cf = self.CopyFile('foo.txt', 'sym')
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
def test_dest_block_symlink_traversal(self):
"""Do not allow writing through a symlink dir."""
src = os.path.join(self.worktree, 'foo.txt')
self.touch(src)
os.symlink('/tmp', os.path.join(self.topdir, 'sym'))
cf = self.CopyFile('foo.txt', 'sym/foo.txt')
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
def test_src_block_copy_to_dir(self):
"""Do not allow copying to a directory."""
src = os.path.join(self.worktree, 'foo.txt')
self.touch(src)
os.makedirs(os.path.join(self.topdir, 'dir'))
cf = self.CopyFile('foo.txt', 'dir')
self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
class LinkFile(CopyLinkTestCase):
"""Check _LinkFile handling."""
def LinkFile(self, src, dest):
return project._LinkFile(self.worktree, src, self.topdir, dest)
def test_basic(self):
"""Basic test of linking a file from a project into the toplevel."""
src = os.path.join(self.worktree, 'foo.txt')
self.touch(src)
lf = self.LinkFile('foo.txt', 'foo')
lf._Link()
dest = os.path.join(self.topdir, 'foo')
self.assertExists(dest)
self.assertTrue(os.path.islink(dest))
self.assertEqual('git-project/foo.txt', os.readlink(dest))
def test_src_subdir(self):
"""Link to a file in a subdir of a project."""
src = os.path.join(self.worktree, 'bar', 'foo.txt')
os.makedirs(os.path.dirname(src))
self.touch(src)
lf = self.LinkFile('bar/foo.txt', 'foo')
lf._Link()
self.assertExists(os.path.join(self.topdir, 'foo'))
def test_src_self(self):
"""Link to the project itself."""
dest = os.path.join(self.topdir, 'foo', 'bar')
lf = self.LinkFile('.', 'foo/bar')
lf._Link()
self.assertExists(dest)
self.assertEqual('../git-project', os.readlink(dest))
def test_dest_subdir(self):
"""Link a file to a subdir of a checkout."""
src = os.path.join(self.worktree, 'foo.txt')
self.touch(src)
lf = self.LinkFile('foo.txt', 'sub/dir/foo/bar')
self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
lf._Link()
self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'foo', 'bar'))
def test_src_block_relative(self):
"""Do not allow relative symlinks."""
BAD_SOURCES = (
'./',
'..',
'../',
'foo/.',
'foo/./bar',
'foo/..',
'foo/../foo',
)
for src in BAD_SOURCES:
lf = self.LinkFile(src, 'foo')
self.assertRaises(error.ManifestInvalidPathError, lf._Link)
def test_update(self):
"""Make sure changed targets get updated."""
dest = os.path.join(self.topdir, 'sym')
src = os.path.join(self.worktree, 'foo.txt')
self.touch(src)
lf = self.LinkFile('foo.txt', 'sym')
lf._Link()
self.assertEqual('git-project/foo.txt', os.readlink(dest))
# Point the symlink somewhere else.
os.unlink(dest)
os.symlink('/', dest)
lf._Link()
self.assertEqual('git-project/foo.txt', os.readlink(dest))
|
py | b4163892160c77f11c7a28adac10d601f89c28d8 | # Copyright (c) 2015-2020 Vector 35 Inc
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import ctypes
# Binary Ninja components
import binaryninja
from binaryninja import _binaryninjacore as core
from binaryninja import types
# 2-3 compatibility
from binaryninja import range
from binaryninja import with_metaclass
class _PlatformMetaClass(type):
@property
def list(self):
binaryninja._init_plugins()
count = ctypes.c_ulonglong()
platforms = core.BNGetPlatformList(count)
result = []
for i in range(0, count.value):
result.append(Platform(handle = core.BNNewPlatformReference(platforms[i])))
core.BNFreePlatformList(platforms, count.value)
return result
@property
def os_list(self):
binaryninja._init_plugins()
count = ctypes.c_ulonglong()
platforms = core.BNGetPlatformOSList(count)
result = []
for i in range(0, count.value):
result.append(str(platforms[i]))
core.BNFreePlatformOSList(platforms, count.value)
return result
def __iter__(self):
binaryninja._init_plugins()
count = ctypes.c_ulonglong()
platforms = core.BNGetPlatformList(count)
try:
for i in range(0, count.value):
yield Platform(handle = core.BNNewPlatformReference(platforms[i]))
finally:
core.BNFreePlatformList(platforms, count.value)
def __setattr__(self, name, value):
try:
type.__setattr__(self, name, value)
except AttributeError:
raise AttributeError("attribute '%s' is read only" % name)
def __getitem__(cls, value):
binaryninja._init_plugins()
platform = core.BNGetPlatformByName(str(value))
if platform is None:
raise KeyError("'%s' is not a valid platform" % str(value))
return Platform(handle = platform)
def get_list(cls, os = None, arch = None):
binaryninja._init_plugins()
count = ctypes.c_ulonglong()
if os is None:
platforms = core.BNGetPlatformList(count)
elif arch is None:
platforms = core.BNGetPlatformListByOS(os)
else:
platforms = core.BNGetPlatformListByArchitecture(os, arch.handle)
result = []
for i in range(0, count.value):
result.append(Platform(handle = core.BNNewPlatformReference(platforms[i])))
core.BNFreePlatformList(platforms, count.value)
return result
class Platform(with_metaclass(_PlatformMetaClass, object)):
"""
``class Platform`` contains all information related to the execution environment of the binary, mainly the
calling conventions used.
"""
name = None
def __init__(self, arch = None, handle = None):
if handle is None:
if arch is None:
self.handle = None
raise ValueError("platform must have an associated architecture")
self._arch = arch
self.handle = core.BNCreatePlatform(arch.handle, self.__class__.name)
else:
self.handle = handle
self.__dict__["name"] = core.BNGetPlatformName(self.handle)
self._arch = binaryninja.architecture.CoreArchitecture._from_cache(core.BNGetPlatformArchitecture(self.handle))
def __del__(self):
if self.handle is not None:
core.BNFreePlatform(self.handle)
def __eq__(self, value):
if not isinstance(value, Platform):
return False
return ctypes.addressof(self.handle.contents) == ctypes.addressof(value.handle.contents)
def __ne__(self, value):
if not isinstance(value, Platform):
return True
return ctypes.addressof(self.handle.contents) != ctypes.addressof(value.handle.contents)
@property
def list(self):
"""Allow tab completion to discover metaclass list property"""
pass
@property
def default_calling_convention(self):
"""
Default calling convention.
:getter: returns a CallingConvention object for the default calling convention.
:setter: sets the default calling convention
:type: CallingConvention
"""
result = core.BNGetPlatformDefaultCallingConvention(self.handle)
if result is None:
return None
return binaryninja.callingconvention.CallingConvention(handle=result)
@default_calling_convention.setter
def default_calling_convention(self, value):
core.BNRegisterPlatformDefaultCallingConvention(self.handle, value.handle)
@property
def cdecl_calling_convention(self):
"""
Cdecl calling convention.
:getter: returns a CallingConvention object for the cdecl calling convention.
		:setter: sets the cdecl calling convention
:type: CallingConvention
"""
result = core.BNGetPlatformCdeclCallingConvention(self.handle)
if result is None:
return None
return binaryninja.callingconvention.CallingConvention(handle=result)
@cdecl_calling_convention.setter
def cdecl_calling_convention(self, value):
core.BNRegisterPlatformCdeclCallingConvention(self.handle, value.handle)
@property
def stdcall_calling_convention(self):
"""
Stdcall calling convention.
:getter: returns a CallingConvention object for the stdcall calling convention.
		:setter: sets the stdcall calling convention
:type: CallingConvention
"""
result = core.BNGetPlatformStdcallCallingConvention(self.handle)
if result is None:
return None
return binaryninja.callingconvention.CallingConvention(handle=result)
@stdcall_calling_convention.setter
def stdcall_calling_convention(self, value):
core.BNRegisterPlatformStdcallCallingConvention(self.handle, value.handle)
@property
def fastcall_calling_convention(self):
"""
Fastcall calling convention.
:getter: returns a CallingConvention object for the fastcall calling convention.
		:setter: sets the fastcall calling convention
:type: CallingConvention
"""
result = core.BNGetPlatformFastcallCallingConvention(self.handle)
if result is None:
return None
return binaryninja.callingconvention.CallingConvention(handle=result)
@fastcall_calling_convention.setter
def fastcall_calling_convention(self, value):
core.BNRegisterPlatformFastcallCallingConvention(self.handle, value.handle)
@property
def system_call_convention(self):
"""
System call convention.
:getter: returns a CallingConvention object for the system call convention.
		:setter: sets the system call convention
:type: CallingConvention
"""
result = core.BNGetPlatformSystemCallConvention(self.handle)
if result is None:
return None
return binaryninja.callingconvention.CallingConvention(handle=result)
@system_call_convention.setter
def system_call_convention(self, value):
core.BNSetPlatformSystemCallConvention(self.handle, value.handle)
@property
def calling_conventions(self):
"""
List of platform CallingConvention objects (read-only)
:getter: returns the list of supported CallingConvention objects
:type: list(CallingConvention)
"""
count = ctypes.c_ulonglong()
cc = core.BNGetPlatformCallingConventions(self.handle, count)
result = []
for i in range(0, count.value):
result.append(binaryninja.callingconvention.CallingConvention(handle=core.BNNewCallingConventionReference(cc[i])))
core.BNFreeCallingConventionList(cc, count.value)
return result
@property
def types(self):
"""List of platform-specific types (read-only)"""
count = ctypes.c_ulonglong(0)
type_list = core.BNGetPlatformTypes(self.handle, count)
result = {}
for i in range(0, count.value):
name = types.QualifiedName._from_core_struct(type_list[i].name)
result[name] = types.Type(core.BNNewTypeReference(type_list[i].type), platform = self)
core.BNFreeTypeList(type_list, count.value)
return result
@property
def variables(self):
"""List of platform-specific variable definitions (read-only)"""
count = ctypes.c_ulonglong(0)
type_list = core.BNGetPlatformVariables(self.handle, count)
result = {}
for i in range(0, count.value):
name = types.QualifiedName._from_core_struct(type_list[i].name)
result[name] = types.Type(core.BNNewTypeReference(type_list[i].type), platform = self)
core.BNFreeTypeList(type_list, count.value)
return result
@property
def functions(self):
"""List of platform-specific function definitions (read-only)"""
count = ctypes.c_ulonglong(0)
type_list = core.BNGetPlatformFunctions(self.handle, count)
result = {}
for i in range(0, count.value):
name = types.QualifiedName._from_core_struct(type_list[i].name)
result[name] = types.Type(core.BNNewTypeReference(type_list[i].type), platform = self)
core.BNFreeTypeList(type_list, count.value)
return result
@property
def system_calls(self):
"""List of system calls for this platform (read-only)"""
count = ctypes.c_ulonglong(0)
call_list = core.BNGetPlatformSystemCalls(self.handle, count)
result = {}
for i in range(0, count.value):
name = types.QualifiedName._from_core_struct(call_list[i].name)
t = types.Type(core.BNNewTypeReference(call_list[i].type), platform = self)
result[call_list[i].number] = (name, t)
core.BNFreeSystemCallList(call_list, count.value)
return result
@property
def type_libraries(self):
count = ctypes.c_ulonglong(0)
libs = core.BNGetPlatformTypeLibraries(self.handle, count)
result = []
for i in range(0, count.value):
result.append(binaryninja.TypeLibrary(core.BNNewTypeLibraryReference(libs[i])))
core.BNFreeTypeLibraryList(libs, count.value)
return result
def get_type_libraries_by_name(self, name):
count = ctypes.c_ulonglong(0)
libs = core.BNGetPlatformTypeLibrariesByName(self.handle, name, count)
result = []
for i in range(0, count.value):
result.append(binaryninja.TypeLibrary(core.BNNewTypeLibraryReference(libs[i])))
core.BNFreeTypeLibraryList(libs, count.value)
return result
def __setattr__(self, name, value):
try:
object.__setattr__(self, name, value)
except AttributeError:
raise AttributeError("attribute '%s' is read only" % name)
def __repr__(self):
return "<platform: %s>" % self.name
def __str__(self):
return self.name
def register(self, os):
"""
``register`` registers the platform for given OS name.
:param str os: OS name to register
:rtype: None
"""
core.BNRegisterPlatform(os, self.handle)
def register_calling_convention(self, cc):
"""
``register_calling_convention`` register a new calling convention.
:param CallingConvention cc: a CallingConvention object to register
:rtype: None
"""
core.BNRegisterPlatformCallingConvention(self.handle, cc.handle)
def get_related_platform(self, arch):
result = core.BNGetRelatedPlatform(self.handle, arch.handle)
if not result:
return None
return Platform(handle = result)
def add_related_platform(self, arch, platform):
core.BNAddRelatedPlatform(self.handle, arch.handle, platform.handle)
def get_associated_platform_by_address(self, addr):
new_addr = ctypes.c_ulonglong()
new_addr.value = addr
result = core.BNGetAssociatedPlatformByAddress(self.handle, new_addr)
return Platform(handle = result), new_addr.value
def get_type_by_name(self, name):
name = types.QualifiedName(name)._get_core_struct()
obj = core.BNGetPlatformTypeByName(self.handle, name)
if not obj:
return None
return types.Type(obj, platform = self)
def get_variable_by_name(self, name):
name = types.QualifiedName(name)._get_core_struct()
obj = core.BNGetPlatformVariableByName(self.handle, name)
if not obj:
return None
return types.Type(obj, platform = self)
def get_function_by_name(self, name, exactMatch=False):
name = types.QualifiedName(name)._get_core_struct()
obj = core.BNGetPlatformFunctionByName(self.handle, name, exactMatch)
if not obj:
return None
return types.Type(obj, platform = self)
def get_system_call_name(self, number):
return core.BNGetPlatformSystemCallName(self.handle, number)
def get_system_call_type(self, number):
obj = core.BNGetPlatformSystemCallType(self.handle, number)
if not obj:
return None
return types.Type(obj, platform = self)
def generate_auto_platform_type_id(self, name):
name = types.QualifiedName(name)._get_core_struct()
return core.BNGenerateAutoPlatformTypeId(self.handle, name)
def generate_auto_platform_type_ref(self, type_class, name):
type_id = self.generate_auto_platform_type_id(name)
return types.NamedTypeReference(type_class, type_id, name)
def get_auto_platform_type_id_source(self):
return core.BNGetAutoPlatformTypeIdSource(self.handle)
def parse_types_from_source(self, source, filename=None, include_dirs=[], auto_type_source=None):
"""
``parse_types_from_source`` parses the source string and any needed headers searching for them in
the optional list of directories provided in ``include_dirs``.
:param str source: source string to be parsed
:param str filename: optional source filename
:param include_dirs: optional list of string filename include directories
:type include_dirs: list(str)
:param str auto_type_source: optional source of types if used for automatically generated types
:return: :py:class:`TypeParserResult` (a SyntaxError is thrown on parse error)
:rtype: TypeParserResult
:Example:
>>> platform.parse_types_from_source('int foo;\\nint bar(int x);\\nstruct bas{int x,y;};\\n')
({types: {'bas': <type: struct bas>}, variables: {'foo': <type: int32_t>}, functions:{'bar':
<type: int32_t(int32_t x)>}}, '')
>>>
"""
if filename is None:
filename = "input"
dir_buf = (ctypes.c_char_p * len(include_dirs))()
for i in range(0, len(include_dirs)):
dir_buf[i] = include_dirs[i].encode('charmap')
parse = core.BNTypeParserResult()
errors = ctypes.c_char_p()
result = core.BNParseTypesFromSource(self.handle, source, filename, parse, errors, dir_buf,
len(include_dirs), auto_type_source)
error_str = errors.value
core.BNFreeString(ctypes.cast(errors, ctypes.POINTER(ctypes.c_byte)))
if not result:
raise SyntaxError(error_str)
type_dict = {}
variables = {}
functions = {}
for i in range(0, parse.typeCount):
name = types.QualifiedName._from_core_struct(parse.types[i].name)
type_dict[name] = types.Type(core.BNNewTypeReference(parse.types[i].type), platform = self)
for i in range(0, parse.variableCount):
name = types.QualifiedName._from_core_struct(parse.variables[i].name)
variables[name] = types.Type(core.BNNewTypeReference(parse.variables[i].type), platform = self)
for i in range(0, parse.functionCount):
name = types.QualifiedName._from_core_struct(parse.functions[i].name)
functions[name] = types.Type(core.BNNewTypeReference(parse.functions[i].type), platform = self)
core.BNFreeTypeParserResult(parse)
return types.TypeParserResult(type_dict, variables, functions)
def parse_types_from_source_file(self, filename, include_dirs=[], auto_type_source=None):
"""
``parse_types_from_source_file`` parses the source file ``filename`` and any needed headers searching for them in
the optional list of directories provided in ``include_dirs``.
:param str filename: filename of file to be parsed
:param include_dirs: optional list of string filename include directories
:type include_dirs: list(str)
:param str auto_type_source: optional source of types if used for automatically generated types
:return: :py:class:`TypeParserResult` (a SyntaxError is thrown on parse error)
:rtype: TypeParserResult
:Example:
>>> file = "/Users/binja/tmp.c"
>>> open(file).read()
'int foo;\\nint bar(int x);\\nstruct bas{int x,y;};\\n'
>>> platform.parse_types_from_source_file(file)
({types: {'bas': <type: struct bas>}, variables: {'foo': <type: int32_t>}, functions:
{'bar': <type: int32_t(int32_t x)>}}, '')
>>>
"""
dir_buf = (ctypes.c_char_p * len(include_dirs))()
for i in range(0, len(include_dirs)):
dir_buf[i] = include_dirs[i].encode('charmap')
parse = core.BNTypeParserResult()
errors = ctypes.c_char_p()
result = core.BNParseTypesFromSourceFile(self.handle, filename, parse, errors, dir_buf,
len(include_dirs), auto_type_source)
error_str = errors.value
core.BNFreeString(ctypes.cast(errors, ctypes.POINTER(ctypes.c_byte)))
if not result:
raise SyntaxError(error_str)
type_dict = {}
variables = {}
functions = {}
for i in range(0, parse.typeCount):
name = types.QualifiedName._from_core_struct(parse.types[i].name)
type_dict[name] = types.Type(core.BNNewTypeReference(parse.types[i].type), platform = self)
for i in range(0, parse.variableCount):
name = types.QualifiedName._from_core_struct(parse.variables[i].name)
variables[name] = types.Type(core.BNNewTypeReference(parse.variables[i].type), platform = self)
for i in range(0, parse.functionCount):
name = types.QualifiedName._from_core_struct(parse.functions[i].name)
functions[name] = types.Type(core.BNNewTypeReference(parse.functions[i].type), platform = self)
core.BNFreeTypeParserResult(parse)
return types.TypeParserResult(type_dict, variables, functions)
@property
def arch(self):
""" """
return self._arch
@arch.setter
def arch(self, value):
self._arch = value
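# --- Illustrative usage sketch (not part of the API implementation) ---
# Looking up a platform by name and parsing a type with it; the platform name
# 'linux-x86_64' is an assumption and depends on what Platform.list reports
# for a given Binary Ninja installation.
#
#   import binaryninja
#   platform = binaryninja.Platform['linux-x86_64']
#   result = platform.parse_types_from_source('int foo(int x);')
#   print(result.functions)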
|
py | b41639087316d16d09fef29a876884dde85de203 | # Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
from openvino.tools.mo.front.common.partial_infer.utils import int64_array
from openvino.tools.mo.front.extractor import FrontExtractorOp
from openvino.tools.mo.front.onnx.extractors.utils import onnx_attr
from openvino.tools.mo.ops.squeeze import Squeeze
class SqueezeFrontExtractor(FrontExtractorOp):
op = 'Squeeze'
enabled = True
@classmethod
def extract(cls, node):
axis = int64_array(onnx_attr(node, 'axes', 'ints', default=[]))
attrs = {
'squeeze_dims': axis if len(axis) != 0 else None
}
# update the attributes of the node
Squeeze.update_node_stat(node, attrs)
return cls.enabled
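# Illustrative mapping (assumption, for clarity only): an ONNX Squeeze node
# carrying the attribute axes=[1, 3] is extracted as
#   attrs = {'squeeze_dims': int64_array([1, 3])}
# while a node without an 'axes' attribute yields squeeze_dims=None.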
|
py | b416393e1371e715b0d6a35cbbc78eb826ccac20 | # Generated by Django 3.1.1 on 2020-09-23 15:26
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('questions', '0015_auto_20200923_1411'),
]
operations = [
migrations.AlterField(
model_name='question',
name='user',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.CreateModel(
name='QuestionVote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question_vote', models.CharField(blank=True, default='draft', max_length=10, null=True)),
('question', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='questions.question')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='AnswerVote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('answer_vote', models.CharField(blank=True, default='draft', max_length=10, null=True)),
('answer', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='questions.questionanswer')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
py | b416396e38faf9bea5805bc32c000333eb129631 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for owners.py."""
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from testing_support import filesystem_mock
import owners
ben = 'ben@example.com'
brett = 'brett@example.com'
darin = 'darin@example.com'
jochen = 'jochen@example.com'
john = 'john@example.com'
ken = 'ken@example.com'
peter = 'peter@example.com'
tom = 'tom@example.com'
def owners_file(*email_addresses, **kwargs):
s = ''
if kwargs.get('comment'):
s += '# %s\n' % kwargs.get('comment')
if kwargs.get('noparent'):
s += 'set noparent\n'
if kwargs.get('file'):
s += 'file:%s\n' % kwargs.get('file')
if kwargs.get('lines'):
s += '\n'.join(kwargs.get('lines', [])) + '\n'
return s + '\n'.join(email_addresses) + '\n'
def test_repo():
return filesystem_mock.MockFileSystem(files={
'/DEPS' : '',
'/OWNERS': owners_file(owners.EVERYONE),
'/base/vlog.h': '',
'/chrome/OWNERS': owners_file(ben, brett),
'/chrome/browser/OWNERS': owners_file(brett),
'/chrome/browser/defaults.h': '',
'/chrome/gpu/OWNERS': owners_file(ken),
'/chrome/gpu/gpu_channel.h': '',
'/chrome/renderer/OWNERS': owners_file(peter),
'/chrome/renderer/gpu/gpu_channel_host.h': '',
'/chrome/renderer/safe_browsing/scorer.h': '',
'/content/OWNERS': owners_file(john, darin, comment='foo', noparent=True),
'/content/content.gyp': '',
'/content/bar/foo.cc': '',
'/content/baz/OWNERS': owners_file(brett),
'/content/baz/froboz.h': '',
'/content/baz/ugly.cc': '',
'/content/baz/ugly.h': '',
'/content/garply/OWNERS': owners_file(file='test/OWNERS'),
'/content/garply/foo.cc': '',
'/content/garply/test/OWNERS': owners_file(peter),
'/content/qux/OWNERS': owners_file(peter, file='//content/baz/OWNERS'),
'/content/qux/foo.cc': '',
'/content/views/OWNERS': owners_file(ben, john, owners.EVERYONE,
noparent=True),
'/content/views/pie.h': '',
})
class _BaseTestCase(unittest.TestCase):
def setUp(self):
self.repo = test_repo()
self.files = self.repo.files
self.root = '/'
self.fopen = self.repo.open_for_reading
def db(self, root=None, fopen=None, os_path=None):
root = root or self.root
fopen = fopen or self.fopen
os_path = os_path or self.repo
# pylint: disable=no-value-for-parameter
return owners.Database(root, fopen, os_path)
class OwnersDatabaseTest(_BaseTestCase):
def test_constructor(self):
self.assertNotEquals(self.db(), None)
def test_files_not_covered_by__valid_inputs(self):
db = self.db()
# Check that we're passed in a sequence that isn't a string.
self.assertRaises(AssertionError, db.files_not_covered_by, 'foo', [])
if hasattr(owners.collections, 'Iterable'):
self.assertRaises(AssertionError, db.files_not_covered_by,
(f for f in ['x', 'y']), [])
# Check that the files are under the root.
db.root = '/checkout'
self.assertRaises(AssertionError, db.files_not_covered_by,
['/OWNERS'], [])
db.root = '/'
# Check invalid email address.
self.assertRaises(AssertionError, db.files_not_covered_by,
['OWNERS'], ['foo'])
def assert_files_not_covered_by(self, files, reviewers, unreviewed_files):
db = self.db()
self.assertEquals(db.files_not_covered_by(set(files), set(reviewers)),
set(unreviewed_files))
def test_files_not_covered_by__owners_propagates_down(self):
self.assert_files_not_covered_by(
['chrome/gpu/gpu_channel.h', 'chrome/renderer/gpu/gpu_channel_host.h'],
[ben], [])
def test_files_not_covered_by__partial_covering(self):
self.assert_files_not_covered_by(
['content/content.gyp', 'chrome/renderer/gpu/gpu_channel_host.h'],
[peter], ['content/content.gyp'])
def test_files_not_covered_by__set_noparent_works(self):
self.assert_files_not_covered_by(['content/content.gyp'], [ben],
['content/content.gyp'])
def test_files_not_covered_by__no_reviewer(self):
self.assert_files_not_covered_by(
['content/content.gyp', 'chrome/renderer/gpu/gpu_channel_host.h'],
[], ['content/content.gyp'])
def test_files_not_covered_by__combines_directories(self):
self.assert_files_not_covered_by(['content/content.gyp',
'content/bar/foo.cc',
'chrome/renderer/gpu/gpu_channel_host.h'],
[peter],
['content/content.gyp',
'content/bar/foo.cc'])
def test_files_not_covered_by__multiple_directories(self):
self.assert_files_not_covered_by(
['content/content.gyp', # Not covered
'content/bar/foo.cc', # Not covered (combines in)
'content/baz/froboz.h', # Not covered
'chrome/gpu/gpu_channel.h', # Owned by ken
'chrome/renderer/gpu/gpu_channel_host.h' # Owned by * via parent
],
[ken],
['content/content.gyp', 'content/bar/foo.cc', 'content/baz/froboz.h'])
def test_per_file(self):
self.files['/content/baz/OWNERS'] = owners_file(brett,
        lines=['per-file ugly.*=tom@example.com'])
# peter isn't allowed to approve ugly.cc
self.assert_files_not_covered_by(['content/baz/ugly.cc'],
[peter],
['content/baz/ugly.cc'])
# brett is allowed to approve ugly.cc
self.assert_files_not_covered_by(['content/baz/ugly.cc'],
[brett],
[])
# tom is allowed to approve ugly.cc, but not froboz.h
self.assert_files_not_covered_by(['content/baz/ugly.cc'],
[tom],
[])
self.assert_files_not_covered_by(['content/baz/froboz.h'],
[tom],
['content/baz/froboz.h'])
def test_per_file_with_spaces(self):
# This is the same as test_per_file(), except that we include spaces
# on the per-file line.
# tom is allowed to approve ugly.cc, but not froboz.h
self.files['/content/baz/OWNERS'] = owners_file(brett,
        lines=['per-file ugly.* = tom@example.com'])
# peter isn't allowed to approve ugly.cc
self.assert_files_not_covered_by(['content/baz/ugly.cc'],
[peter],
['content/baz/ugly.cc'])
# brett is allowed to approve ugly.cc
self.assert_files_not_covered_by(['content/baz/ugly.cc'],
[brett],
[])
# tom is allowed to approve ugly.cc, but not froboz.h
self.assert_files_not_covered_by(['content/baz/ugly.cc'],
[tom],
[])
self.assert_files_not_covered_by(['content/baz/froboz.h'],
[tom],
['content/baz/froboz.h'])
def test_per_file_with_nonexistent_file(self):
self.files['/content/baz/OWNERS'] = owners_file(brett,
        lines=['per-file ugly.*=tom@example.com'])
# peter isn't allowed to approve ugly.nonexistent.cc, but brett and tom are.
self.assert_files_not_covered_by(['content/baz/ugly.nonexistent.cc'],
[peter],
['content/baz/ugly.nonexistent.cc'])
self.assert_files_not_covered_by(['content/baz/ugly.nonexistent.cc'],
[brett],
[])
self.assert_files_not_covered_by(['content/baz/ugly.nonexistent.cc'],
[tom],
[])
def test_per_file__set_noparent(self):
self.files['/content/baz/OWNERS'] = owners_file(brett,
        lines=['per-file ugly.*=tom@example.com',
'per-file ugly.*=set noparent'])
# brett isn't allowed to approve ugly.cc
self.assert_files_not_covered_by(['content/baz/ugly.cc'],
[brett],
['content/baz/ugly.cc'])
# tom is allowed to approve ugly.cc, but not froboz.h
self.assert_files_not_covered_by(['content/baz/ugly.cc'],
[tom],
[])
self.assert_files_not_covered_by(['content/baz/froboz.h'],
[tom],
['content/baz/froboz.h'])
def test_per_file_wildcard(self):
self.files['/OWNERS'] = 'per-file DEPS=*\n'
self.assert_files_not_covered_by(['DEPS'], [brett], [])
def test_mock_relpath(self):
# This test ensures the mock relpath has the arguments in the right
# order; this should probably live someplace else.
self.assertEquals(self.repo.relpath('foo/bar.c', 'foo/'), 'bar.c')
self.assertEquals(self.repo.relpath('/bar.c', '/'), 'bar.c')
def test_per_file_glob_across_dirs_not_allowed(self):
    self.files['/OWNERS'] = 'per-file content/*=tom@example.com\n'
self.assertRaises(owners.SyntaxErrorInOwnersFile,
self.db().files_not_covered_by, ['DEPS'], [brett])
def test_file_include_absolute_path(self):
self.assert_files_not_covered_by(['content/qux/foo.cc'], [brett], [])
self.assert_files_not_covered_by(['content/qux/bar.cc'], [peter], [])
self.assert_files_not_covered_by(['content/qux/baz.cc'],
[tom], ['content/qux/baz.cc'])
def test_file_include_relative_path(self):
self.assert_files_not_covered_by(['content/garply/foo.cc'], [peter], [])
self.assert_files_not_covered_by(['content/garply/bar.cc'], [darin], [])
self.assert_files_not_covered_by(['content/garply/baz.cc'],
[tom], ['content/garply/baz.cc'])
def test_file_include_relative_path_non_empty_root(self):
old_root = self.root
self.root = '/content'
self.assert_files_not_covered_by(['garply/foo.cc'], [peter], [])
self.assert_files_not_covered_by(['garply/bar.cc'], [darin], [])
self.assert_files_not_covered_by(['garply/baz.cc'],
[tom], ['garply/baz.cc'])
self.root = old_root
def test_file_include_per_file_absolute_path(self):
self.files['/content/qux/OWNERS'] = owners_file(peter,
lines=['per-file foo.*=file://content/baz/OWNERS'])
self.assert_files_not_covered_by(['content/qux/foo.cc'], [brett], [])
self.assert_files_not_covered_by(['content/qux/baz.cc'],
[brett], ['content/qux/baz.cc'])
def test_file_include_per_file_relative_path(self):
self.files['/content/garply/OWNERS'] = owners_file(brett,
lines=['per-file foo.*=file:test/OWNERS'])
self.assert_files_not_covered_by(['content/garply/foo.cc'], [peter], [])
self.assert_files_not_covered_by(['content/garply/baz.cc'],
[peter], ['content/garply/baz.cc'])
def test_file_include_recursive(self):
self.files['/content/baz/OWNERS'] = owners_file(file='//chrome/gpu/OWNERS')
self.assert_files_not_covered_by(['content/qux/foo.cc'], [ken], [])
def test_file_include_different_filename(self):
# This tests that a file named something other than OWNERS is not treated
# like OWNERS; we want to make sure that ken and peter don't become owners
# for /content, and that other owners for content still work.
self.files['/content/baz/OWNERS'] = owners_file(file='//content/BAZ_OWNERS')
self.files['/content/BAZ_OWNERS'] = owners_file([ken, peter])
self.assert_files_not_covered_by(
['content/baz/baz.cc', 'content/qux/foo.cc'],
[ken], ['content/qux/foo.cc'])
self.assert_files_not_covered_by(
['content/baz/baz.cc', 'content/qux/foo.cc'],
[ken, john], [])
def test_file_include_recursive_loop(self):
self.files['/content/baz/OWNERS'] = owners_file(brett,
file='//content/qux/OWNERS')
self.test_file_include_absolute_path()
  def test_file_include_per_file_different_filename(self):
self.files['/owners/GARPLY_OWNERS'] = owners_file(peter)
self.files['/content/garply/OWNERS'] = owners_file(john,
lines=['per-file foo.*=file://owners/GARPLY_OWNERS'])
self.assert_files_not_covered_by(['content/garply/foo.cc'], [peter], [])
def test_file_include_invalid_filename(self):
self.files['/base/SECURITY_REVIEWERS'] = owners_file(peter)
self.files['/ipc/OWNERS'] = owners_file(file='//base/SECURITY_REVIEWERS')
try:
self.db().reviewers_for(['ipc/ipc_message_utils.h'], None)
self.fail() # pragma: no cover
    except owners.SyntaxErrorInOwnersFile as e:
self.assertTrue(str(e).startswith('/ipc/OWNERS:1'))
def assert_syntax_error(self, owners_file_contents):
db = self.db()
self.files['/foo/OWNERS'] = owners_file_contents
self.files['/foo/DEPS'] = ''
try:
db.reviewers_for(['foo/DEPS'], None)
self.fail() # pragma: no cover
    except owners.SyntaxErrorInOwnersFile as e:
self.assertTrue(str(e).startswith('/foo/OWNERS:1'))
def test_syntax_error__unknown_token(self):
self.assert_syntax_error('{}\n')
def test_syntax_error__unknown_set(self):
self.assert_syntax_error('set myfatherisbillgates\n')
def test_syntax_error__bad_email(self):
self.assert_syntax_error('ben\n')
def test_syntax_error__invalid_absolute_file(self):
self.assert_syntax_error('file://foo/bar/OWNERS\n')
def test_syntax_error__invalid_relative_file(self):
self.assert_syntax_error('file:foo/bar/OWNERS\n')
def test_non_existant_status_file(self):
db = self.db()
self.files['/OWNERS'] = owners_file(brett,
comment='OWNERS_STATUS = nonexistant')
self.files['/foo/DEPS'] = ''
self.assertRaises(IOError, db.reviewers_for, ['foo/DEPS'], None)
def test_comment_to_owners_mapping(self):
db = self.db()
self.files['/OWNERS'] = '\n'.join([
'# first comment',
ben,
brett + ' # inline comment',
'',
darin,
'',
        '# comment preceded by empty line',
'per-file bar.*=%s' % jochen,
john,
'',
ken,
'# comment in the middle',
peter,
tom])
# Force loading of the OWNERS file.
self.files['/bar.cc'] = ''
db.reviewers_for(['bar.cc'], None)
self.assertEqual(db.comments, {
ben: {'': 'first comment'},
brett: {'': 'first comment inline comment'},
        jochen: {'bar.*': 'comment preceded by empty line'},
        john: {'': 'comment preceded by empty line'},
peter: {'': 'comment in the middle'}})
class ReviewersForTest(_BaseTestCase):
def assert_reviewers_for(self, files, potential_suggested_reviewers,
author=None, override_files=None):
db = self.db()
db.override_files = override_files or {}
suggested_reviewers = db.reviewers_for(set(files), author)
self.assertTrue(suggested_reviewers in
[set(suggestion) for suggestion in potential_suggested_reviewers])
def test_reviewers_for__basic_functionality(self):
self.assert_reviewers_for(['chrome/gpu/gpu_channel.h'],
[[ken]])
def test_reviewers_for__set_noparent_works(self):
self.assert_reviewers_for(['content/content.gyp'],
[[john],
[darin]])
def test_reviewers_for__valid_inputs(self):
db = self.db()
# Check that we're passed in a sequence that isn't a string.
self.assertRaises(AssertionError, db.reviewers_for, 'foo', None)
if hasattr(owners.collections, 'Iterable'):
self.assertRaises(AssertionError, db.reviewers_for,
(f for f in ['x', 'y']), None)
# Check that the files are under the root.
db.root = '/checkout'
self.assertRaises(AssertionError, db.reviewers_for, ['/OWNERS'], None)
def test_reviewers_for__wildcard_dir(self):
self.assert_reviewers_for(['DEPS'], [['<anyone>']])
self.assert_reviewers_for(['DEPS', 'chrome/gpu/gpu_channel.h'], [[ken]])
def test_reviewers_for__one_owner(self):
self.assert_reviewers_for([
'chrome/gpu/gpu_channel.h',
'content/baz/froboz.h',
'chrome/renderer/gpu/gpu_channel_host.h'],
[[brett]])
def test_reviewers_for__two_owners(self):
self.assert_reviewers_for([
'chrome/gpu/gpu_channel.h',
'content/content.gyp',
'content/baz/froboz.h',
'content/views/pie.h'],
[[ken, john]])
def test_reviewers_for__all_files(self):
self.assert_reviewers_for([
'chrome/gpu/gpu_channel.h',
'chrome/renderer/gpu/gpu_channel_host.h',
'chrome/renderer/safe_browsing/scorer.h',
'content/content.gyp',
'content/bar/foo.cc',
'content/baz/froboz.h',
'content/views/pie.h'],
[[peter, ken, john]])
def test_reviewers_for__per_file_owners_file(self):
self.files['/content/baz/OWNERS'] = owners_file(lines=[
'per-file ugly.*[email protected]'])
self.assert_reviewers_for(['content/baz/OWNERS'],
[[john],
[darin]])
def test_reviewers_for__per_file(self):
self.files['/content/baz/OWNERS'] = owners_file(lines=[
'per-file ugly.*[email protected]'])
self.assert_reviewers_for(['content/baz/ugly.cc'],
[[tom]])
def test_reviewers_for__two_nested_dirs(self):
# The same owner is listed in two directories (one above the other)
self.assert_reviewers_for(['chrome/browser/defaults.h'],
[[brett]])
# Here, although either ben or brett could review both files,
# someone closer to the gpu_channel_host.h should also be suggested.
# This also tests that we can handle two suggested reviewers
# with overlapping sets of directories properly.
self.files['/chrome/renderer/gpu/OWNERS'] = owners_file(ken)
self.assert_reviewers_for(['chrome/OWNERS',
'chrome/renderer/gpu/gpu_channel_host.h'],
[[ben, ken],
[brett, ken]])
def test_reviewers_for__author_is_known(self):
# We should never suggest ken as a reviewer for his own changes.
self.assert_reviewers_for(['chrome/gpu/gpu_channel.h'],
[[ben], [brett]], author=ken)
def test_reviewers_for__ignores_unowned_files(self):
# Clear the root OWNERS file.
self.files['/OWNERS'] = ''
self.assert_reviewers_for(['base/vlog.h', 'chrome/browser/deafults/h'],
[[brett]])
def test_reviewers_file_includes__absolute(self):
self.assert_reviewers_for(['content/qux/foo.cc'],
[[peter], [brett], [john], [darin]])
def test_reviewers_file_includes__relative(self):
self.assert_reviewers_for(['content/garply/foo.cc'],
[[peter], [john], [darin]])
def test_reviewers_file_includes__per_file(self):
self.files['/content/garply/OWNERS'] = owners_file(brett,
lines=['per-file foo.*=file:test/OWNERS'])
self.assert_reviewers_for(['content/garply/foo.cc'],
[[brett], [peter]])
self.assert_reviewers_for(['content/garply/bar.cc'],
[[brett]])
def test_reviewers_file_includes__per_file_noparent(self):
self.files['/content/garply/OWNERS'] = owners_file(brett,
lines=['per-file foo.*=set noparent',
'per-file foo.*=file:test/OWNERS'])
self.assert_reviewers_for(['content/garply/foo.cc'],
[[peter]])
self.assert_reviewers_for(['content/garply/bar.cc'],
[[brett]])
def test_override_files(self):
self.assert_reviewers_for(['content/baz/froboz.h'], [[jochen]],
override_files={'content/baz/OWNERS': [jochen]})
self.assert_reviewers_for(['content/baz/froboz.h'], [[john],[darin]],
override_files={'content/baz/OWNERS': []})
self.assert_reviewers_for(
['content/baz/froboz.h'], [[jochen]],
override_files={'content/baz/OWNERS': ['file://JOCHEN_OWNERS'],
'JOCHEN_OWNERS': [jochen]})
class LowestCostOwnersTest(_BaseTestCase):
# Keep the data in the test_lowest_cost_owner* methods as consistent with
# test_repo() where possible to minimize confusion.
def check(self, possible_owners, dirs, *possible_lowest_cost_owners):
suggested_owner = owners.Database.lowest_cost_owner(possible_owners, dirs)
self.assertTrue(suggested_owner in possible_lowest_cost_owners)
def test_one_dir_with_owner(self):
# brett is the only immediate owner for stuff in baz; john is also
# an owner, but further removed. We should always get brett.
self.check({brett: [('content/baz', 1)],
john: [('content/baz', 2)]},
['content/baz'],
brett)
# john and darin are owners for content; the suggestion could be either.
def test_one_dir_with_two_owners(self):
self.check({john: [('content', 1)],
darin: [('content', 1)]},
['content'],
john, darin)
def test_one_dir_with_two_owners_in_parent(self):
# As long as the distance is the same, it shouldn't matter (brett isn't
# listed in this case).
self.check({john: [('content/baz', 2)],
darin: [('content/baz', 2)]},
['content/baz'],
john, darin)
def test_two_dirs_two_owners(self):
# If they both match both dirs, they should be treated equally.
self.check({john: [('content/baz', 2), ('content/bar', 2)],
darin: [('content/baz', 2), ('content/bar', 2)]},
['content/baz', 'content/bar'],
john, darin)
# Here brett is better since he's closer for one of the two dirs.
self.check({brett: [('content/baz', 1), ('content/views', 1)],
darin: [('content/baz', 2), ('content/views', 1)]},
['content/baz', 'content/views'],
brett)
def test_hierarchy(self):
# the choices in these tests are more arbitrary value judgements;
# also, here we drift away from test_repo() to cover more cases.
# Here ben isn't picked, even though he can review both; we prefer
# closer reviewers.
self.check({ben: [('chrome/gpu', 2), ('chrome/renderer', 2)],
ken: [('chrome/gpu', 1)],
peter: [('chrome/renderer', 1)]},
['chrome/gpu', 'chrome/renderer'],
ken, peter)
# Here we always pick ben since he can review either dir as well as
# the others but can review both (giving us fewer total reviewers).
self.check({ben: [('chrome/gpu', 1), ('chrome/renderer', 1)],
ken: [('chrome/gpu', 1)],
peter: [('chrome/renderer', 1)]},
['chrome/gpu', 'chrome/renderer'],
ben)
# However, three reviewers is too many, so ben gets this one.
self.check({ben: [('chrome/gpu', 2), ('chrome/renderer', 2),
('chrome/browser', 2)],
ken: [('chrome/gpu', 1)],
peter: [('chrome/renderer', 1)],
brett: [('chrome/browser', 1)]},
['chrome/gpu', 'chrome/renderer',
'chrome/browser'],
ben)
if __name__ == '__main__':
unittest.main()
|
py | b4163a1c0bf943cfec39cf4354dc2184e96b1afd | import json
from base64 import urlsafe_b64encode, urlsafe_b64decode
from pyramid_jinja2.filters import route_url_filter, static_url_filter
def encode_route(request):
"""Jinja2 filter that returns the current route as a JSON object, which is then URL-safe base64 encoded."""
if request.matched_route:
data = {'route': request.matched_route.name,
'params': request.matchdict,
'query': list(request.params.items())}
return urlsafe_b64encode(json.dumps(data).encode('utf-8')).decode()
return None
def decode_route(request, default_route='root', default_route_params=None, default_route_query=None):
"""Jinja2 filter that decodes and returns the route URL encoded with :func:`~toja.routes.encode_route`."""
if 'redirect' in request.params and request.params['redirect']:
try:
data = json.loads(urlsafe_b64decode(request.params['redirect'].encode()).decode('utf-8'))
return request.route_url(data['route'], **data['params'], _query=data['query'])
except Exception:
pass
if not default_route_params:
default_route_params = {}
return request.route_url(default_route, **default_route_params, _query=default_route_query)
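# Illustrative round-trip sketch (added comment, not part of the original module):
# encode_route captures the current route as a URL-safe token that can be passed
# through a ?redirect= query parameter, and decode_route turns it back into a URL.
# The 'user.view' page and the login flow below are only an assumed example.
#
#   token = encode_route(request)                     # e.g. while on /users/42
#   login_url = request.route_url('user.login', _query={'redirect': token})
#   ...
#   # later, inside the login view:
#   next_url = decode_route(request)                  # /users/42 again, or the
#                                                     # 'root' route if the token
#                                                     # is missing or invalid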
def update_current_route(request, params=None, query=None):
"""Update the current route with new parameters or query."""
if query:
tmp = []
for key in request.params.keys():
if key in query:
tmp.append((key, query[key]))
else:
for val in request.params.getall(key):
tmp.append((key, val))
for key, value in query.items():
tmp.append((key, value))
query = tmp
if params and query:
return request.current_route_url(**params, _query=query)
elif params:
return request.current_route_url(**params)
elif query:
return request.current_route_url(_query=query)
else:
return request.current_route_url()
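# Illustrative usage sketch (assumption, not original code): build a link to the
# current route with one query parameter overridden while the remaining request
# parameters are carried over; the 'page' parameter name is only an example.
#
#   next_page_url = update_current_route(request, query={'page': '2'})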
def includeme(config):
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('root', '/')
config.add_route('user.index', '/users')
config.add_route('user.register', '/users/register')
config.add_route('user.confirm', '/users/confirm/:email/:token')
config.add_route('user.login', '/users/login')
config.add_route('user.logout', '/users/logout')
config.add_route('user.forgotten_password', '/users/forgotten_password')
config.add_route('user.view', '/users/:uid')
config.add_route('user.edit', '/users/:uid/edit')
config.add_route('user.delete', '/users/:uid/delete', request_method='POST')
config.add_route('search', '/search')
config.add_route('search.autosuggest', '/search/autosuggest/:category')
config.add_route('explore.recommend.mlt', '/explore/mlt/:jid')
config.add_route('explore.recommend.random', '/explore/random')
config.add_route('contribute', '/contribute')
config.add_route('contribute.sources', '/contribute/sources')
config.add_route('contribute.workbench', '/contribute/workbench')
config.add_route('contribute.workbench.edit', '/contribute/workbench/:sid')
config.add_route('joke.view', '/jokes/:jid')
config.add_route('joke.image', '/jokes/:jid/image')
config.add_route('joke.rate', '/jokes/:jid/rate')
config.add_route('source.index', '/sources')
config.add_route('source.view', '/sources/:sid')
config.add_route('source.image', '/sources/:sid/image')
config.add_route('source.edit', '/sources/:sid/edit')
config.add_route('source.delete', '/sources/:sid/delete', request_method='POST')
config.add_route('admin.index', '/admin')
config.add_route('admin.search', '/admin/search')
config.add_route('admin.jokes', '/admin/jokes')
config.add_route('api', '/api')
config.add_route('api.sources.get', '/api/sources', request_method='GET')
config.add_route('api.sources.post', '/api/sources', request_method='POST')
config.add_route('api.source.get', '/api/sources/:sid', request_method='GET')
config.add_route('api.source.put', '/api/sources/:sid', request_method='PUT')
config.add_route('api.source.delete', '/api/sources/:sid', request_method='DELETE')
config.add_route('api.jokes.get', '/api/jokes', request_method='GET')
config.add_route('api.jokes.post', '/api/jokes', request_method='POST')
config.add_route('api.joke.get', '/api/jokes/:jid', request_method='GET')
config.add_route('api.joke.put', '/api/jokes/:jid', request_method='PUT')
config.add_route('api.joke.delete', '/api/jokes/:jid', request_method='DELETE')
config.add_route('api.transcriptions.get', '/api/transcriptions', request_method='GET')
config.add_route('api.transcriptions.post', '/api/transcriptions', request_method='POST')
config.add_route('api.transcription.get', '/api/transcriptions/:tid', request_method='GET')
config.add_route('api.transcription.patch', '/api/transcriptions/:tid', request_method='PATCH')
config.add_route('api.transcription.delete', '/api/transcriptions/:tid', request_method='DELETE')
# Jinja2 configuration
config.get_jinja2_environment().filters['static_url'] = static_url_filter
config.get_jinja2_environment().filters['route_url'] = route_url_filter
config.get_jinja2_environment().filters['encode_route'] = encode_route
config.get_jinja2_environment().filters['decode_route'] = decode_route
config.get_jinja2_environment().filters['update_current_route'] = update_current_route
|
py | b4163bccb991c5aef9c257206642e32ce758ba93 | #---------------------------------------------------------------------------
# Copyright 2013 The Open Source Electronic Health Record Agent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
import sys
import os
import time
from OSEHRAHelper import PROMPT
def startFileman(VistA):
# Start FileMan as the programmer user and set XUMF to 1 which lets the user
# change information in Kernel files
# Starts at the VistA Prompt
VistA.wait(PROMPT)
VistA.write('S DUZ=1 S XUMF=1 D Q^DI')
VistA.wait('Select OPTION:')
def signonZU(VistA,acc_code,ver_code):
# Sign a user into the ZU menu system
# The User must have a valid access code and verify code.
# If the user needs to change the Verify Code, the script will append a "!" to the old code
# and use that as the new one.
# Starts at the VistA prompt.
VistA.wait(PROMPT,60)
VistA.write('D ^ZU')
VistA.wait('ACCESS CODE:')
VistA.write(acc_code)
VistA.wait('VERIFY CODE:')
VistA.write(ver_code)
index = VistA.multiwait(['TYPE NAME','verify code:'])
if index==1:
VistA.write(ver_code)
VistA.wait('VERIFY CODE:')
VistA.write(ver_code+"!")
VistA.wait('right:')
VistA.write(ver_code+"!")
VistA.wait('TYPE NAME:')
VistA.write('')
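# Illustrative call (added comment, not part of the original script). The access
# and verify codes shown are the ones this script later assigns to the System
# Manager account in addSystemManager():
#
#   signonZU(VistA, 'SM1234', 'SM1234!!')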
def initializeFileman(VistA,site_name,site_number):
# Initializes FileMan via the DINIT routine.
# The command needs a site name to change to and a local site number
# Script uses value of CMake variable TEST_VISTA_SETUP_SITE_NAME as the name
# and 6161 as the site number.
VistA.write('D ^DINIT')
VistA.wait('Initialize VA FileMan now?')
VistA.write('Yes')
VistA.wait('SITE NAME:')
VistA.write(site_name)
VistA.wait('SITE NUMBER')
VistA.write(site_number)
# It will also change the operating system file to match the local environment type
# found by the set up.
VistA.wait('Do you want to change the MUMPS OPERATING SYSTEM File?')
VistA.write('Yes')
VistA.wait('TYPE OF MUMPS SYSTEM YOU ARE USING')
if VistA.type=='cache':
VistA.write('CACHE')
else:
VistA.write('GT.M(UNIX)')
VistA.wait(PROMPT,60)
# Use the ZUSET routine to rename the correct ZU* for the system.
VistA.write('D ^ZUSET')
VistA.wait('Rename')
VistA.write('Yes')
def setupPrimaryHFSDir(VistA,hfs_dir):
  # Set up the primary HFS directory in the
  # Kernel System Parameters file.
  # Enter "@" to delete the existing path, or enter a new file path.
startFileman(VistA)
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('KERNEL SYSTEM PARAMETERS')
VistA.wait('EDIT WHICH FIELD')
VistA.write('PRIMARY HFS DIRECTORY')
VistA.wait('THEN EDIT FIELD')
VistA.write('')
VistA.wait('DOMAIN NAME')
# `1 is the notation to grab the entry with a number of 1
VistA.write('`1')
VistA.wait('PRIMARY HFS DIRECTORY')
VistA.write(os.path.normpath(hfs_dir))
# Multiwait to capture the possible outcomes:
# SURE YOU WANT TO DELETE: File has an entry and the @ will delete it
# DOMAIN NAME: Entry was an acceptable response
# PRIMARY HFS DIRECTORY: Response was not accepted, could be due to
# deleting an empty file entry
index = VistA.multiwait(['SURE YOU WANT TO DELETE','DOMAIN NAME','PRIMARY HFS DIRECTORY'])
if index == 0:
VistA.write('Y')
VistA.wait('DOMAIN NAME')
if index == 2:
VistA.write("")
VistA.wait("DOMAIN NAME")
VistA.write('')
VistA.wait('Select OPTION:')
VistA.write('')
def configureNULLDevice(VistA):
# Ensure that the null device is correctly configured by adding
# a $I for the correct platform rather than VMS and removing
# sign-on capabilities
startFileman(VistA)
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('DEVICE')
VistA.wait('EDIT WHICH FIELD')
VistA.write('$I\rSIGN-ON/SYSTEM DEVICE\r')
VistA.wait('NAME:')
VistA.write('NULL\r1')
VistA.wait('//')
# Path added is dependent on the platform that is being used.
if sys.platform=='win32':
VistA.write('//./nul\rNO\r')
else:
VistA.write('/dev/null\rNO\r')
VistA.wait("Select OPTION")
VistA.write("")
def setupVistADomain(VistA,site_name):
# Enter the site name into the DOMAIN file via FileMan
startFileman(VistA)
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('DOMAIN\r')
VistA.wait('Select DOMAIN NAME')
VistA.write(site_name)
# Multiwait for possible outcomes:
# Are you adding: Domain is new and will add it to the system
# NAME: Domain exists already
index = VistA.multiwait(["Are you adding","NAME"])
if index == 0:
VistA.write("Y")
else:
VistA.write("")
VistA.wait("FLAGS")
VistA.write('^\r\r')
VistA.wait(PROMPT,60)
# christen the domain via the XMUDCHR routine.
VistA.write('D CHRISTEN^XMUDCHR')
VistA.wait('Are you sure you want to change the name of this facility?')
VistA.write('Yes')
VistA.wait('Select DOMAIN NAME')
VistA.write(site_name)
VistA.wait('PARENT')
VistA.write('')
VistA.wait('TIME ZONE')
# Attempts to pull the timezone from the local machine via Python
# If entry is not accepted, will default to EST
VistA.write(time.strftime('%Z').replace(' Time',''))
index = VistA.multiwait([VistA.prompt,'TIME ZONE'])
if index==1:
VistA.write('EST')
VistA.wait(PROMPT,60)
# Next, Find IEN of new site name and add entries of new domain to
# Kernel System Parameters and RPC Broker Site Parameters files
VistA.IEN('DOMAIN',site_name)
VistA.wait(PROMPT,60)
VistA.write('S $P(^XWB(8994.1,1,0),"^")=' + VistA.IENumber)
VistA.write('S $P(^XTV(8989.3,1,0),"^")=' + VistA.IENumber)
# Then, re-index both files with the FileMan Utility.
startFileman(VistA)
VistA.write('UTILITY')
VistA.wait('UTILITY OPTION')
VistA.write('RE')
VistA.wait('MODIFY WHAT FILE')
VistA.write('8989.3\rNO\rY\rY')
VistA.wait('UTILITY OPTION')
VistA.write('RE')
VistA.wait('MODIFY WHAT FILE')
VistA.write('8994.1\rNO\rY\rY\r')
VistA.wait('Select OPTION')
VistA.write("")
def setupBoxVolPair(VistA,volume_set,site_name,tcp_port):
# Query the instance for the Box-volume pair of the machine
VistA.getenv(volume_set)
# Rename the first Box-volume entry in the Taskman Site Parameters file
# to match what was queried above
startFileman(VistA)
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('14.7')
VistA.wait('ALL//')
VistA.write('')
VistA.wait('Select TASKMAN SITE PARAMETERS BOX-VOLUME PAIR:')
VistA.write('`1')
VistA.wait('//')
VistA.write(VistA.boxvol)
VistA.wait('RESERVED')
VistA.write('^\r')
#time.sleep(5)
# Add the Box-volume pair to the RPC Broker parameters for the local domain
# Also adds the information for the new style RPC Broker Listener on the supplied TCP port
# if a Cache system, will start a task to start the Listener, and put the
# listener under the Listener Starter's control
# if a GT.M system, will create the information but not start it.
VistA.wait('Select OPTION')
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('8994.1')
VistA.wait('EDIT WHICH FIELD')
VistA.write('LISTENER')
VistA.wait("SUB-FIELD")
VistA.write("")
VistA.wait("THEN EDIT FIELD")
VistA.write("")
VistA.wait('Select RPC BROKER SITE PARAMETERS DOMAIN NAME')
VistA.write(site_name)
VistA.wait("OK")
VistA.write("Y")
VistA.wait("BOX-VOLUME PAIR")
VistA.write(VistA.boxvol + '\r')
VistA.wait("BOX-VOLUME PAIR")
VistA.write("")
VistA.wait("Select PORT")
VistA.write(tcp_port + '\rY')
if VistA.type=='cache':
VistA.write('1\r1\r1\r')
else:
VistA.write('1\r\r\r')
VistA.wait("Select OPTION")
VistA.write("")
def setupVolumeSet(VistA,site_name,volume_set,namespace=""):
# Rename first entry in the Volume Set file to match
# the CMake value of TEST_VISTA_SETUP_VOLUME_SET.
startFileman(VistA)
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('14.5\r')
VistA.wait('Select VOLUME SET')
VistA.write('`1')
VistA.wait('VOLUME SET:')
VistA.write(volume_set+ '\r\r\r\r\r')
VistA.wait('TASKMAN FILES UCI')
if VistA.type=='cache':
VistA.write(namespace+'\r\r\r\r\r\r')
else:
VistA.write(volume_set +'\r\r\r\r\r\r')
# Add the Volume set information to the Kernel System Parameters File
VistA.wait('Select OPTION')
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('KERNEL SYSTEM PARAMETERS\rVOLUME SET\r\r')
VistA.wait('Select KERNEL SYSTEM PARAMETERS DOMAIN NAME:')
VistA.write(site_name + '\r')
VistA.wait('VOLUME SET')
VistA.write(volume_set)
index = VistA.multiwait(['Are you adding','VOLUME SET'])
if index==0:
VistA.write('Y')
elif index==1:
VistA.write('')
# Set up basic information about sign-on to the domain via the Volume Set
VistA.wait('MAX SIGNON ALLOWED')
VistA.write('500')
VistA.wait('LOG SYSTEM RT')
VistA.write('N')
VistA.wait('VOLUME SET')
VistA.write('\r\r')
def scheduleOption(VistA,optionName):
# If using Cache as the M environment, Schedule a task to start the
# XWB Listener Starter on the start up of TaskMan
VistA.wait(PROMPT)
VistA.write('S DUZ=1 D ^XUP')
VistA.wait('Select OPTION NAME')
VistA.write('EVE\r1')
VistA.wait('Systems Manager Menu')
VistA.write('Taskman Management')
VistA.wait('Select Taskman Management')
VistA.write('SCHED')
VistA.wait('reschedule:')
VistA.write(optionName + '\rY')
VistA.wait('COMMAND:')
VistA.write('\r^SPECIAL QUEUEING\rSTARTUP\rS\rE\r')
VistA.wait('Select Taskman Management')
VistA.write('')
VistA.wait('Systems Manager Menu')
VistA.write('')
VistA.wait('Do you really want to halt')
VistA.write('Y')
def restartTaskMan(VistA):
# Restarts the TaskMan instance via the Taskman Management Utilities Menu.
VistA.wait(PROMPT)
VistA.write('S DUZ=1 D ^XUP')
VistA.wait('Select OPTION NAME')
VistA.write('EVE\r1')
VistA.wait('Systems Manager Menu')
VistA.write('Taskman Management')
VistA.wait('Select Taskman Management')
VistA.write('Taskman Management Utilities')
VistA.wait('Select Taskman Management Utilities')
VistA.write('Restart Task Manager\rY')
VistA.wait('Select Taskman Management Utilities')
VistA.write('')
VistA.wait('Select Taskman Management')
VistA.write('')
VistA.wait('Select Systems Manager Menu')
VistA.write('')
VistA.wait('Do you really want to halt')
VistA.write('Y')
VistA.wait(PROMPT)
VistA.write('K')
def addSystemManager(VistA):
# Add the super user System Manager via the User Management Menu
# Set basic information about the user: Name,SSN, Sex ....
VistA.wait(PROMPT,60)
VistA.write('S DUZ=1 D ^XUP')
VistA.wait('Select OPTION NAME')
VistA.write('EVE\r1')
VistA.wait('Systems Manager Menu')
VistA.write('USER MANAGEMENT')
VistA.wait('User Management')
VistA.write('ADD')
VistA.wait('Enter NEW PERSON')
VistA.write('MANAGER,SYSTEM')
index = VistA.multiwait(['Are you adding','Want to reactivate'])
if index == 0:
VistA.write('Y')
VistA.wait('INITIAL:')
VistA.write('SM')
VistA.wait('SSN:')
VistA.write('000000001')
VistA.wait('SEX:')
VistA.write('M')
VistA.wait('NPI')
VistA.write('')
VistA.wait('NAME COMPONENTS')
# A ScreenMan form opens at this point, and the following information is set:
# Primary Menu: EVE
# Secondary Menu: OR PARAM COORDINATOR MENU, TIU IRM MAINTENANCE MENU,
# XPAR MENU TOOLS,DG REGISTER PATIENT
# Access Code: SM1234
# Verify Code: SM1234!!
VistA.write('\r\r\r\r\r^PRIMARY MENU OPTION\rEVE\r1\r^Want to edit ACCESS CODE\rY\rSM1234\rSM1234\r^Want to edit VERIFY CODE\rY\rSM1234!!\rSM1234!!\r^SECONDARY MENU OPTIONS\rOR PARAM COORDINATOR MENU\rY\r\r\r\rTIU IRM MAINTENANCE MENU\rY\r\r\r\rXPAR MENU TOOLS\rY\r\r\r\rDG REGISTER PATIENT\rY\r\r\r\r^MULTIPLE SIGN-ON\r1\r1\r99\r^SERVICE/SECTION\rIRM\r^\rY')
# Exiting the ScreenMan form, Allocate Security Keys
# For Kernel Access: XUMGR, XUPROG, XUPROGMODE
# and Scheduling Access: SD SUPERVISOR, SDWL PARAMETER, SDWL MENU
VistA.wait('User Account Access Letter')
VistA.write('NO')
VistA.wait('wish to allocate security keys?')
VistA.write('Y')
VistA.wait('Allocate key')
VistA.write('XUMGR')
VistA.wait('Another key')
VistA.write('XUPROG\r1')
VistA.wait('Another key')
VistA.write('XUPROGMODE')
VistA.wait('Another key')
VistA.write('SD SUPERVISOR')
VistA.wait('Another key')
VistA.write('SDWL PARAMETER')
VistA.wait('Another key')
VistA.write('SDWL MENU')
VistA.wait('Another key')
VistA.write('')
VistA.wait('Another holder')
VistA.write('')
VistA.wait('YES//')
VistA.write('')
VistA.wait('mail groups?')
VistA.write('\r')
VistA.wait('Systems Manager Menu')
VistA.write('\rY')
VistA.wait(PROMPT,60)
# Get the record number of the user that was just created
VistA.IEN('NEW PERSON','MANAGER,SYSTEM')
VistA.wait(PROMPT,60)
# Set a piece of the New Person global corresponding to the MANAGER,SYSTEM
# to "@" to tell FileMan that user is a programmer
VistA.write('S DUZ=' + VistA.IENumber + ' S $P(^VA(200,DUZ,0),"^",4)="@"')
def addInstitution(VistA,inst_name,station_number):
# In FileMan, add a entry to the Institution file
# Pass in the name and number as arguments to allow for
# multiple additions.
startFileman(VistA)
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE:')
VistA.write('4')
VistA.wait('EDIT WHICH FIELD')
VistA.write('STATION NUMBER')
VistA.wait('THEN EDIT FIELD')
VistA.write('')
VistA.wait('Select INSTITUTION NAME:')
VistA.write(inst_name)
index = VistA.multiwait(['Are you adding','STATION NUMBER'])
if index==0:
VistA.write('Y')
VistA.wait('STATION NUMBER:')
VistA.write(station_number)
VistA.wait('Select INSTITUTION NAME:')
VistA.write('')
VistA.wait('Select OPTION:')
VistA.write('')
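# Illustrative call (added comment; the institution name matches the one selected
# later in setupStrepTest, while the station number is only a placeholder):
#
#   addInstitution(VistA, 'VISTA HEALTH CARE', '6161')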
def addDivision(VistA,div_name, facility_number,station_number):
# Adds a division to the VistA instance via FileMan,
# Each Division needs a name and a facility number. The station number
# points back to the recently created Institution
startFileman(VistA)
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE:')
VistA.write('40.8')
VistA.wait('EDIT WHICH FIELD')
VistA.write('FACILITY NUMBER')
VistA.wait('THEN EDIT FIELD')
VistA.write('INSTITUTION FILE POINTER')
VistA.wait('THEN EDIT FIELD')
VistA.write('')
VistA.wait('DIVISION NAME')
VistA.write(div_name)
VistA.wait('Are you adding')
VistA.write('Y')
VistA.wait('MEDICAL CENTER DIVISION NUM:')
VistA.write('')
VistA.wait('FACILITY NUMBER')
VistA.write(facility_number)
VistA.write('')
VistA.wait('INSTITUTION FILE POINTER')
VistA.write(station_number)
VistA.wait('DIVISION NAME')
VistA.write('')
VistA.wait('Select OPTION')
VistA.write('')
def setupStrepTest(VistA):
# The Sikuli test for CPRS orders a Streptozyme test for the patient
# This information ensures the test can be ordered at the VistA Health care
# Facility
# Add a NUMERIC IDENTIFIER to the Chemistry ACCESSION Area
# This is necessary to add a laboratory test to an Accession
# area at an Institution.
startFileman(VistA)
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('ACCESSION\r1')
VistA.wait('EDIT WHICH FIELD')
VistA.write('.4\r')
VistA.wait('Select ACCESSION AREA')
VistA.write('CHEMISTRY')
VistA.wait('NUMERIC IDENTIFIER')
VistA.write('CH\r')
# Change the STREPTOZYME test to be accessioned through the Chemistry
# area at the Vista Health Care institution
VistA.wait('OPTION')
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('LABORATORY TEST')
VistA.wait('EDIT WHICH FIELD')
VistA.write('ACCESSION AREA\r\r')
VistA.wait('Select LABORATORY TEST NAME')
VistA.write('STREPTOZYME')
VistA.wait('Select INSTITUTION')
VistA.write('VISTA HEALTH CARE')
VistA.wait('ACCESSION AREA')
VistA.write('CHEMISTRY')
VistA.wait('Select LABORATORY TEST NAME')
VistA.write('')
# Change the Package Prefix of the ONCE schedule to be
# used by the Laboratory
VistA.wait('OPTION')
VistA.write('1')
VistA.wait('INPUT TO WHAT FILE')
VistA.write('ADMINISTRATION SCHEDULE')
VistA.wait('EDIT WHICH FIELD')
VistA.write('PACKAGE PREFIX\r')
VistA.wait('Select ADMINISTRATION SCHEDULE NAME')
VistA.write('ONCE')
VistA.wait('P')
VistA.write('LR')
VistA.wait('ADMINISTRATION SCHEDULE')
VistA.write('')
VistA.wait('Select OPTION')
VistA.write('')
# Set Up the Quick Order entry for the Strep Throat
# Default to a one time, swab collection.
VistA.wait(PROMPT)
VistA.write('K D ^XUP')
VistA.wait("Access Code")
VistA.write("SM1234")
index = VistA.multiwait(['Select OPTION NAME','TERMINAL TYPE NAME'])
if index ==1:
VistA.write("C-VT220")
VistA.wait("Select OPTION NAME")
VistA.write("Systems Manager Menu")
VistA.wait('Systems Manager Menu')
VistA.write('CPRS Configuration')
VistA.wait('CPRS Configuration')
VistA.write('MM')
VistA.wait('Order Menu Management')
VistA.write('QO')
VistA.wait('Select QUICK ORDER NAME')
VistA.write('LRZ STREP TEST')
VistA.wait('Are you adding')
VistA.write('Y')
VistA.wait('TYPE OF QUICK ORDER')
VistA.write('LAB\r')
VistA.wait('DISPLAY TEXT')
VistA.write('STREP TEST')
VistA.wait('VERIFY ORDER')
VistA.write('Y')
VistA.wait('DESCRIPTION')
VistA.write('N\r')
VistA.wait('Lab Test')
VistA.write('STREP\r2')
VistA.wait('Collected By')
VistA.write('SP')
VistA.wait('Collection Sample')
VistA.write('SWAB\r')
VistA.wait('Collection Date/Time')
VistA.write('TODAY\r')
VistA.wait('How often')
VistA.write('ONCE')
VistA.wait('PLACE//')
VistA.write('\r\r')
VistA.wait('Option')
VistA.write('ST')
VistA.wait('Select ORDER SET NAME')
VistA.write('STREP TEST')
VistA.wait('Are you adding')
VistA.write('Y')
VistA.wait('Do you wish to copy')
VistA.write('No\r')
VistA.wait('DISPLAY TEXT')
VistA.write('Strep Test\r\r\r')
VistA.wait('COMPONENT SEQUENCE')
VistA.write('10\r')
VistA.wait('ITEM:')
VistA.write('LRZ STREP TEST\r\r\r\r') # Return to EVE menu
VistA.wait("Systems Manager Menu")
VistA.write("")
VistA.wait("Do you really")
VistA.write("Y")
def registerVitalsCPRS(VistA):
# Register the DLL versions for Vitals and the executable version for
# CPRS through the XPAR Menu. This information should match the versions
# that will be used during testing.
# Files can be downloaded: http://www.osehra.org/document/guis-used-automatic-functional-testing
VistA.wait(PROMPT,60)
VistA.write('S GMVDLL=\"GMV_VITALSVIEWENTER.DLL:v. 08/11/09 15:00\"')
VistA.wait(PROMPT,60)
VistA.write('D EN^XPAR(\"SYS\",\"GMV DLL VERSION\",GMVDLL,1)')
VistA.wait(PROMPT,60)
VistA.write('S GMVDLL=\"GMV_VITALSVIEWENTER.DLL:v. 01/21/11 12:52\"')
VistA.wait(PROMPT,60)
VistA.write('D EN^XPAR(\"SYS\",\"GMV DLL VERSION\",GMVDLL,1)')
VistA.wait(PROMPT,60)
VistA.write('S GMVGUI=\"VITALSMANAGER.EXE:5.0.26.1\"')
VistA.wait(PROMPT,60)
VistA.write('D EN^XPAR(\"SYS\",\"GMV GUI VERSION\",GMVGUI,1)')
def addDoctor(VistA,name,init,SSN,sex,AC,VC1):
# Adds a Doctor user into the system via the User Management Menu as
# the System Manager.
# Needs:
# Doctor Name, Doctor Initials, SSN, Sex, Access Code, Verify Code
VistA.write('USER MANAGEMENT')
VistA.wait('User Management')
VistA.write('ADD')
VistA.wait('name')
VistA.write(name+'\rY')
VistA.wait('INITIAL:')
VistA.write(init)
VistA.wait('SSN:')
VistA.write(SSN)
VistA.wait('SEX:')
VistA.write(sex)
VistA.wait('NPI')
VistA.write('')
VistA.wait('NAME COMPONENTS')
# A ScreenMan form opens at this point, and the following information is set:
# Primary Menu: XUCORE
# Secondary Menu: GMPL MGT MENU, OR CPRS GUI CHART, GMV V/M GUI
# Access Code: <passed as argument>
# Verify Code: <passed as argument>
# No restriction on Patient Selection
# Allowed multiple sign-ons
# Allopathic and Osteopathic Physicians as the Person Class
# Core CPRS Tab access
VistA.write('\r\r\r\r\r^PRIMARY MENU OPTION\rXUCOR\r^SECONDARY MENU OPTIONS\rGMPL MGT MENU\rY\r\r\r\rOR CPRS GUI CHART\rY\r\r\r\rGMV V/M GUI\rY\r\r\r\r^Want to edit ACCESS CODE\rY\r'+AC+'\r'+AC+'\r^Want to edit VERIFY CODE\rY\r'+VC1+'\r'+VC1+'\rVISTA HEALTH CARE\rY\r\r\r\r\r^SERVICE/SECTION\rIRM\r^Language\r\r767\rY\rY\rT-1\r\r^RESTRICT PATIENT SELECTION\r0\r\rCOR\rY\rT-1\r\r^MULTIPLE SIGN-ON\r1\r1\r99\r^\rE\rY')
# Exiting the ScreenMan form, Allocate Security Keys
# PROVIDER,GMV MANAGER,LRLAB,LRVERIFY,ORES,SD SUPERVISOR,SDWL PARAMETER,SDWL MENU,
VistA.wait('User Account Access Letter')
VistA.write('NO')
VistA.wait('wish to allocate security keys?')
VistA.write('Y')
VistA.wait('Allocate key')
VistA.write('PROVIDER\r1')
VistA.wait('Another key')
VistA.write('GMV MANAGER')
VistA.wait('Another key')
VistA.write('LRLAB')
VistA.wait('Another key')
VistA.write('LRVERIFY')
VistA.wait('Another key')
VistA.write('ORES')
VistA.wait('Another key')
VistA.write('SD SUPERVISOR')
VistA.wait('Another key')
VistA.write('SDWL PARAMETER')
VistA.wait('Another key')
VistA.write('SDWL MENU')
VistA.wait('Another key')
VistA.write('')
VistA.wait('Another holder')
VistA.write('')
VistA.wait('Do you wish to proceed')
VistA.write('Yes')
VistA.wait('add this user to mail groups')
VistA.write('NO')
VistA.wait("User Management")
VistA.write("")
def addNurse(VistA,name,init,SSN,sex,AC,VC1):
# Adds a Nurse user into the system via the User Management Menu as
# the System Manager.
# Needs:
# Nurse Name, Nurse Initials, SSN, Sex, Access Code, Verify Code
VistA.wait("Systems Manager Menu")
VistA.write("User Management")
VistA.wait('User Management')
VistA.write('ADD')
VistA.wait('name')
VistA.write(name+'\rY')
VistA.wait('INITIAL:')
VistA.write(init)
VistA.wait('SSN:')
VistA.write(SSN)
VistA.wait('SEX:')
VistA.write(sex)
VistA.wait('NPI')
VistA.write('')
VistA.wait('NAME COMPONENTS')
# A ScreenMan form opens at this point, and the following information is set:
# Primary Menu: XUCORE
# Secondary Menu: GMPL MGT MENU, OR CPRS GUI CHART, GMV V/M GUI
# Access Code: <passed as argument>
# Verify Code: <passed as argument>
# No restriction on Patient Selection
# Allowed multiple sign-ons
# Nursing Service Provider as the Person Class
# Core CPRS Tab access
VistA.write('\r\r\r\r\r^PRIMARY MENU OPTION\rXUCOR\r^SECONDARY MENU OPTIONS\rGMPL MGT MENU\rY\r\r\r\rOR CPRS GUI CHART\rY\r\r\r\rGMV V/M GUI\rY\r\r\r\r^Want to edit ACCESS CODE\rY\r'+AC+'\r'+AC+'\r^Want to edit VERIFY CODE\rY\r'+VC1+'\r'+VC1+'\rVISTA HEALTH CARE\rY\r\r\r\r\r^SERVICE/SECTION\rIRM\r^Language\r\r289\rY\rY\rT-1\r\r^RESTRICT PATIENT SELECTION\r0\r\rCOR\rY\rT-1\r\r^MULTIPLE SIGN-ON\r1\r1\r99\r^\rE\rY')
# Exiting the ScreenMan form, Allocate Security Keys
# PROVIDER,ORELSE
VistA.wait('User Account Access Letter')
VistA.write('NO')
VistA.wait('wish to allocate security keys?')
VistA.write('Y')
VistA.wait('Allocate key')
VistA.write('PROVIDER\r1')
VistA.wait('Another key')
VistA.write('ORELSE\r')
VistA.wait('Another holder')
VistA.write('')
VistA.wait('Do you wish to proceed')
VistA.write('Yes')
VistA.wait('add this user to mail groups')
VistA.write('NO')
VistA.wait("User Management")
VistA.write("")
def addClerk(VistA,name,init,SSN,sex,AC,VC1):
# Adds a Clerk user into the system via the User Management Menu as
# the System Manager.
# Needs:
# Clerk Name, Clerk Initials, SSN, Sex, Access Code, Verify Code
VistA.wait("Systems Manager Menu")
VistA.write("User Management")
VistA.wait('User Management')
VistA.write('ADD')
VistA.wait('name')
VistA.write(name+'\rY')
VistA.wait('INITIAL:')
VistA.write(init)
VistA.wait('SSN:')
VistA.write(SSN)
VistA.wait('SEX:')
VistA.write(sex)
VistA.wait('NPI')
VistA.write('')
VistA.wait('NAME COMPONENTS')
# A ScreenMan form opens at this point, and the following information is set:
# Primary Menu: XUCORE
# Secondary Menu: GMPL DATA ENTRY
# Access Code: <passed as argument>
# Verify Code: <passed as argument>
# No restriction on Patient Selection
# Allowed multiple sign-ons
# Core CPRS Tab access
VistA.write('\r\r\r\r\r^PRIMARY MENU OPTION\rXUCOR\r^SECONDARY MENU OPTIONS\rGMPL DATA ENTRY\rY\r\r\r\rOR CPRS GUI CHART\rY\r\r\r\rGMV V/M GUI\rY\r\r\r\r^Want to edit ACCESS CODE\rY\r'+AC+'\r'+AC+'\r^Want to edit VERIFY CODE\rY\r'+VC1+'\r'+VC1+'\rVISTA HEALTH CARE\rY\r\r\r\r\r^SERVICE/SECTION\rIRM\r^RESTRICT PATIENT SELECTION\r0\r\rCOR\rY\rT-1\r\r^MULTIPLE SIGN-ON\r1\r1\r99\r^\rE\rY')
# Exiting the ScreenMan form, Allocate Security Key
# ORELSE
VistA.wait('User Account Access Letter')
VistA.write('NO')
VistA.wait('wish to allocate security keys?')
VistA.write('Y')
VistA.wait('Allocate key')
VistA.write('ORELSE')
VistA.wait('Another key')
VistA.write('')
VistA.wait('Another holder')
VistA.write('')
VistA.wait('Do you wish to proceed')
VistA.write('Yes')
VistA.wait('add this user to mail groups')
VistA.write('NO')
VistA.wait("User Management")
VistA.write("")
def createOrderMenu(VistA):
# Create the Quick Order Menu to have the LRZ Strep Test as a selectable option while
# not removing the old entries.
VistA.wait('Systems Manager Menu')
VistA.write('CPRS Configuration') # We can jump straight to the CPRS (Clin Coord) menu
VistA.wait('CPRS Configuration')
VistA.write('MM') # Order Menu Management
VistA.wait('Order Menu Management')
VistA.write('MN') # Enter/edit order menus
VistA.wait('ORDER MENU:')
VistA.write('ORZ GEN MED WRITE ORDERS LIST') # New menu name
VistA.wait('Are you adding')
VistA.write('Y')
VistA.wait('Do you wish to copy an existing menu')
VistA.write('N')
VistA.wait('DISPLAY TEXT')
VistA.write('') # Ignored by GUI
VistA.wait('Edit') # DESCRIPTION field
VistA.write('N')
#VistA.write('General Medicine Write Orders list') # Menu description
#VistA.wait('2')
#VistA.write('') # End of DESCRIPTION
#VistA.wait('EDIT') # Editor options
#VistA.write('') # We are done with the DESCRIPTION
VistA.wait('COLUMN WIDTH')
VistA.write('80') # Default to 80 characters
VistA.wait('MNEMONIC WIDTH')
VistA.write('') # Ignored by GUI
VistA.wait('PATH SWITCH')
VistA.write('') # Ignored by GUI
VistA.wait('ENTRY ACTION')
VistA.write('') # Shown because we have programmer access - Ignore this field
VistA.wait('EXIT ACTION')
VistA.write('') # Shown because we have programmer access - Ignore this field
# Begin ScreenMan form
VistA.wait('Action')
VistA.write('Add')
VistA.wait('Add')
VistA.write('Menu Items') # Add Menu Items to this Order Menu
# Add items to menu - repeat for each menu item
# Begin 'Add New Orders' menu
VistA.wait('ITEM')
VistA.write('OR ADD MENU CLINICIAN')
VistA.wait('ROW')
VistA.write('1')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Add New Orders'
# Begin 'Allergies' package menu
VistA.wait('ITEM')
VistA.write('GMRAOR ALLERGY ENTER/EDIT')
VistA.wait('ROW')
VistA.write('2')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Allergies'
# Begin 'Diet' package menu
VistA.wait('ITEM')
VistA.write('FHW1')
VistA.wait('ROW')
VistA.write('3')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Diet'
# Begin 'Meds, Inpatient' package menu
VistA.wait('ITEM')
VistA.write('PSJ OR PAT OE')
VistA.wait('ROW')
VistA.write('4')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Meds, Inpatient'
# Begin 'Meds, Non-VA' package menu
VistA.wait('ITEM')
VistA.write('PSH OERR')
VistA.wait('ROW')
VistA.write('5')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Meds, Non-VA'
# Begin 'Meds, Outpatient' package menu
VistA.wait('ITEM')
VistA.write('PSO OERR')
VistA.wait('ROW')
VistA.write('6')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Meds, Outpatient'
# Begin 'IV Fluids' package menu
VistA.wait('ITEM')
VistA.write('PSJI OR PAT FLUID OE')
VistA.wait('ROW')
VistA.write('7')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'IV Fluids'
# Begin 'Lab Tests' package menu
VistA.wait('ITEM')
VistA.write('LR OTHER LAB TESTS')
VistA.wait('ROW')
VistA.write('8')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Lab Tests'
# Begin 'Imaging' package menu
VistA.wait('ITEM')
VistA.write('RA OERR EXAM')
VistA.wait('ROW')
VistA.write('9')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Imaging'
# Begin 'Consult' package menu
VistA.wait('ITEM')
VistA.write('GMRCOR CONSULT')
VistA.wait('ROW')
VistA.write('10')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Consult'
# Begin 'Procedure' package menu
VistA.wait('ITEM')
VistA.write('GMRCOR REQUEST')
VistA.wait('ROW')
VistA.write('11')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Procedure'
# Begin 'Vitals' package menu
VistA.wait('ITEM')
VistA.write('GMRVOR')
VistA.wait('CHOOSE') # There is more than one GMRVOR* menu
VistA.write('1') # GMRVOR is the entire menu name and is the first one
VistA.wait('ROW')
VistA.write('12')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Vitals'
# Begin 'Text Only Order' package menu
VistA.wait('ITEM')
VistA.write('OR GXTEXT WORD PROCESSING ORDER')
VistA.wait('ROW')
VistA.write('13')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'Text Only Order'
# Begin 'STREP TEST' quick order menu
VistA.wait('ITEM')
VistA.write('LRZ STREP TEST')
VistA.wait('ROW')
VistA.write('14')
VistA.wait('COLUMN')
VistA.write('1')
VistA.wait('DISPLAY TEXT')
VistA.write('')
VistA.wait('MNEMONIC')
VistA.write('')
# End 'STREP TEST'
VistA.wait('ITEM')
VistA.write('') # Done adding menus
VistA.wait('Action')
VistA.write('Quit') # Done editing this menu
VistA.wait('Order Menu Management') # Need to get to CPRS Manager Menu
VistA.write('General Parameter Tools')
VistA.wait('General Parameter Tools') # The System Manager has this as a secondary menu (can jump to it)
VistA.write('EP') # Edit Parameter
VistA.wait('PARAMETER DEFINITION NAME')
VistA.write('ORWDX WRITE ORDERS LIST') # Parameter used to control Write Orders list
VistA.wait('selection')
VistA.write('8') # Set it for the entire System
VistA.wait('Order Dialog')
VistA.write('ORZ GEN MED WRITE ORDERS LIST') # Order menu we want to use
VistA.write('\r\r\r\r') # we are done. Stay at the EVE menu
def addAllergiesPermission(VistA):
# Add permissions for all users to mark an Allergy as "Entered in error"
# in CPRS. Done in the CPRS Configuration menu.
# Start from the Systems Manager Menu
# Exits to Systems Manager Menu
VistA.wait('Systems Manager Menu')
VistA.write('CPRS Configuration')
VistA.wait('CPRS Configuration')
VistA.write('GUI PARAMETERS')
VistA.wait('GUI Parameters')
VistA.write('GUI Mark Allergy Entered in Error')
VistA.wait('Enter selection')
VistA.write('4\rY\r\r')
def addTemplatePermission(VistA,init):
# Add permission for the Nurse to create note templates that can be
# shared in the domain.
VistA.wait('Systems Manager Menu')
VistA.write('TIU Maintenance')
VistA.wait('TIU Maintenance')
VistA.write('User Class Management')
VistA.wait('User Class Management')
VistA.write('List Membership by User')
VistA.wait('Select USER')
VistA.write('MS\rAdd\rClinical Coordinator\rT-1\r\r\r')
VistA.wait('Option')
VistA.write('\r')
def createClinic(VistA,name,abbrv,service):
# Add clinic via the XUP menu to allow scheduling
# Clinic Information:
# Clinic meets at the Facility: Yes
# Non-Count clinic: No
# Stop Code: 301 (General Internal Medicine)
# Allowable consecutive no-shows: 0
# Max # days for booking in future: 90
# Time for Clinic start: 80
# Max # days for Auto-rebook: 90
# Maximum Overbooks per day: 0
# Length of Appointment: 30
# Variable Length Appointments?: Y
# Display increments per hour: 2
VistA.wait('Systems Manager Menu')
VistA.write('Core Applications')
VistA.wait('Core Applications')
VistA.write('Scheduling Manager')
VistA.wait('Scheduling Manager\'s Menu')
VistA.write('Supervisor Menu')
VistA.wait('Supervisor Menu')
VistA.write('Set up a Clinic')
VistA.wait('Select CLINIC NAME')
VistA.write(name)
VistA.wait('Are you adding')
VistA.write('Y')
VistA.wait('NAME')
VistA.write('')
VistA.wait('ABBREVIATION')
VistA.write(abbrv)
VistA.wait('CLINIC MEETS')
VistA.write('Y')
VistA.wait('SERVICE')
VistA.write(service)
VistA.wait('NON-COUNT CLINIC')
VistA.write('N')
VistA.wait('STOP CODE NUMBER')
VistA.write('301\r\r')
VistA.wait('TELEPHONE')
VistA.write('555-555-1414\r\r\r\r\r\r\r\r\r\r\r')
VistA.wait('ALLOWABLE CONSECUTIVE NO-SHOWS')
VistA.write('0')
VistA.wait('FUTURE BOOKING')
VistA.write('90')
VistA.wait('HOUR CLINIC DISPLAY BEGINS')
VistA.write('8\r')
VistA.wait('AUTO-REBOOK')
VistA.write('90\r\r\r\r\r')
VistA.wait('MAXIMUM')
VistA.write('0\r')
VistA.wait('LENGTH OF APP')
VistA.write('30')
VistA.wait('VARIABLE')
VistA.write('Yes')
VistA.wait('DISPLAY INCREMENTS PER HOUR')
VistA.write('2')
# Sets availability for Clinic. Dates below are for a work week (Mon-Fri)
# Sets 4 appointment slots from 8am to 3pm with a half hour lunch break of
# no appointments. This will be set for all week days in future.
dates = ['JUL 2,2012','JUL 3,2012','JUL 4,2012','JUL 5,2012','JUL 6,2012']
for date in dates:
VistA.wait('AVAILABILITY DATE')
VistA.write(date)
VistA.wait('TIME')
VistA.write('0800-1200\r4')
VistA.wait('TIME')
VistA.write('1230-1500\r4')
VistA.wait('TIME')
VistA.write('')
VistA.wait('PATTERN OK')
VistA.write('Yes')
VistA.wait('AVAILABILITY DATE')
VistA.write('\r\r\r\r\r')
VistA.wait('Do you really want to halt?')
VistA.write('Y')
def setupElectronicSignature(VistA,AC,VC1,VC2,sigcode):
# Signs a created user into the ZU Menu system to add a signature code for
# document signing. It will force the user to change the verify code,
VistA.wait(PROMPT,60)
VistA.write('D ^ZU')
VistA.wait('ACCESS CODE:')
VistA.write(AC)
VistA.wait('VERIFY CODE:')
VistA.write(VC1)
VistA.wait('verify code:')
VistA.write(VC1)
VistA.wait('VERIFY CODE:')
VistA.write(VC2)
VistA.wait('right:')
VistA.write(VC2)
VistA.wait('TYPE NAME')
VistA.write('')
# then will enter the User's Toolbox to change the signature information.
VistA.wait('Core Applications')
VistA.write('USER\'s TOOLBOX')
VistA.wait('Toolbox')
VistA.write('ELE')
VistA.wait('INITIAL')
VistA.write('')
VistA.wait('SIGNATURE BLOCK PRINTED NAME')
VistA.write('')
VistA.wait('SIGNATURE BLOCK TITLE')
VistA.write('\r\r\r')
VistA.wait('SIGNATURE CODE')
VistA.write(sigcode)
VistA.wait('SIGNATURE CODE FOR VERIFICATION')
VistA.write(sigcode)
VistA.wait('Toolbox')
VistA.write('\r\r\r')
# Add a patient through the ADT Manager Menu's "Register a Patient" option.
# Function arguments:
#   VistA, Patient Name, Patient Sex, Patient DOB, Patient SSN, Patient Veteran?
def addPatient(VistA,name,sex,DOB,SSN,vet):
VistA.wait("Core Applications")
VistA.write("ADT Manager Menu")
index = VistA.multiwait(['to continue','Select ADT Manager Menu'])
if index == 0:
VistA.write('')
VistA.wait('ADT Manager Menu')
VistA.write("Registration Menu")
VistA.wait("Registration Menu")
VistA.write('Register a Patient')
index = VistA.multiwait(['PATIENT NAME',"Select 1010 printer"])
if index == 1:
VistA.write("NULL")
VistA.wait('PATIENT NAME')
VistA.write(name +'\rY')
VistA.wait('SEX')
VistA.write(sex)
VistA.wait('DATE OF BIRTH')
VistA.write(DOB)
VistA.wait('SOCIAL SECURITY NUMBER')
VistA.write(SSN)
VistA.wait('TYPE')
VistA.write('NON-VETERAN')
VistA.wait('PATIENT VETERAN')
VistA.write(vet)
VistA.wait('SERVICE CONNECTED')
VistA.write('NO')
VistA.wait('MULTIPLE BIRTH INDICATOR')
VistA.write('')
VistA.wait('//')
VistA.write('^\r')
VistA.wait('MAIDEN NAME:')
VistA.write('')
VistA.wait('[CITY]')
VistA.write('Santa Monica')
VistA.wait('[STATE]')
VistA.write('California')
VistA.wait('ALIAS')
VistA.write('')
if VistA.type=='cache':
# Enter in more information about the patient.
VistA.wait('exit:')
VistA.write('\r')
VistA.wait('Patient Data')
VistA.write('Y')
VistA.wait('QUIT')
VistA.write('4')
VistA.wait('COUNTRY')
VistA.write('')
VistA.wait('STREET ADDRESS')
VistA.write('834 Ocean Vista Avenue\r')
VistA.wait('ZIP')
VistA.write('90401')
VistA.wait('CITY')
VistA.write('1')
VistA.wait('PHONE NUMBER')
VistA.write('310-555-2233\r\r')
VistA.wait('changes')
VistA.write('Y\r')
VistA.wait('QUIT')
VistA.write('\r\r')
VistA.wait('QUIT')
VistA.write('1')
VistA.wait('PRIMARY NOK')
VistA.write('Carter,David J Sr')
VistA.wait('RELATIONSHIP')
VistA.write('FATHER')
VistA.wait('ADDRESS')
VistA.write('Y')
VistA.wait('WORK PHONE')
VistA.write('310-555-9876\r^')
VistA.wait('condition')
VistA.write('N')
VistA.wait('today')
VistA.write('Y')
VistA.wait('Registration login')
VistA.write('NOW')
VistA.wait(PROMPT) |
py | b4163c24b02abe6bfc6fdde08b11cc03a6460f0d | # # product
import calendar as tcalendar
import logging
import base64
from collections import OrderedDict
from datetime import datetime, date, timedelta
from math import ceil
from dateutil.relativedelta import relativedelta
from django.contrib import messages
from django.core.exceptions import PermissionDenied, ValidationError
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.utils import timezone
from django.db.models import Sum, Count, Q, Max
from django.contrib.admin.utils import NestedObjects
from django.db import DEFAULT_DB_ALIAS, connection
from dojo.templatetags.display_tags import get_level
from dojo.filters import ProductEngagementFilter, ProductFilter, EngagementFilter, ProductMetricsEndpointFilter, ProductMetricsFindingFilter, ProductComponentFilter
from dojo.forms import ProductForm, EngForm, DeleteProductForm, DojoMetaDataForm, JIRAProjectForm, JIRAFindingForm, AdHocFindingForm, \
EngagementPresetsForm, DeleteEngagementPresetsForm, Sonarqube_ProductForm, ProductNotificationsForm, \
GITHUB_Product_Form, GITHUBFindingForm, App_AnalysisTypeForm, JIRAEngagementForm, Add_Product_MemberForm, \
Edit_Product_MemberForm, Delete_Product_MemberForm
from dojo.models import Product_Type, Note_Type, Finding, Product, Engagement, Test, GITHUB_PKey, Finding_Template, \
Test_Type, System_Settings, Languages, App_Analysis, Benchmark_Type, Benchmark_Product_Summary, Endpoint_Status, \
Endpoint, Engagement_Presets, DojoMeta, Sonarqube_Product, Notifications, BurpRawRequestResponse, Product_Member
from dojo.utils import add_external_issue, add_error_message_to_response, add_field_errors_to_response, get_page_items, add_breadcrumb, \
get_system_setting, Product_Tab, get_punchcard_data, queryset_check
from dojo.notifications.helper import create_notification
from django.db.models import Prefetch, F, OuterRef, Subquery
from django.db.models.query import QuerySet
from github import Github
from django.contrib.postgres.aggregates import StringAgg
from dojo.components.sql_group_concat import Sql_GroupConcat
import dojo.jira_link.helper as jira_helper
from dojo.authorization.authorization import user_has_permission, user_has_permission_or_403
from django.conf import settings
from dojo.authorization.roles_permissions import Permissions, Roles
from dojo.authorization.authorization_decorators import user_is_authorized
from dojo.product.queries import get_authorized_products, get_authorized_product_members
from dojo.product_type.queries import get_authorized_members
logger = logging.getLogger(__name__)
def product(request):
# validate prod_type param
product_type = None
if 'prod_type' in request.GET:
p = request.GET.getlist('prod_type', [])
if len(p) == 1:
product_type = get_object_or_404(Product_Type, id=p[0])
prods = get_authorized_products(Permissions.Product_View)
# perform all stuff for filtering and pagination first, before annotation/prefetching
# otherwise the paginator will perform all the annotations/prefetching already only to count the total number of records
# see https://code.djangoproject.com/ticket/23771 and https://code.djangoproject.com/ticket/25375
name_words = prods.values_list('name', flat=True)
prod_filter = ProductFilter(request.GET, queryset=prods, user=request.user)
prod_list = get_page_items(request, prod_filter.qs, 25)
# perform annotation/prefetching by replacing the queryset in the page with an annotated/prefetched queryset.
prod_list.object_list = prefetch_for_product(prod_list.object_list)
# print(prod_list.object_list.explain)
add_breadcrumb(title="Product List", top_level=not len(request.GET), request=request)
return render(request,
'dojo/product.html',
{'prod_list': prod_list,
'prod_filter': prod_filter,
'name_words': sorted(set(name_words)),
'user': request.user})
def prefetch_for_product(prods):
prefetched_prods = prods
if isinstance(prods,
QuerySet): # old code can arrive here with prods being a list because the query was already executed
prefetched_prods = prefetched_prods.prefetch_related('team_manager')
prefetched_prods = prefetched_prods.prefetch_related('product_manager')
prefetched_prods = prefetched_prods.prefetch_related('technical_contact')
prefetched_prods = prefetched_prods.annotate(
active_engagement_count=Count('engagement__id', filter=Q(engagement__active=True)))
prefetched_prods = prefetched_prods.annotate(
closed_engagement_count=Count('engagement__id', filter=Q(engagement__active=False)))
prefetched_prods = prefetched_prods.annotate(last_engagement_date=Max('engagement__target_start'))
prefetched_prods = prefetched_prods.annotate(active_finding_count=Count('engagement__test__finding__id',
filter=Q(
engagement__test__finding__active=True)))
prefetched_prods = prefetched_prods.annotate(active_verified_finding_count=Count('engagement__test__finding__id',
filter=Q(
engagement__test__finding__active=True,
engagement__test__finding__verified=True)))
prefetched_prods = prefetched_prods.prefetch_related('jira_project_set__jira_instance')
prefetched_prods = prefetched_prods.prefetch_related('authorized_users')
prefetched_prods = prefetched_prods.prefetch_related('prod_type__authorized_users')
prefetched_prods = prefetched_prods.prefetch_related('members')
prefetched_prods = prefetched_prods.prefetch_related('prod_type__members')
active_endpoint_query = Endpoint.objects.filter(
finding__active=True,
finding__mitigated__isnull=True)
prefetched_prods = prefetched_prods.prefetch_related(
Prefetch('endpoint_set', queryset=active_endpoint_query, to_attr='active_endpoints'))
prefetched_prods = prefetched_prods.prefetch_related('tags')
if get_system_setting('enable_github'):
prefetched_prods = prefetched_prods.prefetch_related(
Prefetch('github_pkey_set', queryset=GITHUB_PKey.objects.all().select_related('git_conf'),
to_attr='github_confs'))
else:
logger.debug('unable to prefetch because query was already executed')
return prefetched_prods
def iso_to_gregorian(iso_year, iso_week, iso_day):
jan4 = date(iso_year, 1, 4)
start = jan4 - timedelta(days=jan4.isoweekday() - 1)
return start + timedelta(weeks=iso_week - 1, days=iso_day - 1)
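# Worked example (added comment, not in the original source): ISO year 2012,
# ISO week 27, day 1 (Monday) maps to the Gregorian date 2012-07-02, i.e.
#   iso_to_gregorian(2012, 27, 1) == date(2012, 7, 2)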
@user_is_authorized(Product, Permissions.Product_View, 'pid', 'view')
def view_product(request, pid):
prod_query = Product.objects.all().select_related('product_manager', 'technical_contact', 'team_manager') \
.prefetch_related('authorized_users') \
.prefetch_related('members') \
.prefetch_related('prod_type__members')
prod = get_object_or_404(prod_query, id=pid)
product_members = get_authorized_product_members(prod, Permissions.Product_View)
product_type_members = get_authorized_members(prod.prod_type, Permissions.Product_Type_View)
personal_notifications_form = ProductNotificationsForm(
instance=Notifications.objects.filter(user=request.user).filter(product=prod).first())
langSummary = Languages.objects.filter(product=prod).aggregate(Sum('files'), Sum('code'), Count('files'))
languages = Languages.objects.filter(product=prod).order_by('-code')
app_analysis = App_Analysis.objects.filter(product=prod).order_by('name')
benchmark_type = Benchmark_Type.objects.filter(enabled=True).order_by('name')
benchmarks = Benchmark_Product_Summary.objects.filter(product=prod, publish=True,
benchmark_type__enabled=True).order_by('benchmark_type__name')
benchAndPercent = []
for i in range(0, len(benchmarks)):
benchAndPercent.append([benchmarks[i].benchmark_type, get_level(benchmarks[i])])
system_settings = System_Settings.objects.get()
product_metadata = dict(prod.product_meta.order_by('name').values_list('name', 'value'))
open_findings = Finding.objects.filter(test__engagement__product=prod,
false_p=False,
active=True,
duplicate=False,
out_of_scope=False).order_by('numerical_severity').values(
'severity').annotate(count=Count('severity'))
critical = 0
high = 0
medium = 0
low = 0
info = 0
for v in open_findings:
if v["severity"] == "Critical":
critical = v["count"]
elif v["severity"] == "High":
high = v["count"]
elif v["severity"] == "Medium":
medium = v["count"]
elif v["severity"] == "Low":
low = v["count"]
elif v["severity"] == "Info":
info = v["count"]
total = critical + high + medium + low + info
product_tab = Product_Tab(pid, title="Product", tab="overview")
return render(request, 'dojo/view_product_details.html', {
'prod': prod,
'product_tab': product_tab,
'product_metadata': product_metadata,
'critical': critical,
'high': high,
'medium': medium,
'low': low,
'info': info,
'total': total,
'user': request.user,
'languages': languages,
'langSummary': langSummary,
'app_analysis': app_analysis,
'system_settings': system_settings,
'benchmarks_percents': benchAndPercent,
'benchmarks': benchmarks,
'product_members': product_members,
'product_type_members': product_type_members,
'personal_notifications_form': personal_notifications_form})
@user_is_authorized(Product, Permissions.Component_View, 'pid', 'view')
def view_product_components(request, pid):
prod = get_object_or_404(Product, id=pid)
product_tab = Product_Tab(pid, title="Product", tab="components")
separator = ', '
# Get components ordered by component_name and concat component versions to the same row
if connection.vendor == 'postgresql':
component_query = Finding.objects.filter(test__engagement__product__id=pid).values("component_name").order_by(
'component_name').annotate(
component_version=StringAgg('component_version', delimiter=separator, distinct=True))
else:
component_query = Finding.objects.filter(test__engagement__product__id=pid).values("component_name")
component_query = component_query.annotate(
component_version=Sql_GroupConcat('component_version', separator=separator, distinct=True))
# Append finding counts
component_query = component_query.annotate(total=Count('id')).order_by('component_name', 'component_version')
component_query = component_query.annotate(actives=Count('id', filter=Q(active=True)))
component_query = component_query.annotate(duplicate=(Count('id', filter=Q(duplicate=True))))
# Default sort by total descending
component_query = component_query.order_by('-total')
comp_filter = ProductComponentFilter(request.GET, queryset=component_query)
result = get_page_items(request, comp_filter.qs, 25)
# Filter out None values for auto-complete
component_words = component_query.exclude(component_name__isnull=True).values_list('component_name', flat=True)
return render(request, 'dojo/product_components.html', {
'prod': prod,
'filter': comp_filter,
'product_tab': product_tab,
'result': result,
'component_words': sorted(set(component_words))
})
def identify_view(request):
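    # Decide whether metrics are aggregated per Finding or per Endpoint: an explicit ?type= parameter wins, otherwise endpoint-style filter params or the referer are used as hints.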
get_data = request.GET
view = get_data.get('type', None)
if view:
# value of view is reflected in the template, make sure it's valid
        # although any XSS should be caught by django autoescape, we see people sometimes using '|safe'...
if view in ['Endpoint', 'Finding']:
return view
raise ValueError('invalid view, view must be "Endpoint" or "Finding"')
else:
if get_data.get('finding__severity', None):
return 'Endpoint'
elif get_data.get('false_positive', None):
return 'Endpoint'
referer = request.META.get('HTTP_REFERER', None)
if referer:
if referer.find('type=Endpoint') > -1:
return 'Endpoint'
return 'Finding'
def finding_querys(request, prod):
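    # Build the dict of filtered Finding querysets (open, closed, accepted, verified, ...) consumed by view_product_metrics.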
filters = dict()
findings_query = Finding.objects.filter(test__engagement__product=prod,
severity__in=('Critical', 'High', 'Medium', 'Low', 'Info'))
# prefetch only what's needed to avoid lots of repeated queries
findings_query = findings_query.prefetch_related(
# 'test__engagement',
# 'test__engagement__risk_acceptance',
# 'found_by',
# 'test',
# 'test__test_type',
# 'risk_acceptance_set',
'reporter')
findings = ProductMetricsFindingFilter(request.GET, queryset=findings_query, pid=prod)
findings_qs = queryset_check(findings)
filters['form'] = findings.form
# dead code:
# if not findings_qs and not findings_query:
# # logger.debug('all filtered')
# findings = findings_query
# findings_qs = queryset_check(findings)
# messages.add_message(request,
# messages.ERROR,
# 'All objects have been filtered away. Displaying all objects',
# extra_tags='alert-danger')
try:
# logger.debug(findings_qs.query)
start_date = findings_qs.earliest('date').date
start_date = datetime(start_date.year,
start_date.month, start_date.day,
tzinfo=timezone.get_current_timezone())
end_date = findings_qs.latest('date').date
end_date = datetime(end_date.year,
end_date.month, end_date.day,
tzinfo=timezone.get_current_timezone())
except Exception as e:
logger.debug(e)
start_date = timezone.now()
end_date = timezone.now()
    week = end_date - timedelta(days=7)  # seven days and newer are considered "new"
# risk_acceptances = Risk_Acceptance.objects.filter(engagement__in=Engagement.objects.filter(product=prod)).prefetch_related('accepted_findings')
# filters['accepted'] = [finding for ra in risk_acceptances for finding in ra.accepted_findings.all()]
from dojo.finding.views import ACCEPTED_FINDINGS_QUERY
filters['accepted'] = Finding.objects.filter(test__engagement__product=prod).filter(ACCEPTED_FINDINGS_QUERY).distinct()
filters['verified'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=False,
active=True,
verified=True,
duplicate=False,
out_of_scope=False).order_by("date")
filters['new_verified'] = findings_qs.filter(date__range=[week, end_date],
false_p=False,
verified=True,
active=True,
duplicate=False,
out_of_scope=False).order_by("date")
filters['open'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=False,
duplicate=False,
out_of_scope=False,
active=True,
is_Mitigated=False)
filters['inactive'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=False,
duplicate=False,
out_of_scope=False,
active=False,
is_Mitigated=False)
filters['closed'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=False,
duplicate=False,
out_of_scope=False,
active=False,
is_Mitigated=True)
filters['false_positive'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=True,
duplicate=False,
out_of_scope=False)
filters['out_of_scope'] = findings_qs.filter(date__range=[start_date, end_date],
false_p=False,
duplicate=False,
out_of_scope=True)
filters['all'] = findings_qs
filters['open_vulns'] = findings_qs.filter(
false_p=False,
duplicate=False,
out_of_scope=False,
active=True,
mitigated__isnull=True,
cwe__isnull=False,
).order_by('cwe').values(
'cwe'
).annotate(
count=Count('cwe')
)
filters['all_vulns'] = findings_qs.filter(
duplicate=False,
cwe__isnull=False,
).order_by('cwe').values(
'cwe'
).annotate(
count=Count('cwe')
)
filters['start_date'] = start_date
filters['end_date'] = end_date
filters['week'] = week
return filters
def endpoint_querys(request, prod):
filters = dict()
endpoints_query = Endpoint_Status.objects.filter(finding__test__engagement__product=prod,
finding__severity__in=(
'Critical', 'High', 'Medium', 'Low', 'Info')).prefetch_related(
'finding__test__engagement',
'finding__test__engagement__risk_acceptance',
'finding__risk_acceptance_set',
'finding__reporter').annotate(severity=F('finding__severity'))
endpoints = ProductMetricsEndpointFilter(request.GET, queryset=endpoints_query)
endpoints_qs = queryset_check(endpoints)
filters['form'] = endpoints.form
if not endpoints_qs and not endpoints_query:
endpoints = endpoints_query
endpoints_qs = queryset_check(endpoints)
messages.add_message(request,
messages.ERROR,
'All objects have been filtered away. Displaying all objects',
extra_tags='alert-danger')
try:
start_date = endpoints_qs.earliest('date').date
start_date = datetime(start_date.year,
start_date.month, start_date.day,
tzinfo=timezone.get_current_timezone())
end_date = endpoints_qs.latest('date').date
end_date = datetime(end_date.year,
end_date.month, end_date.day,
tzinfo=timezone.get_current_timezone())
except:
start_date = timezone.now()
end_date = timezone.now()
    week = end_date - timedelta(days=7)  # seven days and newer are considered "new"
filters['accepted'] = endpoints_qs.filter(date__range=[start_date, end_date],
risk_accepted=True).order_by("date")
filters['verified'] = endpoints_qs.filter(date__range=[start_date, end_date],
false_positive=False,
mitigated=True,
out_of_scope=False).order_by("date")
filters['new_verified'] = endpoints_qs.filter(date__range=[week, end_date],
false_positive=False,
mitigated=True,
out_of_scope=False).order_by("date")
filters['open'] = endpoints_qs.filter(date__range=[start_date, end_date],
mitigated=False)
filters['inactive'] = endpoints_qs.filter(date__range=[start_date, end_date],
mitigated=True)
filters['closed'] = endpoints_qs.filter(date__range=[start_date, end_date],
mitigated=True)
filters['false_positive'] = endpoints_qs.filter(date__range=[start_date, end_date],
false_positive=True)
filters['out_of_scope'] = endpoints_qs.filter(date__range=[start_date, end_date],
out_of_scope=True)
filters['all'] = endpoints_qs
filters['open_vulns'] = endpoints_qs.filter(
false_positive=False,
out_of_scope=False,
mitigated=True,
finding__cwe__isnull=False,
).order_by('finding__cwe').values(
'finding__cwe'
).annotate(
count=Count('finding__cwe')
)
filters['all_vulns'] = endpoints_qs.filter(
finding__cwe__isnull=False,
).order_by('finding__cwe').values(
'finding__cwe'
).annotate(
count=Count('finding__cwe')
)
filters['start_date'] = start_date
filters['end_date'] = end_date
filters['week'] = week
return filters
@user_is_authorized(Product, Permissions.Product_View, 'pid', 'view')
def view_product_metrics(request, pid):
prod = get_object_or_404(Product, id=pid)
engs = Engagement.objects.filter(product=prod, active=True)
view = identify_view(request)
result = EngagementFilter(
request.GET,
queryset=Engagement.objects.filter(product=prod, active=False).order_by('-target_end'))
inactive_engs_page = get_page_items(request, result.qs, 10)
filters = dict()
if view == 'Finding':
filters = finding_querys(request, prod)
elif view == 'Endpoint':
filters = endpoint_querys(request, prod)
start_date = filters['start_date']
end_date = filters['end_date']
week_date = filters['week']
tests = Test.objects.filter(engagement__product=prod).prefetch_related('finding_set', 'test_type')
tests = tests.annotate(verified_finding_count=Count('finding__id', filter=Q(finding__verified=True)))
open_vulnerabilities = filters['open_vulns']
all_vulnerabilities = filters['all_vulns']
start_date = timezone.make_aware(datetime.combine(start_date, datetime.min.time()))
r = relativedelta(end_date, start_date)
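    # approximate weeks between start and end: months converted at ~4.33 weeks each, plus the leftover days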
weeks_between = int(ceil((((r.years * 12) + r.months) * 4.33) + (r.days / 7)))
if weeks_between <= 0:
weeks_between += 2
punchcard, ticks = get_punchcard_data(filters.get('open', None), start_date, weeks_between, view)
add_breadcrumb(parent=prod, top_level=False, request=request)
open_close_weekly = OrderedDict()
new_weekly = OrderedDict()
severity_weekly = OrderedDict()
critical_weekly = OrderedDict()
high_weekly = OrderedDict()
medium_weekly = OrderedDict()
for v in filters.get('open', None):
iso_cal = v.date.isocalendar()
x = iso_to_gregorian(iso_cal[0], iso_cal[1], 1)
y = x.strftime("<span class='small'>%m/%d<br/>%Y</span>")
x = (tcalendar.timegm(x.timetuple()) * 1000)
if x not in critical_weekly:
critical_weekly[x] = {'count': 0, 'week': y}
if x not in high_weekly:
high_weekly[x] = {'count': 0, 'week': y}
if x not in medium_weekly:
medium_weekly[x] = {'count': 0, 'week': y}
if x in open_close_weekly:
if v.mitigated:
open_close_weekly[x]['closed'] += 1
else:
open_close_weekly[x]['open'] += 1
else:
if v.mitigated:
open_close_weekly[x] = {'closed': 1, 'open': 0, 'accepted': 0}
else:
open_close_weekly[x] = {'closed': 0, 'open': 1, 'accepted': 0}
open_close_weekly[x]['week'] = y
if view == 'Finding':
severity = v.severity
elif view == 'Endpoint':
severity = v.finding.severity
if x in severity_weekly:
if severity in severity_weekly[x]:
severity_weekly[x][severity] += 1
else:
severity_weekly[x][severity] = 1
else:
severity_weekly[x] = {'Critical': 0, 'High': 0,
'Medium': 0, 'Low': 0, 'Info': 0}
severity_weekly[x][severity] = 1
severity_weekly[x]['week'] = y
if severity == 'Critical':
if x in critical_weekly:
critical_weekly[x]['count'] += 1
else:
critical_weekly[x] = {'count': 1, 'week': y}
elif severity == 'High':
if x in high_weekly:
high_weekly[x]['count'] += 1
else:
high_weekly[x] = {'count': 1, 'week': y}
elif severity == 'Medium':
if x in medium_weekly:
medium_weekly[x]['count'] += 1
else:
medium_weekly[x] = {'count': 1, 'week': y}
for a in filters.get('accepted', None):
if view == 'Finding':
finding = a
elif view == 'Endpoint':
            finding = a.finding
iso_cal = a.date.isocalendar()
x = iso_to_gregorian(iso_cal[0], iso_cal[1], 1)
y = x.strftime("<span class='small'>%m/%d<br/>%Y</span>")
x = (tcalendar.timegm(x.timetuple()) * 1000)
if x in open_close_weekly:
open_close_weekly[x]['accepted'] += 1
else:
open_close_weekly[x] = {'closed': 0, 'open': 0, 'accepted': 1}
open_close_weekly[x]['week'] = y
test_data = {}
for t in tests:
if t.test_type.name in test_data:
test_data[t.test_type.name] += t.verified_finding_count
else:
test_data[t.test_type.name] = t.verified_finding_count
product_tab = Product_Tab(pid, title="Product", tab="metrics")
return render(request,
'dojo/product_metrics.html',
{'prod': prod,
'product_tab': product_tab,
'engs': engs,
'inactive_engs': inactive_engs_page,
'view': view,
'verified_objs': filters.get('verified', None),
'open_objs': filters.get('open', None),
'inactive_objs': filters.get('inactive', None),
'closed_objs': filters.get('closed', None),
'false_positive_objs': filters.get('false_positive', None),
'out_of_scope_objs': filters.get('out_of_scope', None),
'accepted_objs': filters.get('accepted', None),
'new_objs': filters.get('new_verified', None),
'all_objs': filters.get('all', None),
'form': filters.get('form', None),
'reset_link': reverse('view_product_metrics', args=(prod.id,)) + '?type=' + view,
'open_vulnerabilities': open_vulnerabilities,
'all_vulnerabilities': all_vulnerabilities,
'start_date': start_date,
'punchcard': punchcard,
'ticks': ticks,
'open_close_weekly': open_close_weekly,
'severity_weekly': severity_weekly,
'critical_weekly': critical_weekly,
'high_weekly': high_weekly,
'medium_weekly': medium_weekly,
'test_data': test_data,
'user': request.user})
@user_is_authorized(Product, Permissions.Engagement_View, 'pid', 'view')
def view_engagements(request, pid):
prod = get_object_or_404(Product, id=pid)
default_page_num = 10
recent_test_day_count = 7
# In Progress Engagements
engs = Engagement.objects.filter(product=prod, active=True, status="In Progress").order_by('-updated')
active_engs_filter = ProductEngagementFilter(request.GET, queryset=engs, prefix='active')
result_active_engs = get_page_items(request, active_engs_filter.qs, default_page_num, prefix="engs")
# prefetch only after creating the filters to avoid https://code.djangoproject.com/ticket/23771 and https://code.djangoproject.com/ticket/25375
result_active_engs.object_list = prefetch_for_view_engagements(result_active_engs.object_list, recent_test_day_count)
# Engagements that are queued because they haven't started or paused
engs = Engagement.objects.filter(~Q(status="In Progress"), product=prod, active=True).order_by('-updated')
queued_engs_filter = ProductEngagementFilter(request.GET, queryset=engs, prefix='queued')
result_queued_engs = get_page_items(request, queued_engs_filter.qs, default_page_num, prefix="queued_engs")
result_queued_engs.object_list = prefetch_for_view_engagements(result_queued_engs.object_list, recent_test_day_count)
# Cancelled or Completed Engagements
engs = Engagement.objects.filter(product=prod, active=False).order_by('-target_end')
inactive_engs_filter = ProductEngagementFilter(request.GET, queryset=engs, prefix='closed')
result_inactive_engs = get_page_items(request, inactive_engs_filter.qs, default_page_num, prefix="inactive_engs")
result_inactive_engs.object_list = prefetch_for_view_engagements(result_inactive_engs.object_list, recent_test_day_count)
title = "All Engagements"
product_tab = Product_Tab(pid, title=title, tab="engagements")
return render(request,
'dojo/view_engagements.html',
{'prod': prod,
'product_tab': product_tab,
'engs': result_active_engs,
'engs_count': result_active_engs.paginator.count,
'engs_filter': active_engs_filter,
'queued_engs': result_queued_engs,
'queued_engs_count': result_queued_engs.paginator.count,
'queued_engs_filter': queued_engs_filter,
'inactive_engs': result_inactive_engs,
'inactive_engs_count': result_inactive_engs.paginator.count,
'inactive_engs_filter': inactive_engs_filter,
'recent_test_day_count': recent_test_day_count,
'user': request.user})
def prefetch_for_view_engagements(engagements, recent_test_day_count):
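    # Prefetch the lead, the tests updated within the last recent_test_day_count days, and per-engagement finding counts so the engagement lists avoid per-row queries.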
engagements = engagements.select_related(
'lead'
).prefetch_related(
Prefetch('test_set', queryset=Test.objects.filter(
id__in=Subquery(
Test.objects.filter(
engagement_id=OuterRef('engagement_id'),
updated__gte=timezone.now() - timedelta(days=recent_test_day_count)
).values_list('id', flat=True)
))
),
'test_set__test_type',
).annotate(
count_tests=Count('test', distinct=True),
count_findings_all=Count('test__finding__id'),
count_findings_open=Count('test__finding__id', filter=Q(test__finding__active=True)),
count_findings_open_verified=Count('test__finding__id', filter=Q(test__finding__active=True) & Q(test__finding__verified=True)),
count_findings_close=Count('test__finding__id', filter=Q(test__finding__is_Mitigated=True)),
count_findings_duplicate=Count('test__finding__id', filter=Q(test__finding__duplicate=True)),
count_findings_accepted=Count('test__finding__id', filter=Q(test__finding__risk_accepted=True)),
)
if System_Settings.objects.get().enable_jira:
engagements = engagements.prefetch_related(
'jira_project__jira_instance',
'product__jira_project_set__jira_instance',
)
return engagements
# Authorization is within the import_scan_results method
def import_scan_results_prod(request, pid=None):
from dojo.engagement.views import import_scan_results
return import_scan_results(request, pid=pid)
def new_product(request, ptid=None):
jira_project_form = None
error = False
initial = None
if ptid is not None:
prod_type = get_object_or_404(Product_Type, pk=ptid)
initial = {'prod_type': prod_type}
form = ProductForm(initial=initial)
if request.method == 'POST':
form = ProductForm(request.POST, instance=Product())
if get_system_setting('enable_github'):
gform = GITHUB_Product_Form(request.POST, instance=GITHUB_PKey())
else:
gform = None
if form.is_valid():
if settings.FEATURE_AUTHORIZATION_V2:
product_type = form.instance.prod_type
user_has_permission_or_403(request.user, product_type, Permissions.Product_Type_Add_Product)
else:
if not request.user.is_staff:
raise PermissionDenied
product = form.save()
messages.add_message(request,
messages.SUCCESS,
'Product added successfully.',
extra_tags='alert-success')
success, jira_project_form = jira_helper.process_jira_project_form(request, product=product)
error = not success
if get_system_setting('enable_github'):
if gform.is_valid():
github_pkey = gform.save(commit=False)
if github_pkey.git_conf is not None and github_pkey.git_project:
github_pkey.product = product
github_pkey.save()
messages.add_message(request,
messages.SUCCESS,
'GitHub information added successfully.',
extra_tags='alert-success')
# Create appropriate labels in the repo
logger.info('Create label in repo: ' + github_pkey.git_project)
try:
g = Github(github_pkey.git_conf.api_key)
repo = g.get_repo(github_pkey.git_project)
repo.create_label(name="security", color="FF0000",
description="This label is automatically applied to all issues created by DefectDojo")
repo.create_label(name="security / info", color="00FEFC",
description="This label is automatically applied to all issues created by DefectDojo")
repo.create_label(name="security / low", color="B7FE00",
description="This label is automatically applied to all issues created by DefectDojo")
repo.create_label(name="security / medium", color="FEFE00",
description="This label is automatically applied to all issues created by DefectDojo")
repo.create_label(name="security / high", color="FE9A00",
description="This label is automatically applied to all issues created by DefectDojo")
repo.create_label(name="security / critical", color="FE2200",
description="This label is automatically applied to all issues created by DefectDojo")
except:
                            logger.info('Labels cannot be created - they may already exist')
# SonarQube API Configuration
sonarqube_form = Sonarqube_ProductForm(request.POST)
if sonarqube_form.is_valid():
sonarqube_product = sonarqube_form.save(commit=False)
sonarqube_product.product = product
sonarqube_product.save()
create_notification(event='product_added', title=product.name,
url=reverse('view_product', args=(product.id,)))
if not error:
return HttpResponseRedirect(reverse('view_product', args=(product.id,)))
else:
# engagement was saved, but JIRA errors, so goto edit_product
return HttpResponseRedirect(reverse('edit_product', args=(product.id,)))
else:
if get_system_setting('enable_jira'):
jira_project_form = JIRAProjectForm()
if get_system_setting('enable_github'):
gform = GITHUB_Product_Form()
else:
gform = None
add_breadcrumb(title="New Product", top_level=False, request=request)
return render(request, 'dojo/new_product.html',
{'form': form,
'jform': jira_project_form,
'gform': gform,
'sonarqube_form': Sonarqube_ProductForm()})
@user_is_authorized(Product, Permissions.Product_Edit, 'pid', 'staff')
def edit_product(request, pid):
product = Product.objects.get(pk=pid)
system_settings = System_Settings.objects.get()
jira_enabled = system_settings.enable_jira
jira_project = None
jform = None
github_enabled = system_settings.enable_github
github_inst = None
gform = None
sonarqube_form = None
error = False
try:
github_inst = GITHUB_PKey.objects.get(product=product)
except:
github_inst = None
pass
sonarqube_conf = Sonarqube_Product.objects.filter(product=product).first()
if request.method == 'POST':
form = ProductForm(request.POST, instance=product)
jira_project = jira_helper.get_jira_project(product)
if form.is_valid():
form.save()
tags = request.POST.getlist('tags')
messages.add_message(request,
messages.SUCCESS,
'Product updated successfully.',
extra_tags='alert-success')
success, jform = jira_helper.process_jira_project_form(request, instance=jira_project, product=product)
error = not success
if get_system_setting('enable_github') and github_inst:
gform = GITHUB_Product_Form(request.POST, instance=github_inst)
# need to handle delete
try:
gform.save()
except:
pass
elif get_system_setting('enable_github'):
gform = GITHUB_Product_Form(request.POST)
if gform.is_valid():
new_conf = gform.save(commit=False)
new_conf.product_id = pid
new_conf.save()
messages.add_message(request,
messages.SUCCESS,
'GITHUB information updated successfully.',
extra_tags='alert-success')
# SonarQube API Configuration
sonarqube_form = Sonarqube_ProductForm(request.POST, instance=sonarqube_conf)
if sonarqube_form.is_valid():
new_conf = sonarqube_form.save(commit=False)
new_conf.product_id = pid
new_conf.save()
if not error:
return HttpResponseRedirect(reverse('view_product', args=(pid,)))
else:
form = ProductForm(instance=product,
initial={'auth_users': product.authorized_users.all()})
if jira_enabled:
jira_project = jira_helper.get_jira_project(product)
jform = JIRAProjectForm(instance=jira_project)
else:
jform = None
        if github_enabled and (github_inst is not None):
            if github_inst is not None:
                gform = GITHUB_Product_Form(instance=github_inst)
            else:
                gform = GITHUB_Product_Form()
else:
gform = None
sonarqube_form = Sonarqube_ProductForm(instance=sonarqube_conf)
product_tab = Product_Tab(pid, title="Edit Product", tab="settings")
return render(request,
'dojo/edit_product.html',
{'form': form,
'product_tab': product_tab,
'jform': jform,
'gform': gform,
'sonarqube_form': sonarqube_form,
'product': product
})
@user_is_authorized(Product, Permissions.Product_Delete, 'pid', 'delete')
def delete_product(request, pid):
product = get_object_or_404(Product, pk=pid)
form = DeleteProductForm(instance=product)
if request.method == 'POST':
if 'id' in request.POST and str(product.id) == request.POST['id']:
form = DeleteProductForm(request.POST, instance=product)
if form.is_valid():
product.delete()
messages.add_message(request,
messages.SUCCESS,
'Product and relationships removed.',
extra_tags='alert-success')
create_notification(event='other',
title='Deletion of %s' % product.name,
description='The product "%s" was deleted by %s' % (product.name, request.user),
url=request.build_absolute_uri(reverse('product')),
icon="exclamation-triangle")
return HttpResponseRedirect(reverse('product'))
collector = NestedObjects(using=DEFAULT_DB_ALIAS)
collector.collect([product])
rels = collector.nested()
product_tab = Product_Tab(pid, title="Product", tab="settings")
return render(request, 'dojo/delete_product.html',
{'product': product,
'form': form,
'product_tab': product_tab,
'rels': rels,
})
@user_is_authorized(Product, Permissions.Engagement_Add, 'pid', 'staff')
def new_eng_for_app(request, pid, cicd=False):
jira_project = None
jira_project_form = None
jira_epic_form = None
product = Product.objects.get(id=pid)
jira_error = False
if not user_is_authorized(request.user, 'staff', product):
raise PermissionDenied
if request.method == 'POST':
form = EngForm(request.POST, cicd=cicd, product=product, user=request.user)
jira_project = jira_helper.get_jira_project(product)
logger.debug('new_eng_for_app')
if form.is_valid():
# first create the new engagement
engagement = form.save(commit=False)
if not engagement.name:
engagement.name = str(engagement.target_start)
engagement.threat_model = False
engagement.api_test = False
engagement.pen_test = False
engagement.check_list = False
engagement.product = form.cleaned_data.get('product')
if engagement.threat_model:
engagement.progress = 'threat_model'
else:
engagement.progress = 'other'
if cicd:
engagement.engagement_type = 'CI/CD'
engagement.status = "In Progress"
engagement.active = True
engagement.save()
form.save_m2m()
logger.debug('new_eng_for_app: process jira coming')
# new engagement, so do not provide jira_project
success, jira_project_form = jira_helper.process_jira_project_form(request, instance=None,
engagement=engagement)
error = not success
logger.debug('new_eng_for_app: process jira epic coming')
success, jira_epic_form = jira_helper.process_jira_epic_form(request, engagement=engagement)
error = error or not success
create_notification(event='engagement_added', title=engagement.name + " for " + product.name,
engagement=engagement, url=reverse('view_engagement', args=(engagement.id,)),
objowner=engagement.lead)
messages.add_message(request,
messages.SUCCESS,
'Engagement added successfully.',
extra_tags='alert-success')
if not error:
if "_Add Tests" in request.POST:
return HttpResponseRedirect(reverse('add_tests', args=(engagement.id,)))
elif "_Import Scan Results" in request.POST:
return HttpResponseRedirect(reverse('import_scan_results', args=(engagement.id,)))
else:
return HttpResponseRedirect(reverse('view_engagement', args=(engagement.id,)))
else:
# engagement was saved, but JIRA errors, so goto edit_engagement
logger.debug('new_eng_for_app: jira errors')
return HttpResponseRedirect(reverse('edit_engagement', args=(engagement.id,)))
else:
logger.debug(form.errors)
else:
form = EngForm(initial={'lead': request.user, 'target_start': timezone.now().date(),
'target_end': timezone.now().date() + timedelta(days=7), 'product': product}, cicd=cicd,
product=product, user=request.user)
if get_system_setting('enable_jira'):
jira_project = jira_helper.get_jira_project(product)
logger.debug('showing jira-project-form')
jira_project_form = JIRAProjectForm(target='engagement', product=product)
logger.debug('showing jira-epic-form')
jira_epic_form = JIRAEngagementForm()
if cicd:
title = 'New CI/CD Engagement'
else:
title = 'New Interactive Engagement'
product_tab = Product_Tab(pid, title=title, tab="engagements")
return render(request, 'dojo/new_eng.html',
{'form': form,
'title': title,
'product_tab': product_tab,
'jira_epic_form': jira_epic_form,
'jira_project_form': jira_project_form,
})
@user_is_authorized(Product, Permissions.Product_Edit, 'pid', 'staff')
def new_tech_for_prod(request, pid):
prod = Product.objects.get(id=pid)
if request.method == 'POST':
form = App_AnalysisTypeForm(request.POST)
if form.is_valid():
tech = form.save(commit=False)
tech.product_id = pid
tech.save()
messages.add_message(request,
messages.SUCCESS,
'Technology added successfully.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('view_product', args=(pid,)))
form = App_AnalysisTypeForm()
return render(request, 'dojo/new_tech.html',
{'form': form, 'pid': pid})
@user_is_authorized(Product, Permissions.Engagement_Add, 'pid', 'staff')
def new_eng_for_app_cicd(request, pid):
# we have to use pid=pid here as new_eng_for_app expects kwargs, because that is how django calls the function based on urls.py named groups
return new_eng_for_app(request, pid=pid, cicd=True)
@user_is_authorized(Product, Permissions.Product_Edit, 'pid', 'staff')
def add_meta_data(request, pid):
prod = Product.objects.get(id=pid)
if request.method == 'POST':
form = DojoMetaDataForm(request.POST, instance=DojoMeta(product=prod))
if form.is_valid():
form.save()
messages.add_message(request,
messages.SUCCESS,
'Metadata added successfully.',
extra_tags='alert-success')
if 'add_another' in request.POST:
return HttpResponseRedirect(reverse('add_meta_data', args=(pid,)))
else:
return HttpResponseRedirect(reverse('view_product', args=(pid,)))
else:
form = DojoMetaDataForm()
product_tab = Product_Tab(pid, title="Add Metadata", tab="settings")
return render(request,
'dojo/add_product_meta_data.html',
{'form': form,
'product_tab': product_tab,
'product': prod,
})
@user_is_authorized(Product, Permissions.Product_Edit, 'pid', 'staff')
def edit_meta_data(request, pid):
prod = Product.objects.get(id=pid)
if request.method == 'POST':
for key, value in request.POST.items():
if key.startswith('cfv_'):
cfv_id = int(key.split('_')[1])
cfv = get_object_or_404(DojoMeta, id=cfv_id)
value = value.strip()
if value:
cfv.value = value
cfv.save()
if key.startswith('delete_'):
cfv_id = int(key.split('_')[2])
cfv = get_object_or_404(DojoMeta, id=cfv_id)
cfv.delete()
messages.add_message(request,
messages.SUCCESS,
'Metadata edited successfully.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('view_product', args=(pid,)))
product_tab = Product_Tab(pid, title="Edit Metadata", tab="settings")
return render(request,
'dojo/edit_product_meta_data.html',
{'product': prod,
'product_tab': product_tab,
})
@user_is_authorized(Product, Permissions.Finding_Add, 'pid', 'staff')
def ad_hoc_finding(request, pid):
prod = Product.objects.get(id=pid)
test = None
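    # reuse the product's "Ad Hoc Engagement" and its first test if they exist; otherwise create them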
try:
eng = Engagement.objects.get(product=prod, name="Ad Hoc Engagement")
tests = Test.objects.filter(engagement=eng)
if len(tests) != 0:
test = tests[0]
else:
test = Test(engagement=eng, test_type=Test_Type.objects.get(name="Pen Test"),
target_start=timezone.now(), target_end=timezone.now())
test.save()
except:
eng = Engagement(name="Ad Hoc Engagement", target_start=timezone.now(),
target_end=timezone.now(), active=False, product=prod)
eng.save()
test = Test(engagement=eng, test_type=Test_Type.objects.get(name="Pen Test"),
target_start=timezone.now(), target_end=timezone.now())
test.save()
form_error = False
push_all_jira_issues = jira_helper.is_push_all_issues(test)
jform = None
gform = None
form = AdHocFindingForm(initial={'date': timezone.now().date()}, req_resp=None)
use_jira = jira_helper.get_jira_project(test) is not None
if request.method == 'POST':
form = AdHocFindingForm(request.POST, req_resp=None)
if (form['active'].value() is False or form['false_p'].value()) and form['duplicate'].value() is False:
closing_disabled = Note_Type.objects.filter(is_mandatory=True, is_active=True).count()
if closing_disabled != 0:
error_inactive = ValidationError('Can not set a finding as inactive without adding all mandatory notes',
code='inactive_without_mandatory_notes')
error_false_p = ValidationError(
'Can not set a finding as false positive without adding all mandatory notes',
code='false_p_without_mandatory_notes')
if form['active'].value() is False:
form.add_error('active', error_inactive)
if form['false_p'].value():
form.add_error('false_p', error_false_p)
messages.add_message(request,
messages.ERROR,
'Can not set a finding as inactive or false positive without adding all mandatory notes',
extra_tags='alert-danger')
if use_jira:
jform = JIRAFindingForm(request.POST, prefix='jiraform', push_all=push_all_jira_issues,
jira_project=jira_helper.get_jira_project(test), finding_form=form)
if form.is_valid() and (jform is None or jform.is_valid()):
new_finding = form.save(commit=False)
new_finding.test = test
new_finding.reporter = request.user
new_finding.numerical_severity = Finding.get_numerical_severity(
new_finding.severity)
create_template = new_finding.is_template
# always false now since this will be deprecated soon in favor of new Finding_Template model
new_finding.is_template = False
new_finding.tags = form.cleaned_data['tags']
new_finding.save()
new_finding.endpoints.set(form.cleaned_data['endpoints'])
for endpoint in form.cleaned_data['endpoints']:
eps, created = Endpoint_Status.objects.get_or_create(
finding=new_finding,
endpoint=endpoint)
endpoint.endpoint_status.add(eps)
new_finding.endpoint_status.add(eps)
for endpoint in new_finding.unsaved_endpoints:
ep, created = Endpoint.objects.get_or_create(
protocol=endpoint.protocol,
host=endpoint.host,
path=endpoint.path,
query=endpoint.query,
fragment=endpoint.fragment,
product=test.engagement.product)
eps, created = Endpoint_Status.objects.get_or_create(
finding=new_finding,
endpoint=ep)
ep.endpoint_status.add(eps)
new_finding.endpoints.add(ep)
new_finding.endpoint_status.add(eps)
for endpoint in form.cleaned_data['endpoints']:
ep, created = Endpoint.objects.get_or_create(
protocol=endpoint.protocol,
host=endpoint.host,
path=endpoint.path,
query=endpoint.query,
fragment=endpoint.fragment,
product=test.engagement.product)
eps, created = Endpoint_Status.objects.get_or_create(
finding=new_finding,
endpoint=ep)
ep.endpoint_status.add(eps)
new_finding.endpoints.add(ep)
new_finding.endpoint_status.add(eps)
new_finding.save()
# Push to jira?
push_to_jira = False
jira_message = None
if jform and jform.is_valid():
# Push to Jira?
logger.debug('jira form valid')
push_to_jira = push_all_jira_issues or jform.cleaned_data.get('push_to_jira')
# if the jira issue key was changed, update database
new_jira_issue_key = jform.cleaned_data.get('jira_issue')
if new_finding.has_jira_issue:
jira_issue = new_finding.jira_issue
# everything in DD around JIRA integration is based on the internal id of the issue in JIRA
# instead of on the public jira issue key.
# I have no idea why, but it means we have to retrieve the issue from JIRA to get the internal JIRA id.
# we can assume the issue exist, which is already checked in the validation of the jform
if not new_jira_issue_key:
jira_helper.finding_unlink_jira(request, new_finding)
jira_message = 'Link to JIRA issue removed successfully.'
elif new_jira_issue_key != new_finding.jira_issue.jira_key:
jira_helper.finding_unlink_jira(request, new_finding)
jira_helper.finding_link_jira(request, new_finding, new_jira_issue_key)
jira_message = 'Changed JIRA link successfully.'
else:
logger.debug('finding has no jira issue yet')
if new_jira_issue_key:
logger.debug(
'finding has no jira issue yet, but jira issue specified in request. trying to link.')
jira_helper.finding_link_jira(request, new_finding, new_jira_issue_key)
jira_message = 'Linked a JIRA issue successfully.'
if 'githubform-push_to_github' in request.POST:
gform = GITHUBFindingForm(request.POST, prefix='jiragithub', enabled=push_all_jira_issues)
if gform.is_valid():
add_external_issue(new_finding, 'github')
new_finding.save(push_to_jira=push_to_jira)
if 'request' in form.cleaned_data or 'response' in form.cleaned_data:
burp_rr = BurpRawRequestResponse(
finding=new_finding,
burpRequestBase64=base64.b64encode(form.cleaned_data['request'].encode()),
burpResponseBase64=base64.b64encode(form.cleaned_data['response'].encode()),
)
burp_rr.clean()
burp_rr.save()
messages.add_message(request,
messages.SUCCESS,
'Finding added successfully.',
extra_tags='alert-success')
if create_template:
templates = Finding_Template.objects.filter(title=new_finding.title)
if len(templates) > 0:
messages.add_message(request,
messages.ERROR,
'A finding template was not created. A template with this title already '
'exists.',
extra_tags='alert-danger')
else:
template = Finding_Template(title=new_finding.title,
cwe=new_finding.cwe,
severity=new_finding.severity,
description=new_finding.description,
mitigation=new_finding.mitigation,
impact=new_finding.impact,
references=new_finding.references,
numerical_severity=new_finding.numerical_severity)
template.save()
messages.add_message(request,
messages.SUCCESS,
'A finding template was also created.',
extra_tags='alert-success')
if '_Finished' in request.POST:
return HttpResponseRedirect(reverse('view_test', args=(test.id,)))
else:
return HttpResponseRedirect(reverse('add_findings', args=(test.id,)))
else:
if 'endpoints' in form.cleaned_data:
form.fields['endpoints'].queryset = form.cleaned_data['endpoints']
else:
form.fields['endpoints'].queryset = Endpoint.objects.none()
form_error = True
add_error_message_to_response('The form has errors, please correct them below.')
add_field_errors_to_response(jform)
add_field_errors_to_response(form)
else:
if use_jira:
jform = JIRAFindingForm(push_all=jira_helper.is_push_all_issues(test), prefix='jiraform',
jira_project=jira_helper.get_jira_project(test), finding_form=form)
if get_system_setting('enable_github'):
if GITHUB_PKey.objects.filter(product=test.engagement.product).count() != 0:
gform = GITHUBFindingForm(enabled=push_all_jira_issues, prefix='githubform')
else:
gform = None
product_tab = Product_Tab(pid, title="Add Finding", tab="engagements")
product_tab.setEngagement(eng)
return render(request, 'dojo/ad_hoc_findings.html',
{'form': form,
'product_tab': product_tab,
'temp': False,
'tid': test.id,
'pid': pid,
'form_error': form_error,
'jform': jform,
'gform': gform,
})
@user_is_authorized(Product, Permissions.Product_View, 'pid', 'staff')
def engagement_presets(request, pid):
prod = get_object_or_404(Product, id=pid)
presets = Engagement_Presets.objects.filter(product=prod).all()
product_tab = Product_Tab(prod.id, title="Engagement Presets", tab="settings")
return render(request, 'dojo/view_presets.html',
{'product_tab': product_tab,
'presets': presets,
'prod': prod})
@user_is_authorized(Product, Permissions.Product_Edit, 'pid', 'staff')
def edit_engagement_presets(request, pid, eid):
prod = get_object_or_404(Product, id=pid)
preset = get_object_or_404(Engagement_Presets, id=eid)
product_tab = Product_Tab(prod.id, title="Edit Engagement Preset", tab="settings")
if request.method == 'POST':
tform = EngagementPresetsForm(request.POST, instance=preset)
if tform.is_valid():
tform.save()
messages.add_message(
request,
messages.SUCCESS,
'Engagement Preset Successfully Updated.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('engagement_presets', args=(pid,)))
else:
tform = EngagementPresetsForm(instance=preset)
return render(request, 'dojo/edit_presets.html',
{'product_tab': product_tab,
'tform': tform,
'prod': prod})
@user_is_authorized(Product, Permissions.Product_Edit, 'pid', 'staff')
def add_engagement_presets(request, pid):
prod = get_object_or_404(Product, id=pid)
if request.method == 'POST':
tform = EngagementPresetsForm(request.POST)
if tform.is_valid():
form_copy = tform.save(commit=False)
form_copy.product = prod
form_copy.save()
tform.save_m2m()
messages.add_message(
request,
messages.SUCCESS,
'Engagement Preset Successfully Created.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('engagement_presets', args=(pid,)))
else:
tform = EngagementPresetsForm()
product_tab = Product_Tab(pid, title="New Engagement Preset", tab="settings")
return render(request, 'dojo/new_params.html', {'tform': tform, 'pid': pid, 'product_tab': product_tab})
@user_is_authorized(Product, Permissions.Product_Edit, 'pid', 'staff')
def delete_engagement_presets(request, pid, eid):
prod = get_object_or_404(Product, id=pid)
preset = get_object_or_404(Engagement_Presets, id=eid)
form = DeleteEngagementPresetsForm(instance=preset)
if request.method == 'POST':
if 'id' in request.POST:
form = DeleteEngagementPresetsForm(request.POST, instance=preset)
if form.is_valid():
preset.delete()
messages.add_message(request,
messages.SUCCESS,
'Engagement presets and engagement relationships removed.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('engagement_presets', args=(pid,)))
collector = NestedObjects(using=DEFAULT_DB_ALIAS)
collector.collect([preset])
rels = collector.nested()
product_tab = Product_Tab(pid, title="Delete Engagement Preset", tab="settings")
return render(request, 'dojo/delete_presets.html',
                  {'product': prod,
'form': form,
'product_tab': product_tab,
'rels': rels,
})
@user_is_authorized(Product, Permissions.Product_View, 'pid', 'view')
def edit_notifications(request, pid):
prod = get_object_or_404(Product, id=pid)
if request.method == 'POST':
product_notifications = Notifications.objects.filter(user=request.user).filter(product=prod).first()
if not product_notifications:
product_notifications = Notifications(user=request.user, product=prod)
logger.debug('no existing product notifications found')
else:
logger.debug('existing product notifications found')
form = ProductNotificationsForm(request.POST, instance=product_notifications)
# print(vars(form))
if form.is_valid():
form.save()
messages.add_message(request,
messages.SUCCESS,
'Notification settings updated.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('view_product', args=(pid,)))
@user_is_authorized(Product, Permissions.Product_Manage_Members, 'pid')
def add_product_member(request, pid):
product = get_object_or_404(Product, pk=pid)
memberform = Add_Product_MemberForm(initial={'product': product.id})
if request.method == 'POST':
memberform = Add_Product_MemberForm(request.POST, initial={'product': product.id})
if memberform.is_valid():
members = Product_Member.objects.filter(product=product, user=memberform.instance.user)
if members.count() > 0:
messages.add_message(request,
messages.WARNING,
'Product member already exists.',
extra_tags='alert-warning')
elif memberform.instance.role == Roles.Owner and not user_has_permission(request.user, product, Permissions.Product_Member_Add_Owner):
messages.add_message(request,
messages.WARNING,
'You are not permitted to add users as owners.',
extra_tags='alert-warning')
else:
memberform.save()
messages.add_message(request,
messages.SUCCESS,
'Product member added successfully.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('view_product', args=(pid, )))
product_tab = Product_Tab(pid, title="Add Product Member", tab="settings")
return render(request, 'dojo/new_product_member.html', {
'product': product,
'form': memberform,
'product_tab': product_tab,
})
@user_is_authorized(Product_Member, Permissions.Product_Manage_Members, 'memberid')
def edit_product_member(request, memberid):
member = get_object_or_404(Product_Member, pk=memberid)
memberform = Edit_Product_MemberForm(instance=member)
if request.method == 'POST':
memberform = Edit_Product_MemberForm(request.POST, instance=member)
if memberform.is_valid():
if member.role == Roles.Owner and not user_has_permission(request.user, member.product, Permissions.Product_Member_Add_Owner):
messages.add_message(request,
messages.WARNING,
                                     'You are not permitted to make users owners.',
extra_tags='alert-warning')
else:
memberform.save()
messages.add_message(request,
messages.SUCCESS,
'Product member updated successfully.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('view_product', args=(member.product.id, )))
product_tab = Product_Tab(member.product.id, title="Edit Product Member", tab="settings")
return render(request, 'dojo/edit_product_member.html', {
'memberid': memberid,
'form': memberform,
'product_tab': product_tab,
})
@user_is_authorized(Product_Member, Permissions.Product_Remove_Member, 'memberid')
def delete_product_member(request, memberid):
member = get_object_or_404(Product_Member, pk=memberid)
memberform = Delete_Product_MemberForm(instance=member)
if request.method == 'POST':
memberform = Delete_Product_MemberForm(request.POST, instance=member)
member = memberform.instance
user = member.user
member.delete()
messages.add_message(request,
messages.SUCCESS,
'Product member deleted successfully.',
extra_tags='alert-success')
if user == request.user:
return HttpResponseRedirect(reverse('product'))
else:
return HttpResponseRedirect(reverse('view_product', args=(member.product.id, )))
product_tab = Product_Tab(member.product.id, title="Delete Product Member", tab="settings")
return render(request, 'dojo/delete_product_member.html', {
'memberid': memberid,
'form': memberform,
'product_tab': product_tab,
})
|
py | b4163c265703c8f979baafb767cbbd1d5ee610ab | #
# UDP server
#
# import packages
import socket
import json
# define IP and PORT
IP = '127.0.0.1'
PORT = 8888
# bytes chunk to receive
BYTES = 1024
# create server side socket
server_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# bind server side socket to IP and PORT
server_socket.bind((IP, PORT))
# available boards
boards = []
for index in range(5):
boards.append( {
'id': index + 1,
'red': False, # not "connected"
'black': False, # not "connected"
'moves': []
})
# listen to incoming requests
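# each datagram is expected to be a JSON object like {"gameId": "1", "side": "red", "move": "connect"}
# (keys inferred from the handling below; "move" may also be "disconnect", "get", or a square number)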
while True:
# receive request from client
client_data, credentials = server_socket.recvfrom(BYTES)
# parse client data
client_data = json.loads(client_data.decode())
print('client:', client_data, credentials)
# init game
try:
game = boards[int(client_data['gameId']) - 1]
except Exception as e:
print(e)
server_socket.sendto(b'board does not exist!', (credentials))
continue
if client_data['move'] == 'connect':
game[client_data['side']] = True
elif client_data['move'] == 'disconnect':
boards[int(client_data['gameId']) - 1] = {
'id': int(client_data['gameId']),
'red': False, # not "connected"
'black': False, # not "connected"
'moves': []
}
elif client_data['move'] != 'get':
game['moves'].append(int(client_data['move']))
print('got move from UI')
# send response to client
server_socket.sendto(str.encode(json.dumps(game)), (credentials))
print('sent response to client', credentials)
# close socket
server_socket.close()
|
py | b4163d4135ac08594560b52eb317e9edee968b7f | """
count - itertools
"""
from itertools import count
# count() by itself never ends
# the counter is stopped inside the for loop below with an if + break
contador = count(start=9, step=-1)
# start: the value counting begins at
# step: how much to jump each time, e.g. 2, 4, 6, 8; a negative step such as -1 counts backwards
for c in contador:
print(round(c, 2))
    # round to the given number of decimal places
if c >= 10 or c <= -10:
break
print('#' * 20)
contador1 = count()
lista = ['Pedro', 'Henrique', 'Paula']
lista = zip(contador1, lista)
print(list(lista))
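# expected output: [(0, 'Pedro'), (1, 'Henrique'), (2, 'Paula')]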
|
py | b4163dd78ac1251f4ebc5425c74d35b448b352fe | from astropy.utils.data import get_pkg_data_filename
from astropy.io import fits
from matplotlib import pyplot as plt
from regions import read_ds9
image_file = get_pkg_data_filename('tutorials/FITS-images/HorseHead.fits')
print(image_file)
image_data = fits.getdata(image_file, ext=0)
ax = plt.gca()
plt.imshow(image_data, cmap='gray')
print(ax.get_xlim(), ax.get_ylim())
ax.set_ylim([-0.5, 892.5])
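# imshow() draws with the y-axis inverted by default; restoring ylim to [-0.5, 892.5] flips it back
# so the DS9 regions overlay in normal image orientation (assumes an 893-pixel-high image)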
regs = read_ds9('plot_image.reg')
for i, reg in enumerate(regs):
reg.plot(ax=ax)
plt.show()
|
py | b4163f61f8c6c76db7977c787a9d7baea6a42f32 | # File: mcmlib.py
|
py | b4163f93567336e56f036863d773b95939b06849 | #!/usr/bin/env python3
import os
import subprocess
import optparse
CUR_DIR = os.path.abspath(os.path.dirname(__file__))
''' Generate "perf report --stdio" text output for every perf .data file
found in a directory. If no output directory is given, the .stdio.txt
reports are written next to the input data files.
'''
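# Example invocation (paths illustrative): python3 perfstdio.py --dir perf_data --out perf_stdio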
class PerfStdio(object):
def __init__(self, dir_name, out_dir_name = None):
self.dir_name = os.path.join(CUR_DIR, dir_name)
if out_dir_name is None:
self.out_dir_name = self.dir_name
else:
            self.out_dir_name = out_dir_name
self.out_dir_name = os.path.join(CUR_DIR, self.out_dir_name)
def gen_stdio(self):
if (not os.path.isdir(self.out_dir_name)):
os.mkdir(self.out_dir_name)
for f in os.listdir(self.dir_name):
if f.endswith(("data")):
print("parsing %s ..." % f)
stdio_filename = f + ".stdio.txt"
cmd_arg = "sudo perf report -f --stdio -i %s" % ( \
os.path.join(self.dir_name, f))
out_fd = open(os.path.join(self.out_dir_name, stdio_filename), "w")
self._exec_cmd(cmd_arg, out_fd)
out_fd.close()
print("parsing %s ... done" % f)
def _exec_cmd(self, cmd, out=subprocess.STDOUT):
p = subprocess.Popen(cmd, shell=True,
stdout=out, stderr=subprocess.PIPE)
p.wait()
if out is not subprocess.STDOUT:
out.flush()
return p
def __print_usage():
print("Usage: perfstdio.py --dir [directory]")
print(" --out [output directory]")
if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option("--dir", help="perf data directory")
parser.add_option("--out", help="perf stdio output directory")
(opt, args) = parser.parse_args()
if opt.dir is None:
__print_usage()
exit(1)
perfstdio = PerfStdio(opt.dir, opt.out)
perfstdio.gen_stdio()
|
py | b4164001c47083c9e1ce92218daef8bb0734c59f | from chatterbot.logic import LogicAdapter
from nltk import pos_tag
from sugaroid.brain.constants import (
WHO_AM_I,
WHO_ARE_YOU,
SUGAROID,
HOW_DO_YOU_FEEL,
HOW_DO_I_FEEL,
HOW_DO_HE_FEEL,
)
from sugaroid.brain.ooo import Emotion
from sugaroid.brain.postprocessor import random_response
from sugaroid.brain.preprocessors import normalize, spac_token
from sugaroid.core.statement import SugaroidStatement
class FeelAdapter(LogicAdapter):
"""
Handles sentences containing the word feel
"""
def __init__(self, chatbot, **kwargs):
super().__init__(chatbot, **kwargs)
def can_process(self, statement):
self.normalized = normalize(str(statement))
self.token = pos_tag(self.normalized)
if "feel" in self.normalized:
return True
else:
return False
def process(self, statement, additional_response_selection_parameters=None):
confidence = 0.9
# FIXME Creates unusual response
nn = False
it = False
token = spac_token(statement, chatbot=self.chatbot)
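        # note whether the sentence mentions a noun/proper noun (nn) or the pronoun "it"; these flags pick the response branch below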
for i in token:
if (i.tag_ == "NNP") or (i.tag_ == "NN"):
nn = True
if i.lower_ == "it":
it = True
if nn and not it:
response = random_response(HOW_DO_HE_FEEL)
emotion = Emotion.seriously
elif it:
response = "Ask it!"
emotion = Emotion.o
elif "I" in self.normalized:
emotion = Emotion.depressed
response = random_response(HOW_DO_I_FEEL)
else:
emotion = Emotion.blush
response = random_response(HOW_DO_YOU_FEEL)
selected_statement = SugaroidStatement(response, chatbot=True)
selected_statement.confidence = confidence
selected_statement.emotion = emotion
return selected_statement
|
py | b416415b3a6cbddc4016544be3ea42a97d4de9bf | """ Cisco_IOS_XR_infra_xtc_agent_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR infra\-xtc\-agent package operational data.
This module contains definitions
for the following management objects\:
pcc\: Path\-computation client in XTC
xtc\: xtc
Copyright (c) 2013\-2017 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class XtcAfId(Enum):
"""
XtcAfId (Enum Class)
Xtc af id
.. data:: none = 0
None
.. data:: ipv4 = 1
IPv4
.. data:: ipv6 = 2
IPv6
"""
none = Enum.YLeaf(0, "none")
ipv4 = Enum.YLeaf(1, "ipv4")
ipv6 = Enum.YLeaf(2, "ipv6")
class XtcBsidError(Enum):
"""
XtcBsidError (Enum Class)
Xtc bsid error
.. data:: none = 0
No error
.. data:: allocating = 1
Error allocating via LSD
.. data:: exists = 2
Explicitly configured BSID already exists
.. data:: internal = 3
Internal error
.. data:: color_endpoint_exists = 4
Configured BSID used by another color/end-point
.. data:: forwarding_rewrite_error = 5
BSID Forwarding rewrite (label xconnect) failed
.. data:: srlb_invalid_label = 6
BSID not valid within SRLB range
"""
none = Enum.YLeaf(0, "none")
allocating = Enum.YLeaf(1, "allocating")
exists = Enum.YLeaf(2, "exists")
internal = Enum.YLeaf(3, "internal")
color_endpoint_exists = Enum.YLeaf(4, "color-endpoint-exists")
forwarding_rewrite_error = Enum.YLeaf(5, "forwarding-rewrite-error")
srlb_invalid_label = Enum.YLeaf(6, "srlb-invalid-label")
class XtcBsidMode(Enum):
"""
XtcBsidMode (Enum Class)
XTC BSID MODE type
.. data:: explicit = 0
Explicit binding SID
.. data:: dynamic = 1
Dynamic binding SID
"""
explicit = Enum.YLeaf(0, "explicit")
dynamic = Enum.YLeaf(1, "dynamic")
class XtcDisjointness(Enum):
"""
XtcDisjointness (Enum Class)
XTC policy path type
.. data:: no_disjointness = 0
No Disjointness
.. data:: link_disjointness = 1
Link disjointness
.. data:: node_disjointness = 2
Node disjointness
.. data:: srlg_disjointness = 3
SRLG disjointness
.. data:: srlg_node_disjointness = 4
SRLG-Node disjointness
"""
no_disjointness = Enum.YLeaf(0, "no-disjointness")
link_disjointness = Enum.YLeaf(1, "link-disjointness")
node_disjointness = Enum.YLeaf(2, "node-disjointness")
srlg_disjointness = Enum.YLeaf(3, "srlg-disjointness")
srlg_node_disjointness = Enum.YLeaf(4, "srlg-node-disjointness")
class XtcIgpInfoId(Enum):
"""
XtcIgpInfoId (Enum Class)
IGP IDs
.. data:: isis = 1
ISIS
.. data:: ospf = 2
OSPF
.. data:: bgp = 3
BGP
"""
isis = Enum.YLeaf(1, "isis")
ospf = Enum.YLeaf(2, "ospf")
bgp = Enum.YLeaf(3, "bgp")
class XtcPolicyPath(Enum):
"""
XtcPolicyPath (Enum Class)
XTC policy path type
.. data:: explicit = 0
Explicit path
.. data:: dynamic = 1
Dynamic path
.. data:: dynamic_pce = 2
Dynamic PCE-based path
"""
explicit = Enum.YLeaf(0, "explicit")
dynamic = Enum.YLeaf(1, "dynamic")
dynamic_pce = Enum.YLeaf(2, "dynamic-pce")
class XtcSid(Enum):
"""
XtcSid (Enum Class)
Xtc sid
.. data:: none = 0
None
.. data:: mpls = 1
MPLS
.. data:: ipv6 = 2
IPv6
"""
none = Enum.YLeaf(0, "none")
mpls = Enum.YLeaf(1, "mpls")
ipv6 = Enum.YLeaf(2, "ipv6")
class XtcSid1(Enum):
"""
XtcSid1 (Enum Class)
XTC SID Types
.. data:: sr_protected_adj_sid = 1
Protected Adjacency SID
.. data:: sr_unprotected_adj_sid = 2
Unprotected Adjacency SID
.. data:: sr_bgp_egress_peer_engineering_sid = 3
BGP egress peer engineering SID
.. data:: sr_reqular_prefix_sid = 4
Regular prefix SID
.. data:: sr_strict_prefix_sid = 5
Strict prefix SID
"""
sr_protected_adj_sid = Enum.YLeaf(1, "sr-protected-adj-sid")
sr_unprotected_adj_sid = Enum.YLeaf(2, "sr-unprotected-adj-sid")
sr_bgp_egress_peer_engineering_sid = Enum.YLeaf(3, "sr-bgp-egress-peer-engineering-sid")
sr_reqular_prefix_sid = Enum.YLeaf(4, "sr-reqular-prefix-sid")
sr_strict_prefix_sid = Enum.YLeaf(5, "sr-strict-prefix-sid")
class XtcSrSid(Enum):
"""
XtcSrSid (Enum Class)
XTC SR SID type
.. data:: ipv4_node_sid = 0
IPv4 Node SID
.. data:: ipv4_adjacency_sid = 1
IPv4 Adjacency SID
.. data:: unknown_sid = 2
Unknown SID
"""
ipv4_node_sid = Enum.YLeaf(0, "ipv4-node-sid")
ipv4_adjacency_sid = Enum.YLeaf(1, "ipv4-adjacency-sid")
unknown_sid = Enum.YLeaf(2, "unknown-sid")
class Pcc(Entity):
"""
Path\-computation client in XTC
.. attribute:: plsps
PCC PLSP database in XTC
**type**\: :py:class:`Plsps <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps>`
.. attribute:: peers
PCC peer database in XTC
**type**\: :py:class:`Peers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Peers>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc, self).__init__()
self._top_entity = None
self.yang_name = "pcc"
self.yang_parent_name = "Cisco-IOS-XR-infra-xtc-agent-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("plsps", ("plsps", Pcc.Plsps)), ("peers", ("peers", Pcc.Peers))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.plsps = Pcc.Plsps()
self.plsps.parent = self
self._children_name_map["plsps"] = "plsps"
self._children_yang_names.add("plsps")
self.peers = Pcc.Peers()
self.peers.parent = self
self._children_name_map["peers"] = "peers"
self._children_yang_names.add("peers")
self._segment_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:pcc"
class Plsps(Entity):
"""
PCC PLSP database in XTC
.. attribute:: plsp
PCC PLSP information
**type**\: list of :py:class:`Plsp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps, self).__init__()
self.yang_name = "plsps"
self.yang_parent_name = "pcc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("plsp", ("plsp", Pcc.Plsps.Plsp))])
self._leafs = OrderedDict()
self.plsp = YList(self)
self._segment_path = lambda: "plsps"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:pcc/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps, [], name, value)
class Plsp(Entity):
"""
PCC PLSP information
.. attribute:: plsp_id (key)
PLSP ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: stats
Stats
**type**\: :py:class:`Stats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Stats>`
.. attribute:: plsp_id_xr
PLSP ID
**type**\: int
**range:** 0..4294967295
.. attribute:: sym_path_name
Symbolic Path Name
**type**\: str
.. attribute:: refcnt
Refcnt
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: conn_delegated_to
CONN delegated to
**type**\: int
**range:** 0..4294967295
.. attribute:: event_history
event history
**type**\: list of :py:class:`EventHistory <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.EventHistory>`
.. attribute:: path
path
**type**\: list of :py:class:`Path <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp, self).__init__()
self.yang_name = "plsp"
self.yang_parent_name = "plsps"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['plsp_id']
self._child_container_classes = OrderedDict([("stats", ("stats", Pcc.Plsps.Plsp.Stats))])
self._child_list_classes = OrderedDict([("event-history", ("event_history", Pcc.Plsps.Plsp.EventHistory)), ("path", ("path", Pcc.Plsps.Plsp.Path))])
self._leafs = OrderedDict([
('plsp_id', YLeaf(YType.int32, 'plsp-id')),
('plsp_id_xr', YLeaf(YType.uint32, 'plsp-id-xr')),
('sym_path_name', YLeaf(YType.str, 'sym-path-name')),
('refcnt', YLeaf(YType.int64, 'refcnt')),
('conn_delegated_to', YLeaf(YType.uint32, 'conn-delegated-to')),
])
self.plsp_id = None
self.plsp_id_xr = None
self.sym_path_name = None
self.refcnt = None
self.conn_delegated_to = None
self.stats = Pcc.Plsps.Plsp.Stats()
self.stats.parent = self
self._children_name_map["stats"] = "stats"
self._children_yang_names.add("stats")
self.event_history = YList(self)
self.path = YList(self)
self._segment_path = lambda: "plsp" + "[plsp-id='" + str(self.plsp_id) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:pcc/plsps/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp, ['plsp_id', 'plsp_id_xr', 'sym_path_name', 'refcnt', 'conn_delegated_to'], name, value)
class Stats(Entity):
"""
Stats
.. attribute:: paths_created
Paths Created
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: paths_destroyed
Paths Destroyed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: path_create_errors
Path create errors
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: path_destroy_errors
Path destroy errors
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: requests_created
Requests created
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: requests_destroyed
Requests destroyed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: requests_failed
Requests failed
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Stats, self).__init__()
self.yang_name = "stats"
self.yang_parent_name = "plsp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('paths_created', YLeaf(YType.uint64, 'paths-created')),
('paths_destroyed', YLeaf(YType.uint64, 'paths-destroyed')),
('path_create_errors', YLeaf(YType.uint64, 'path-create-errors')),
('path_destroy_errors', YLeaf(YType.uint64, 'path-destroy-errors')),
('requests_created', YLeaf(YType.uint64, 'requests-created')),
('requests_destroyed', YLeaf(YType.uint64, 'requests-destroyed')),
('requests_failed', YLeaf(YType.uint64, 'requests-failed')),
])
self.paths_created = None
self.paths_destroyed = None
self.path_create_errors = None
self.path_destroy_errors = None
self.requests_created = None
self.requests_destroyed = None
self.requests_failed = None
self._segment_path = lambda: "stats"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Stats, ['paths_created', 'paths_destroyed', 'path_create_errors', 'path_destroy_errors', 'requests_created', 'requests_destroyed', 'requests_failed'], name, value)
class EventHistory(Entity):
"""
event history
.. attribute:: ts
Timestamp
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: desc
Description
**type**\: str
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.EventHistory, self).__init__()
self.yang_name = "event-history"
self.yang_parent_name = "plsp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('ts', YLeaf(YType.uint64, 'ts')),
('desc', YLeaf(YType.str, 'desc')),
])
self.ts = None
self.desc = None
self._segment_path = lambda: "event-history"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.EventHistory, ['ts', 'desc'], name, value)
class Path(Entity):
"""
path
.. attribute:: stats
stats
**type**\: :py:class:`Stats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path.Stats>`
.. attribute:: used_bw
used bw
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: requested_bw
requested bw
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: metric_value
metric value
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: refcnt
refcnt
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: lsp_plsp_id
LSP PLSP ID
**type**\: int
**range:** 0..4294967295
.. attribute:: binding_sid_value
Binding SID
**type**\: int
**range:** 0..4294967295
.. attribute:: lsp_id_tlv_ext_tunnel_id
Ext Tun ID
**type**\: int
**range:** 0..4294967295
.. attribute:: lsp_id_tlv_tunnel_endpoint_address
Tun endpoint address
**type**\: int
**range:** 0..4294967295
.. attribute:: lsp_id_tlv_tunnel_sender_address
Tun sender address
**type**\: int
**range:** 0..4294967295
.. attribute:: srp_id
SRP ID
**type**\: int
**range:** 0..4294967295
.. attribute:: lsp_id_tlv_lsp_id
LSP ID
**type**\: int
**range:** 0..65535
.. attribute:: lsp_id_tlv_tunnel_id
Tunnel ID
**type**\: int
**range:** 0..65535
.. attribute:: lsp_id
Application LSP ID
**type**\: int
**range:** 0..65535
.. attribute:: binding_sid_type
Binding SID type
**type**\: int
**range:** 0..65535
.. attribute:: lsp_oper
LSP oper flags
**type**\: int
**range:** 0..255
.. attribute:: path_setup_type
Path setup type
**type**\: int
**range:** 0..255
.. attribute:: metric_type
Metric type
**type**\: int
**range:** 0..255
.. attribute:: is_reported
is reported
**type**\: bool
.. attribute:: lsp_a_flag
LSP A Flag
**type**\: bool
.. attribute:: lsp_r_flag
LSP R Flag
**type**\: bool
.. attribute:: lsp_s_flag
LSP S Flag
**type**\: bool
.. attribute:: lsp_d_flag
LSP D Flag
**type**\: bool
.. attribute:: lsp_c_flag
LSP C Flag
**type**\: bool
.. attribute:: ero_hop
ero hop
**type**\: list of :py:class:`EroHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path.EroHop>`
.. attribute:: rro_hop
rro hop
**type**\: list of :py:class:`RroHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path.RroHop>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path, self).__init__()
self.yang_name = "path"
self.yang_parent_name = "plsp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("stats", ("stats", Pcc.Plsps.Plsp.Path.Stats))])
self._child_list_classes = OrderedDict([("ero-hop", ("ero_hop", Pcc.Plsps.Plsp.Path.EroHop)), ("rro-hop", ("rro_hop", Pcc.Plsps.Plsp.Path.RroHop))])
self._leafs = OrderedDict([
('used_bw', YLeaf(YType.int64, 'used-bw')),
('requested_bw', YLeaf(YType.int64, 'requested-bw')),
('metric_value', YLeaf(YType.int64, 'metric-value')),
('refcnt', YLeaf(YType.int64, 'refcnt')),
('lsp_plsp_id', YLeaf(YType.uint32, 'lsp-plsp-id')),
('binding_sid_value', YLeaf(YType.uint32, 'binding-sid-value')),
('lsp_id_tlv_ext_tunnel_id', YLeaf(YType.uint32, 'lsp-id-tlv-ext-tunnel-id')),
('lsp_id_tlv_tunnel_endpoint_address', YLeaf(YType.uint32, 'lsp-id-tlv-tunnel-endpoint-address')),
('lsp_id_tlv_tunnel_sender_address', YLeaf(YType.uint32, 'lsp-id-tlv-tunnel-sender-address')),
('srp_id', YLeaf(YType.uint32, 'srp-id')),
('lsp_id_tlv_lsp_id', YLeaf(YType.uint16, 'lsp-id-tlv-lsp-id')),
('lsp_id_tlv_tunnel_id', YLeaf(YType.uint16, 'lsp-id-tlv-tunnel-id')),
('lsp_id', YLeaf(YType.uint16, 'lsp-id')),
('binding_sid_type', YLeaf(YType.uint16, 'binding-sid-type')),
('lsp_oper', YLeaf(YType.uint8, 'lsp-oper')),
('path_setup_type', YLeaf(YType.uint8, 'path-setup-type')),
('metric_type', YLeaf(YType.uint8, 'metric-type')),
('is_reported', YLeaf(YType.boolean, 'is-reported')),
('lsp_a_flag', YLeaf(YType.boolean, 'lsp-a-flag')),
('lsp_r_flag', YLeaf(YType.boolean, 'lsp-r-flag')),
('lsp_s_flag', YLeaf(YType.boolean, 'lsp-s-flag')),
('lsp_d_flag', YLeaf(YType.boolean, 'lsp-d-flag')),
('lsp_c_flag', YLeaf(YType.boolean, 'lsp-c-flag')),
])
self.used_bw = None
self.requested_bw = None
self.metric_value = None
self.refcnt = None
self.lsp_plsp_id = None
self.binding_sid_value = None
self.lsp_id_tlv_ext_tunnel_id = None
self.lsp_id_tlv_tunnel_endpoint_address = None
self.lsp_id_tlv_tunnel_sender_address = None
self.srp_id = None
self.lsp_id_tlv_lsp_id = None
self.lsp_id_tlv_tunnel_id = None
self.lsp_id = None
self.binding_sid_type = None
self.lsp_oper = None
self.path_setup_type = None
self.metric_type = None
self.is_reported = None
self.lsp_a_flag = None
self.lsp_r_flag = None
self.lsp_s_flag = None
self.lsp_d_flag = None
self.lsp_c_flag = None
self.stats = Pcc.Plsps.Plsp.Path.Stats()
self.stats.parent = self
self._children_name_map["stats"] = "stats"
self._children_yang_names.add("stats")
self.ero_hop = YList(self)
self.rro_hop = YList(self)
self._segment_path = lambda: "path"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path, ['used_bw', 'requested_bw', 'metric_value', 'refcnt', 'lsp_plsp_id', 'binding_sid_value', 'lsp_id_tlv_ext_tunnel_id', 'lsp_id_tlv_tunnel_endpoint_address', 'lsp_id_tlv_tunnel_sender_address', 'srp_id', 'lsp_id_tlv_lsp_id', 'lsp_id_tlv_tunnel_id', 'lsp_id', 'binding_sid_type', 'lsp_oper', 'path_setup_type', 'metric_type', 'is_reported', 'lsp_a_flag', 'lsp_r_flag', 'lsp_s_flag', 'lsp_d_flag', 'lsp_c_flag'], name, value)
class Stats(Entity):
"""
stats
.. attribute:: reports_requested
Reports requested
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: reports_sent
Reports sent
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: reports_failed_to_send
Reports failed
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path.Stats, self).__init__()
self.yang_name = "stats"
self.yang_parent_name = "path"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('reports_requested', YLeaf(YType.uint64, 'reports-requested')),
('reports_sent', YLeaf(YType.uint64, 'reports-sent')),
('reports_failed_to_send', YLeaf(YType.uint64, 'reports-failed-to-send')),
])
self.reports_requested = None
self.reports_sent = None
self.reports_failed_to_send = None
self._segment_path = lambda: "stats"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path.Stats, ['reports_requested', 'reports_sent', 'reports_failed_to_send'], name, value)
class EroHop(Entity):
"""
ero hop
.. attribute:: data
data
**type**\: :py:class:`Data <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path.EroHop.Data>`
.. attribute:: loose
is loose hop
**type**\: bool
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path.EroHop, self).__init__()
self.yang_name = "ero-hop"
self.yang_parent_name = "path"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("data", ("data", Pcc.Plsps.Plsp.Path.EroHop.Data))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('loose', YLeaf(YType.boolean, 'loose')),
])
self.loose = None
self.data = Pcc.Plsps.Plsp.Path.EroHop.Data()
self.data.parent = self
self._children_name_map["data"] = "data"
self._children_yang_names.add("data")
self._segment_path = lambda: "ero-hop"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path.EroHop, ['loose'], name, value)
class Data(Entity):
"""
data
.. attribute:: ipv4
IPv4 hop info
**type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path.EroHop.Data.Ipv4>`
.. attribute:: sr_v4
SR IPv4 hop info
**type**\: :py:class:`SrV4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path.EroHop.Data.SrV4>`
.. attribute:: hop_type
HopType
**type**\: int
**range:** 0..255
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path.EroHop.Data, self).__init__()
self.yang_name = "data"
self.yang_parent_name = "ero-hop"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("ipv4", ("ipv4", Pcc.Plsps.Plsp.Path.EroHop.Data.Ipv4)), ("sr-v4", ("sr_v4", Pcc.Plsps.Plsp.Path.EroHop.Data.SrV4))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('hop_type', YLeaf(YType.uint8, 'hop-type')),
])
self.hop_type = None
self.ipv4 = Pcc.Plsps.Plsp.Path.EroHop.Data.Ipv4()
self.ipv4.parent = self
self._children_name_map["ipv4"] = "ipv4"
self._children_yang_names.add("ipv4")
self.sr_v4 = Pcc.Plsps.Plsp.Path.EroHop.Data.SrV4()
self.sr_v4.parent = self
self._children_name_map["sr_v4"] = "sr-v4"
self._children_yang_names.add("sr-v4")
self._segment_path = lambda: "data"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path.EroHop.Data, ['hop_type'], name, value)
class Ipv4(Entity):
"""
IPv4 hop info
.. attribute:: v4_addr
IPv4 prefix
**type**\: int
**range:** 0..4294967295
.. attribute:: prefix_len
Prefix length
**type**\: int
**range:** 0..255
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path.EroHop.Data.Ipv4, self).__init__()
self.yang_name = "ipv4"
self.yang_parent_name = "data"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('v4_addr', YLeaf(YType.uint32, 'v4-addr')),
('prefix_len', YLeaf(YType.uint8, 'prefix-len')),
])
self.v4_addr = None
self.prefix_len = None
self._segment_path = lambda: "ipv4"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path.EroHop.Data.Ipv4, ['v4_addr', 'prefix_len'], name, value)
class SrV4(Entity):
"""
SR IPv4 hop info
.. attribute:: type
SID type
**type**\: int
**range:** 0..255
.. attribute:: cflag
C flag
**type**\: bool
.. attribute:: sid
SID
**type**\: int
**range:** 0..4294967295
.. attribute:: remote_addr
Remote address
**type**\: int
**range:** 0..4294967295
.. attribute:: local_addr
Local address
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path.EroHop.Data.SrV4, self).__init__()
self.yang_name = "sr-v4"
self.yang_parent_name = "data"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('type', YLeaf(YType.uint8, 'type')),
('cflag', YLeaf(YType.boolean, 'cflag')),
('sid', YLeaf(YType.uint32, 'sid')),
('remote_addr', YLeaf(YType.uint32, 'remote-addr')),
('local_addr', YLeaf(YType.uint32, 'local-addr')),
])
self.type = None
self.cflag = None
self.sid = None
self.remote_addr = None
self.local_addr = None
self._segment_path = lambda: "sr-v4"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path.EroHop.Data.SrV4, ['type', 'cflag', 'sid', 'remote_addr', 'local_addr'], name, value)
class RroHop(Entity):
"""
rro hop
.. attribute:: data
data
**type**\: :py:class:`Data <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path.RroHop.Data>`
.. attribute:: loose
is loose hop
**type**\: bool
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path.RroHop, self).__init__()
self.yang_name = "rro-hop"
self.yang_parent_name = "path"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("data", ("data", Pcc.Plsps.Plsp.Path.RroHop.Data))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('loose', YLeaf(YType.boolean, 'loose')),
])
self.loose = None
self.data = Pcc.Plsps.Plsp.Path.RroHop.Data()
self.data.parent = self
self._children_name_map["data"] = "data"
self._children_yang_names.add("data")
self._segment_path = lambda: "rro-hop"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path.RroHop, ['loose'], name, value)
class Data(Entity):
"""
data
.. attribute:: ipv4
IPv4 hop info
**type**\: :py:class:`Ipv4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path.RroHop.Data.Ipv4>`
.. attribute:: sr_v4
SR IPv4 hop info
**type**\: :py:class:`SrV4 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Plsps.Plsp.Path.RroHop.Data.SrV4>`
.. attribute:: hop_type
HopType
**type**\: int
**range:** 0..255
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path.RroHop.Data, self).__init__()
self.yang_name = "data"
self.yang_parent_name = "rro-hop"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("ipv4", ("ipv4", Pcc.Plsps.Plsp.Path.RroHop.Data.Ipv4)), ("sr-v4", ("sr_v4", Pcc.Plsps.Plsp.Path.RroHop.Data.SrV4))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('hop_type', YLeaf(YType.uint8, 'hop-type')),
])
self.hop_type = None
self.ipv4 = Pcc.Plsps.Plsp.Path.RroHop.Data.Ipv4()
self.ipv4.parent = self
self._children_name_map["ipv4"] = "ipv4"
self._children_yang_names.add("ipv4")
self.sr_v4 = Pcc.Plsps.Plsp.Path.RroHop.Data.SrV4()
self.sr_v4.parent = self
self._children_name_map["sr_v4"] = "sr-v4"
self._children_yang_names.add("sr-v4")
self._segment_path = lambda: "data"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path.RroHop.Data, ['hop_type'], name, value)
class Ipv4(Entity):
"""
IPv4 hop info
.. attribute:: v4_addr
IPv4 prefix
**type**\: int
**range:** 0..4294967295
.. attribute:: prefix_len
Prefix length
**type**\: int
**range:** 0..255
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path.RroHop.Data.Ipv4, self).__init__()
self.yang_name = "ipv4"
self.yang_parent_name = "data"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('v4_addr', YLeaf(YType.uint32, 'v4-addr')),
('prefix_len', YLeaf(YType.uint8, 'prefix-len')),
])
self.v4_addr = None
self.prefix_len = None
self._segment_path = lambda: "ipv4"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path.RroHop.Data.Ipv4, ['v4_addr', 'prefix_len'], name, value)
class SrV4(Entity):
"""
SR IPv4 hop info
.. attribute:: type
SID type
**type**\: int
**range:** 0..255
.. attribute:: cflag
C flag
**type**\: bool
.. attribute:: sid
SID
**type**\: int
**range:** 0..4294967295
.. attribute:: remote_addr
Remote address
**type**\: int
**range:** 0..4294967295
.. attribute:: local_addr
Local address
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Plsps.Plsp.Path.RroHop.Data.SrV4, self).__init__()
self.yang_name = "sr-v4"
self.yang_parent_name = "data"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('type', YLeaf(YType.uint8, 'type')),
('cflag', YLeaf(YType.boolean, 'cflag')),
('sid', YLeaf(YType.uint32, 'sid')),
('remote_addr', YLeaf(YType.uint32, 'remote-addr')),
('local_addr', YLeaf(YType.uint32, 'local-addr')),
])
self.type = None
self.cflag = None
self.sid = None
self.remote_addr = None
self.local_addr = None
self._segment_path = lambda: "sr-v4"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Plsps.Plsp.Path.RroHop.Data.SrV4, ['type', 'cflag', 'sid', 'remote_addr', 'local_addr'], name, value)
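# Editor's note (assumption, not generated content): once a Pcc instance has been
# read back from a device, a PLSP's reported paths and their ERO/RRO hops are
# plain YLists and can be walked directly, e.g.:
#     for plsp in pcc.plsps.plsp:
#         for path in plsp.path:
#             for hop in path.ero_hop:
#                 print(hop.loose, hop.data.hop_type, hop.data.sr_v4.sid)
# Field names follow the classes above; whether the sr-v4 or ipv4 container is
# populated depends on hop-type, which this sketch does not check.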
class Peers(Entity):
"""
PCC peer database in XTC
.. attribute:: peer
PCC peer information
**type**\: list of :py:class:`Peer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Peers.Peer>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Peers, self).__init__()
self.yang_name = "peers"
self.yang_parent_name = "pcc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("peer", ("peer", Pcc.Peers.Peer))])
self._leafs = OrderedDict()
self.peer = YList(self)
self._segment_path = lambda: "peers"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:pcc/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Peers, [], name, value)
class Peer(Entity):
"""
PCC peer information
.. attribute:: peer_addr (key)
Peer Address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: socket_info
socket info
**type**\: :py:class:`SocketInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Peers.Peer.SocketInfo>`
.. attribute:: stats
stats
**type**\: :py:class:`Stats <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Pcc.Peers.Peer.Stats>`
.. attribute:: handle
internal handle
**type**\: int
**range:** 0..4294967295
.. attribute:: state_str
connection state
**type**\: str
.. attribute:: local_ok
local accepted
**type**\: bool
.. attribute:: remote_ok
remote accepted
**type**\: bool
.. attribute:: open_retry
open retry count
**type**\: int
**range:** 0..4294967295
.. attribute:: ref_cnt
ref count
**type**\: int
**range:** 0..4294967295
.. attribute:: rx_state_str
socket state
**type**\: str
.. attribute:: holddown_counter
holddown counter
**type**\: int
**range:** 0..65535
.. attribute:: pcep_up_ts
PCEP up timestamp
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: precedence
Precedence
**type**\: int
**range:** 0..255
.. attribute:: ka_interval_local
KA interval local
**type**\: int
**range:** 0..4294967295
.. attribute:: ka_interval_remote
KA interval remote
**type**\: int
**range:** 0..4294967295
.. attribute:: dead_interval_local
Dead interval local
**type**\: int
**range:** 0..4294967295
.. attribute:: dead_interval_remote
Dead interval remote
**type**\: int
**range:** 0..4294967295
.. attribute:: pcep_session_id_local
PCEP session ID local
**type**\: int
**range:** 0..4294967295
.. attribute:: pcep_session_id_remote
PCEP session ID remote
**type**\: int
**range:** 0..4294967295
.. attribute:: pcep_server_ipv4_addr
PCEP server Ipv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: pcep_client_ipv4_addr
PCEP client Ipv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: is_stateful_local
is stateful local
**type**\: bool
.. attribute:: is_stateful_remote
is stateful remote
**type**\: bool
.. attribute:: is_stateful_u_flag_local
is stateful with U flag local
**type**\: bool
.. attribute:: is_stateful_u_flag_remote
is stateful with U flag remote
**type**\: bool
.. attribute:: is_segment_routing_local
is segment routing local
**type**\: bool
.. attribute:: is_segment_routing_remote
is segment routing remote
**type**\: bool
.. attribute:: is_best_pce
is this the best PCE to delegate to
**type**\: bool
.. attribute:: sr_msd_local
SR MSD local
**type**\: int
**range:** 0..255
.. attribute:: sr_msd_remote
SR MSD remote
**type**\: int
**range:** 0..255
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Peers.Peer, self).__init__()
self.yang_name = "peer"
self.yang_parent_name = "peers"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['peer_addr']
self._child_container_classes = OrderedDict([("socket-info", ("socket_info", Pcc.Peers.Peer.SocketInfo)), ("stats", ("stats", Pcc.Peers.Peer.Stats))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('peer_addr', YLeaf(YType.str, 'peer-addr')),
('handle', YLeaf(YType.uint32, 'handle')),
('state_str', YLeaf(YType.str, 'state-str')),
('local_ok', YLeaf(YType.boolean, 'local-ok')),
('remote_ok', YLeaf(YType.boolean, 'remote-ok')),
('open_retry', YLeaf(YType.uint32, 'open-retry')),
('ref_cnt', YLeaf(YType.uint32, 'ref-cnt')),
('rx_state_str', YLeaf(YType.str, 'rx-state-str')),
('holddown_counter', YLeaf(YType.uint16, 'holddown-counter')),
('pcep_up_ts', YLeaf(YType.uint64, 'pcep-up-ts')),
('precedence', YLeaf(YType.uint8, 'precedence')),
('ka_interval_local', YLeaf(YType.uint32, 'ka-interval-local')),
('ka_interval_remote', YLeaf(YType.uint32, 'ka-interval-remote')),
('dead_interval_local', YLeaf(YType.uint32, 'dead-interval-local')),
('dead_interval_remote', YLeaf(YType.uint32, 'dead-interval-remote')),
('pcep_session_id_local', YLeaf(YType.uint32, 'pcep-session-id-local')),
('pcep_session_id_remote', YLeaf(YType.uint32, 'pcep-session-id-remote')),
('pcep_server_ipv4_addr', YLeaf(YType.str, 'pcep-server-ipv4-addr')),
('pcep_client_ipv4_addr', YLeaf(YType.str, 'pcep-client-ipv4-addr')),
('is_stateful_local', YLeaf(YType.boolean, 'is-stateful-local')),
('is_stateful_remote', YLeaf(YType.boolean, 'is-stateful-remote')),
('is_stateful_u_flag_local', YLeaf(YType.boolean, 'is-stateful-u-flag-local')),
('is_stateful_u_flag_remote', YLeaf(YType.boolean, 'is-stateful-u-flag-remote')),
('is_segment_routing_local', YLeaf(YType.boolean, 'is-segment-routing-local')),
('is_segment_routing_remote', YLeaf(YType.boolean, 'is-segment-routing-remote')),
('is_best_pce', YLeaf(YType.boolean, 'is-best-pce')),
('sr_msd_local', YLeaf(YType.uint8, 'sr-msd-local')),
('sr_msd_remote', YLeaf(YType.uint8, 'sr-msd-remote')),
])
self.peer_addr = None
self.handle = None
self.state_str = None
self.local_ok = None
self.remote_ok = None
self.open_retry = None
self.ref_cnt = None
self.rx_state_str = None
self.holddown_counter = None
self.pcep_up_ts = None
self.precedence = None
self.ka_interval_local = None
self.ka_interval_remote = None
self.dead_interval_local = None
self.dead_interval_remote = None
self.pcep_session_id_local = None
self.pcep_session_id_remote = None
self.pcep_server_ipv4_addr = None
self.pcep_client_ipv4_addr = None
self.is_stateful_local = None
self.is_stateful_remote = None
self.is_stateful_u_flag_local = None
self.is_stateful_u_flag_remote = None
self.is_segment_routing_local = None
self.is_segment_routing_remote = None
self.is_best_pce = None
self.sr_msd_local = None
self.sr_msd_remote = None
self.socket_info = Pcc.Peers.Peer.SocketInfo()
self.socket_info.parent = self
self._children_name_map["socket_info"] = "socket-info"
self._children_yang_names.add("socket-info")
self.stats = Pcc.Peers.Peer.Stats()
self.stats.parent = self
self._children_name_map["stats"] = "stats"
self._children_yang_names.add("stats")
self._segment_path = lambda: "peer" + "[peer-addr='" + str(self.peer_addr) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:pcc/peers/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Peers.Peer, ['peer_addr', 'handle', 'state_str', 'local_ok', 'remote_ok', 'open_retry', 'ref_cnt', 'rx_state_str', 'holddown_counter', 'pcep_up_ts', 'precedence', 'ka_interval_local', 'ka_interval_remote', 'dead_interval_local', 'dead_interval_remote', 'pcep_session_id_local', 'pcep_session_id_remote', 'pcep_server_ipv4_addr', 'pcep_client_ipv4_addr', 'is_stateful_local', 'is_stateful_remote', 'is_stateful_u_flag_local', 'is_stateful_u_flag_remote', 'is_segment_routing_local', 'is_segment_routing_remote', 'is_best_pce', 'sr_msd_local', 'sr_msd_remote'], name, value)
class SocketInfo(Entity):
"""
socket info
.. attribute:: fd
file descriptor
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: wnotify
write notify
**type**\: bool
.. attribute:: rnotify
read notify
**type**\: bool
.. attribute:: refcnt
ref count
**type**\: int
**range:** 0..4294967295
.. attribute:: selected
selected
**type**\: bool
.. attribute:: owner
owner
**type**\: int
**range:** 0..4294967295
.. attribute:: csockaddr_str
client address
**type**\: str
.. attribute:: ssockaddr_str
server address
**type**\: str
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Peers.Peer.SocketInfo, self).__init__()
self.yang_name = "socket-info"
self.yang_parent_name = "peer"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('fd', YLeaf(YType.int64, 'fd')),
('wnotify', YLeaf(YType.boolean, 'wnotify')),
('rnotify', YLeaf(YType.boolean, 'rnotify')),
('refcnt', YLeaf(YType.uint32, 'refcnt')),
('selected', YLeaf(YType.boolean, 'selected')),
('owner', YLeaf(YType.uint32, 'owner')),
('csockaddr_str', YLeaf(YType.str, 'csockaddr-str')),
('ssockaddr_str', YLeaf(YType.str, 'ssockaddr-str')),
])
self.fd = None
self.wnotify = None
self.rnotify = None
self.refcnt = None
self.selected = None
self.owner = None
self.csockaddr_str = None
self.ssockaddr_str = None
self._segment_path = lambda: "socket-info"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Peers.Peer.SocketInfo, ['fd', 'wnotify', 'rnotify', 'refcnt', 'selected', 'owner', 'csockaddr_str', 'ssockaddr_str'], name, value)
class Stats(Entity):
"""
stats
.. attribute:: ka_msg_rx
KA messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ka_msg_fail_rx
KA messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ka_msg_tx
KA messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: ka_msg_fail_tx
KA messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcreq_msg_rx
PCREQ messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcreq_msg_fail_rx
PCREQ messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcreq_msg_tx
PCREQ messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcreq_msg_fail_tx
PCREQ messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcrep_msg_rx
PCREP messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcrep_msg_fail_rx
PCREP messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcrep_msg_tx
PCREP messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcrep_msg_fail_tx
PCREP messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcrpt_msg_rx
PCRPT messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcrpt_msg_fail_rx
PCRPT messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcrpt_msg_tx
PCRPT messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcrpt_msg_fail_tx
PCRPT messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcupd_msg_rx
PCUPD messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcupd_msg_fail_rx
PCUPD messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcupd_msg_tx
PCUPD messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcupd_msg_fail_tx
PCUPD messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: open_msg_rx
OPEN messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: open_msg_fail_rx
OPEN messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: open_msg_tx
OPEN messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: open_msg_fail_tx
OPEN messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcerr_msg_rx
PCERR messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcerr_msg_fail_rx
PCERR messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcerr_msg_tx
PCERR messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcerr_msg_fail_tx
PCERR messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcntf_msg_rx
PCNTF messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcntf_msg_fail_rx
PCNTF messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcntf_msg_tx
PCNTF messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcntf_msg_fail_tx
PCNTF messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pce_eos_msg_tx
PCE EOS messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pce_eos_msg_fail_tx
PCE EOS messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: close_msg_rx
CLOSE messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: close_msg_fail_rx
CLOSE messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: close_msg_tx
CLOSE messages txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: close_msg_fail_tx
CLOSE messages fail txed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: unexpected_msg_rx
Unexpected messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: corrupted_msg_rx
Corrupted messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: reply_time_index
index into recorded reply time
**type**\: int
**range:** 0..4294967295
.. attribute:: minimum_reply_time
min reply time
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: maximum_reply_time
max reply time
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: requests_timed_out
requests timed out
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: last_pcerr_type_rx
last PCERR type received
**type**\: int
**range:** 0..255
.. attribute:: last_pcerr_val_rx
last PCERR value received
**type**\: int
**range:** 0..255
.. attribute:: last_pcerr_rx_ts
last time when PCERR was received
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: last_pcerr_type_tx
last PCERR type transmitted
**type**\: int
**range:** 0..255
.. attribute:: last_pcerr_val_tx
last PCERR value transmitted
**type**\: int
**range:** 0..255
.. attribute:: last_pcerr_tx_ts
last time when PCERR was transmitted
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcinitiate_msg_rx
PCINITIATE messages rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: pcinitiate_msg_fail_rx
PCINITIATE messages fail rxed
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: recorded_reply_time
Recorded reply time
**type**\: list of int
**range:** 0..18446744073709551615
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Pcc.Peers.Peer.Stats, self).__init__()
self.yang_name = "stats"
self.yang_parent_name = "peer"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('ka_msg_rx', YLeaf(YType.uint64, 'ka-msg-rx')),
('ka_msg_fail_rx', YLeaf(YType.uint64, 'ka-msg-fail-rx')),
('ka_msg_tx', YLeaf(YType.uint64, 'ka-msg-tx')),
('ka_msg_fail_tx', YLeaf(YType.uint64, 'ka-msg-fail-tx')),
('pcreq_msg_rx', YLeaf(YType.uint64, 'pcreq-msg-rx')),
('pcreq_msg_fail_rx', YLeaf(YType.uint64, 'pcreq-msg-fail-rx')),
('pcreq_msg_tx', YLeaf(YType.uint64, 'pcreq-msg-tx')),
('pcreq_msg_fail_tx', YLeaf(YType.uint64, 'pcreq-msg-fail-tx')),
('pcrep_msg_rx', YLeaf(YType.uint64, 'pcrep-msg-rx')),
('pcrep_msg_fail_rx', YLeaf(YType.uint64, 'pcrep-msg-fail-rx')),
('pcrep_msg_tx', YLeaf(YType.uint64, 'pcrep-msg-tx')),
('pcrep_msg_fail_tx', YLeaf(YType.uint64, 'pcrep-msg-fail-tx')),
('pcrpt_msg_rx', YLeaf(YType.uint64, 'pcrpt-msg-rx')),
('pcrpt_msg_fail_rx', YLeaf(YType.uint64, 'pcrpt-msg-fail-rx')),
('pcrpt_msg_tx', YLeaf(YType.uint64, 'pcrpt-msg-tx')),
('pcrpt_msg_fail_tx', YLeaf(YType.uint64, 'pcrpt-msg-fail-tx')),
('pcupd_msg_rx', YLeaf(YType.uint64, 'pcupd-msg-rx')),
('pcupd_msg_fail_rx', YLeaf(YType.uint64, 'pcupd-msg-fail-rx')),
('pcupd_msg_tx', YLeaf(YType.uint64, 'pcupd-msg-tx')),
('pcupd_msg_fail_tx', YLeaf(YType.uint64, 'pcupd-msg-fail-tx')),
('open_msg_rx', YLeaf(YType.uint64, 'open-msg-rx')),
('open_msg_fail_rx', YLeaf(YType.uint64, 'open-msg-fail-rx')),
('open_msg_tx', YLeaf(YType.uint64, 'open-msg-tx')),
('open_msg_fail_tx', YLeaf(YType.uint64, 'open-msg-fail-tx')),
('pcerr_msg_rx', YLeaf(YType.uint64, 'pcerr-msg-rx')),
('pcerr_msg_fail_rx', YLeaf(YType.uint64, 'pcerr-msg-fail-rx')),
('pcerr_msg_tx', YLeaf(YType.uint64, 'pcerr-msg-tx')),
('pcerr_msg_fail_tx', YLeaf(YType.uint64, 'pcerr-msg-fail-tx')),
('pcntf_msg_rx', YLeaf(YType.uint64, 'pcntf-msg-rx')),
('pcntf_msg_fail_rx', YLeaf(YType.uint64, 'pcntf-msg-fail-rx')),
('pcntf_msg_tx', YLeaf(YType.uint64, 'pcntf-msg-tx')),
('pcntf_msg_fail_tx', YLeaf(YType.uint64, 'pcntf-msg-fail-tx')),
('pce_eos_msg_tx', YLeaf(YType.uint64, 'pce-eos-msg-tx')),
('pce_eos_msg_fail_tx', YLeaf(YType.uint64, 'pce-eos-msg-fail-tx')),
('close_msg_rx', YLeaf(YType.uint64, 'close-msg-rx')),
('close_msg_fail_rx', YLeaf(YType.uint64, 'close-msg-fail-rx')),
('close_msg_tx', YLeaf(YType.uint64, 'close-msg-tx')),
('close_msg_fail_tx', YLeaf(YType.uint64, 'close-msg-fail-tx')),
('unexpected_msg_rx', YLeaf(YType.uint64, 'unexpected-msg-rx')),
('corrupted_msg_rx', YLeaf(YType.uint64, 'corrupted-msg-rx')),
('reply_time_index', YLeaf(YType.uint32, 'reply-time-index')),
('minimum_reply_time', YLeaf(YType.uint64, 'minimum-reply-time')),
('maximum_reply_time', YLeaf(YType.uint64, 'maximum-reply-time')),
('requests_timed_out', YLeaf(YType.uint64, 'requests-timed-out')),
('last_pcerr_type_rx', YLeaf(YType.uint8, 'last-pcerr-type-rx')),
('last_pcerr_val_rx', YLeaf(YType.uint8, 'last-pcerr-val-rx')),
('last_pcerr_rx_ts', YLeaf(YType.uint64, 'last-pcerr-rx-ts')),
('last_pcerr_type_tx', YLeaf(YType.uint8, 'last-pcerr-type-tx')),
('last_pcerr_val_tx', YLeaf(YType.uint8, 'last-pcerr-val-tx')),
('last_pcerr_tx_ts', YLeaf(YType.uint64, 'last-pcerr-tx-ts')),
('pcinitiate_msg_rx', YLeaf(YType.uint64, 'pcinitiate-msg-rx')),
('pcinitiate_msg_fail_rx', YLeaf(YType.uint64, 'pcinitiate-msg-fail-rx')),
('recorded_reply_time', YLeafList(YType.uint64, 'recorded-reply-time')),
])
self.ka_msg_rx = None
self.ka_msg_fail_rx = None
self.ka_msg_tx = None
self.ka_msg_fail_tx = None
self.pcreq_msg_rx = None
self.pcreq_msg_fail_rx = None
self.pcreq_msg_tx = None
self.pcreq_msg_fail_tx = None
self.pcrep_msg_rx = None
self.pcrep_msg_fail_rx = None
self.pcrep_msg_tx = None
self.pcrep_msg_fail_tx = None
self.pcrpt_msg_rx = None
self.pcrpt_msg_fail_rx = None
self.pcrpt_msg_tx = None
self.pcrpt_msg_fail_tx = None
self.pcupd_msg_rx = None
self.pcupd_msg_fail_rx = None
self.pcupd_msg_tx = None
self.pcupd_msg_fail_tx = None
self.open_msg_rx = None
self.open_msg_fail_rx = None
self.open_msg_tx = None
self.open_msg_fail_tx = None
self.pcerr_msg_rx = None
self.pcerr_msg_fail_rx = None
self.pcerr_msg_tx = None
self.pcerr_msg_fail_tx = None
self.pcntf_msg_rx = None
self.pcntf_msg_fail_rx = None
self.pcntf_msg_tx = None
self.pcntf_msg_fail_tx = None
self.pce_eos_msg_tx = None
self.pce_eos_msg_fail_tx = None
self.close_msg_rx = None
self.close_msg_fail_rx = None
self.close_msg_tx = None
self.close_msg_fail_tx = None
self.unexpected_msg_rx = None
self.corrupted_msg_rx = None
self.reply_time_index = None
self.minimum_reply_time = None
self.maximum_reply_time = None
self.requests_timed_out = None
self.last_pcerr_type_rx = None
self.last_pcerr_val_rx = None
self.last_pcerr_rx_ts = None
self.last_pcerr_type_tx = None
self.last_pcerr_val_tx = None
self.last_pcerr_tx_ts = None
self.pcinitiate_msg_rx = None
self.pcinitiate_msg_fail_rx = None
self.recorded_reply_time = []
self._segment_path = lambda: "stats"
def __setattr__(self, name, value):
self._perform_setattr(Pcc.Peers.Peer.Stats, ['ka_msg_rx', 'ka_msg_fail_rx', 'ka_msg_tx', 'ka_msg_fail_tx', 'pcreq_msg_rx', 'pcreq_msg_fail_rx', 'pcreq_msg_tx', 'pcreq_msg_fail_tx', 'pcrep_msg_rx', 'pcrep_msg_fail_rx', 'pcrep_msg_tx', 'pcrep_msg_fail_tx', 'pcrpt_msg_rx', 'pcrpt_msg_fail_rx', 'pcrpt_msg_tx', 'pcrpt_msg_fail_tx', 'pcupd_msg_rx', 'pcupd_msg_fail_rx', 'pcupd_msg_tx', 'pcupd_msg_fail_tx', 'open_msg_rx', 'open_msg_fail_rx', 'open_msg_tx', 'open_msg_fail_tx', 'pcerr_msg_rx', 'pcerr_msg_fail_rx', 'pcerr_msg_tx', 'pcerr_msg_fail_tx', 'pcntf_msg_rx', 'pcntf_msg_fail_rx', 'pcntf_msg_tx', 'pcntf_msg_fail_tx', 'pce_eos_msg_tx', 'pce_eos_msg_fail_tx', 'close_msg_rx', 'close_msg_fail_rx', 'close_msg_tx', 'close_msg_fail_tx', 'unexpected_msg_rx', 'corrupted_msg_rx', 'reply_time_index', 'minimum_reply_time', 'maximum_reply_time', 'requests_timed_out', 'last_pcerr_type_rx', 'last_pcerr_val_rx', 'last_pcerr_rx_ts', 'last_pcerr_type_tx', 'last_pcerr_val_tx', 'last_pcerr_tx_ts', 'pcinitiate_msg_rx', 'pcinitiate_msg_fail_rx', 'recorded_reply_time'], name, value)
def clone_ptr(self):
self._top_entity = Pcc()
return self._top_entity
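# --- Illustrative usage (editor's sketch, not part of the generated bindings) ---
# A minimal example of reading the Cisco-IOS-XR-infra-xtc-agent-oper:pcc subtree
# with ydk's CRUDService. The device address, credentials and port are
# placeholders, and the exact NetconfServiceProvider keyword arguments can vary
# between ydk releases, so treat this as an assumption rather than a verified recipe.
def _example_read_pcc(address, username, password, port=830):
    """Read PCC peers and PLSPs from a device and print a short summary (illustrative only)."""
    from ydk.services import CRUDService
    from ydk.providers import NetconfServiceProvider

    provider = NetconfServiceProvider(address=address, port=port,
                                      username=username, password=password)
    crud = CRUDService()
    # Read the whole Cisco-IOS-XR-infra-xtc-agent-oper:pcc operational subtree.
    pcc = crud.read(provider, Pcc())
    for peer in pcc.peers.peer:
        # state_str carries the PCEP session state as reported by the device.
        print(peer.peer_addr, peer.state_str, peer.precedence)
    for plsp in pcc.plsps.plsp:
        print(plsp.plsp_id_xr, plsp.sym_path_name)
    return pcc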
class Xtc(Entity):
"""
xtc
.. attribute:: policies
Policy database in XTC Agent
**type**\: :py:class:`Policies <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies>`
.. attribute:: on_demand_colors
On\-Demand Color database in XTC Agent
**type**\: :py:class:`OnDemandColors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.OnDemandColors>`
.. attribute:: forwarding
Forwarding information
**type**\: :py:class:`Forwarding <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Forwarding>`
.. attribute:: topology_summary
Node summary database
**type**\: :py:class:`TopologySummary <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologySummary>`
.. attribute:: topology_nodes
Node database in XTC Agent
**type**\: :py:class:`TopologyNodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes>`
.. attribute:: prefix_infos
Prefixes database in XTC Agent
**type**\: :py:class:`PrefixInfos <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.PrefixInfos>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc, self).__init__()
self._top_entity = None
self.yang_name = "xtc"
self.yang_parent_name = "Cisco-IOS-XR-infra-xtc-agent-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("policies", ("policies", Xtc.Policies)), ("on-demand-colors", ("on_demand_colors", Xtc.OnDemandColors)), ("forwarding", ("forwarding", Xtc.Forwarding)), ("topology-summary", ("topology_summary", Xtc.TopologySummary)), ("topology-nodes", ("topology_nodes", Xtc.TopologyNodes)), ("prefix-infos", ("prefix_infos", Xtc.PrefixInfos))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.policies = Xtc.Policies()
self.policies.parent = self
self._children_name_map["policies"] = "policies"
self._children_yang_names.add("policies")
self.on_demand_colors = Xtc.OnDemandColors()
self.on_demand_colors.parent = self
self._children_name_map["on_demand_colors"] = "on-demand-colors"
self._children_yang_names.add("on-demand-colors")
self.forwarding = Xtc.Forwarding()
self.forwarding.parent = self
self._children_name_map["forwarding"] = "forwarding"
self._children_yang_names.add("forwarding")
self.topology_summary = Xtc.TopologySummary()
self.topology_summary.parent = self
self._children_name_map["topology_summary"] = "topology-summary"
self._children_yang_names.add("topology-summary")
self.topology_nodes = Xtc.TopologyNodes()
self.topology_nodes.parent = self
self._children_name_map["topology_nodes"] = "topology-nodes"
self._children_yang_names.add("topology-nodes")
self.prefix_infos = Xtc.PrefixInfos()
self.prefix_infos.parent = self
self._children_name_map["prefix_infos"] = "prefix-infos"
self._children_yang_names.add("prefix-infos")
self._segment_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc"
class Policies(Entity):
"""
Policy database in XTC Agent
.. attribute:: policy
Policy information
**type**\: list of :py:class:`Policy <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies, self).__init__()
self.yang_name = "policies"
self.yang_parent_name = "xtc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("policy", ("policy", Xtc.Policies.Policy))])
self._leafs = OrderedDict()
self.policy = YList(self)
self._segment_path = lambda: "policies"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies, [], name, value)
class Policy(Entity):
"""
Policy information
.. attribute:: id (key)
Policy ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: destination_address
Destination address
**type**\: :py:class:`DestinationAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.DestinationAddress>`
.. attribute:: binding_sid
Binding SID information
**type**\: :py:class:`BindingSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.BindingSid>`
.. attribute:: auto_policy_info
Autopolicy information
**type**\: :py:class:`AutoPolicyInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.AutoPolicyInfo>`
.. attribute:: policy_name
Policy name
**type**\: str
.. attribute:: administrative_up
Admin up
**type**\: int
**range:** 0..4294967295
.. attribute:: operational_up
Operational up
**type**\: int
**range:** 0..4294967295
.. attribute:: color
Color
**type**\: int
**range:** 0..4294967295
.. attribute:: is_auto_policy
Whether policy was automatically created or configured
**type**\: bool
.. attribute:: transition_count
Indicates number of up/down transitions
**type**\: int
**range:** 0..4294967295
.. attribute:: forward_class
Forward class of the policy
**type**\: int
**range:** 0..4294967295
.. attribute:: up_time
Policy up time in nanoseconds
**type**\: int
**range:** 0..18446744073709551615
**units**\: nanosecond
.. attribute:: up_age
Policy up age (since) in nanoseconds
**type**\: int
**range:** 0..18446744073709551615
**units**\: nanosecond
.. attribute:: down_time
Policy down time in nanoseconds
**type**\: int
**range:** 0..18446744073709551615
**units**\: nanosecond
.. attribute:: down_age
Policy down age (since) in nanoseconds
**type**\: int
**range:** 0..18446744073709551615
**units**\: nanosecond
.. attribute:: lsp_id
LSP ID
**type**\: int
**range:** 0..4294967295
.. attribute:: interface_handle
Interface handle
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_group_identifier
Policy group identifier
**type**\: int
**range:** 0..65535
.. attribute:: local_label_identifier
Local label identifier
**type**\: int
**range:** 0..65535
.. attribute:: local_label
Local label
**type**\: int
**range:** 0..4294967295
.. attribute:: paths
Path options
**type**\: list of :py:class:`Paths <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.Paths>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy, self).__init__()
self.yang_name = "policy"
self.yang_parent_name = "policies"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['id']
self._child_container_classes = OrderedDict([("destination-address", ("destination_address", Xtc.Policies.Policy.DestinationAddress)), ("binding-sid", ("binding_sid", Xtc.Policies.Policy.BindingSid)), ("auto-policy-info", ("auto_policy_info", Xtc.Policies.Policy.AutoPolicyInfo))])
self._child_list_classes = OrderedDict([("paths", ("paths", Xtc.Policies.Policy.Paths))])
self._leafs = OrderedDict([
('id', YLeaf(YType.int32, 'id')),
('policy_name', YLeaf(YType.str, 'policy-name')),
('administrative_up', YLeaf(YType.uint32, 'administrative-up')),
('operational_up', YLeaf(YType.uint32, 'operational-up')),
('color', YLeaf(YType.uint32, 'color')),
('is_auto_policy', YLeaf(YType.boolean, 'is-auto-policy')),
('transition_count', YLeaf(YType.uint32, 'transition-count')),
('forward_class', YLeaf(YType.uint32, 'forward-class')),
('up_time', YLeaf(YType.uint64, 'up-time')),
('up_age', YLeaf(YType.uint64, 'up-age')),
('down_time', YLeaf(YType.uint64, 'down-time')),
('down_age', YLeaf(YType.uint64, 'down-age')),
('lsp_id', YLeaf(YType.uint32, 'lsp-id')),
('interface_handle', YLeaf(YType.uint32, 'interface-handle')),
('policy_group_identifier', YLeaf(YType.uint16, 'policy-group-identifier')),
('local_label_identifier', YLeaf(YType.uint16, 'local-label-identifier')),
('local_label', YLeaf(YType.uint32, 'local-label')),
])
self.id = None
self.policy_name = None
self.administrative_up = None
self.operational_up = None
self.color = None
self.is_auto_policy = None
self.transition_count = None
self.forward_class = None
self.up_time = None
self.up_age = None
self.down_time = None
self.down_age = None
self.lsp_id = None
self.interface_handle = None
self.policy_group_identifier = None
self.local_label_identifier = None
self.local_label = None
self.destination_address = Xtc.Policies.Policy.DestinationAddress()
self.destination_address.parent = self
self._children_name_map["destination_address"] = "destination-address"
self._children_yang_names.add("destination-address")
self.binding_sid = Xtc.Policies.Policy.BindingSid()
self.binding_sid.parent = self
self._children_name_map["binding_sid"] = "binding-sid"
self._children_yang_names.add("binding-sid")
self.auto_policy_info = Xtc.Policies.Policy.AutoPolicyInfo()
self.auto_policy_info.parent = self
self._children_name_map["auto_policy_info"] = "auto-policy-info"
self._children_yang_names.add("auto-policy-info")
self.paths = YList(self)
self._segment_path = lambda: "policy" + "[id='" + str(self.id) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/policies/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy, ['id', 'policy_name', 'administrative_up', 'operational_up', 'color', 'is_auto_policy', 'transition_count', 'forward_class', 'up_time', 'up_age', 'down_time', 'down_age', 'lsp_id', 'interface_handle', 'policy_group_identifier', 'local_label_identifier', 'local_label'], name, value)
class DestinationAddress(Entity):
"""
Destination address
.. attribute:: af_name
AFName
**type**\: :py:class:`XtcAfId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcAfId>`
.. attribute:: ipv4
IPv4 address type
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6
IPv6 address type
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.DestinationAddress, self).__init__()
self.yang_name = "destination-address"
self.yang_parent_name = "policy"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('af_name', YLeaf(YType.enumeration, 'af-name')),
('ipv4', YLeaf(YType.str, 'ipv4')),
('ipv6', YLeaf(YType.str, 'ipv6')),
])
self.af_name = None
self.ipv4 = None
self.ipv6 = None
self._segment_path = lambda: "destination-address"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.DestinationAddress, ['af_name', 'ipv4', 'ipv6'], name, value)
class BindingSid(Entity):
"""
Binding SID information
.. attribute:: value
Binding SID value
**type**\: :py:class:`Value <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.BindingSid.Value>`
.. attribute:: bsid_mode
Binding SID Mode
**type**\: :py:class:`XtcBsidMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcBsidMode>`
.. attribute:: error
Binding SID error, if any
**type**\: :py:class:`XtcBsidError <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcBsidError>`
.. attribute:: state
State of the binding SID
**type**\: str
.. attribute:: explicit_based
Whether the binding SID is explicit\-based
**type**\: bool
.. attribute:: policy_selected
Whether the policy is selected for forwarding on this BSID
**type**\: bool
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.BindingSid, self).__init__()
self.yang_name = "binding-sid"
self.yang_parent_name = "policy"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("value", ("value", Xtc.Policies.Policy.BindingSid.Value))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('bsid_mode', YLeaf(YType.enumeration, 'bsid-mode')),
('error', YLeaf(YType.enumeration, 'error')),
('state', YLeaf(YType.str, 'state')),
('explicit_based', YLeaf(YType.boolean, 'explicit-based')),
('policy_selected', YLeaf(YType.boolean, 'policy-selected')),
])
self.bsid_mode = None
self.error = None
self.state = None
self.explicit_based = None
self.policy_selected = None
self.value = Xtc.Policies.Policy.BindingSid.Value()
self.value.parent = self
self._children_name_map["value"] = "value"
self._children_yang_names.add("value")
self._segment_path = lambda: "binding-sid"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.BindingSid, ['bsid_mode', 'error', 'state', 'explicit_based', 'policy_selected'], name, value)
class Value(Entity):
"""
Binding SID value
.. attribute:: sid_type
SIDType
**type**\: :py:class:`XtcSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcSid>`
.. attribute:: label
MPLS label
**type**\: int
**range:** 0..4294967295
.. attribute:: ipv6
IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.BindingSid.Value, self).__init__()
self.yang_name = "value"
self.yang_parent_name = "binding-sid"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('sid_type', YLeaf(YType.enumeration, 'sid-type')),
('label', YLeaf(YType.uint32, 'label')),
('ipv6', YLeaf(YType.str, 'ipv6')),
])
self.sid_type = None
self.label = None
self.ipv6 = None
self._segment_path = lambda: "value"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.BindingSid.Value, ['sid_type', 'label', 'ipv6'], name, value)
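# --- Usage sketch (not part of the generated bindings) ----------------------
# A minimal, hypothetical example of reading this operational model with
# ydk-py's CRUDService. The device address and credentials below are
# placeholders, not values defined by this model; adjust them for a reachable
# IOS-XR device running the XTC agent.
#
#   from ydk.services import CRUDService
#   from ydk.providers import NetconfServiceProvider
#   from ydk.models.cisco_ios_xr import Cisco_IOS_XR_infra_xtc_agent_oper as xtc_oper
#
#   provider = NetconfServiceProvider(address='192.0.2.1',
#                                     username='admin', password='admin')
#   crud = CRUDService()
#   xtc = crud.read(provider, xtc_oper.Xtc())   # fetch the operational tree
#   for policy in xtc.policies.policy:          # YList of Policy entries
#       bsid = policy.binding_sid.value
#       print(policy.policy_name, policy.color, bsid.label)
# -----------------------------------------------------------------------------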
class AutoPolicyInfo(Entity):
"""
Autopolicy information
.. attribute:: creator_name
Name of client who created policy
**type**\: str
.. attribute:: distinguisher
Distinguisher
**type**\: int
**range:** 0..4294967295
.. attribute:: preference
Preference of the policy
**type**\: int
**range:** 0..4294967295
.. attribute:: ipv6_caps_enabled
Whether IPv6 caps was requested to be enabled
**type**\: bool
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.AutoPolicyInfo, self).__init__()
self.yang_name = "auto-policy-info"
self.yang_parent_name = "policy"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('creator_name', YLeaf(YType.str, 'creator-name')),
('distinguisher', YLeaf(YType.uint32, 'distinguisher')),
('preference', YLeaf(YType.uint32, 'preference')),
('ipv6_caps_enabled', YLeaf(YType.boolean, 'ipv6-caps-enabled')),
])
self.creator_name = None
self.distinguisher = None
self.preference = None
self.ipv6_caps_enabled = None
self._segment_path = lambda: "auto-policy-info"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.AutoPolicyInfo, ['creator_name', 'distinguisher', 'preference', 'ipv6_caps_enabled'], name, value)
class Paths(Entity):
"""
Path options
.. attribute:: sr_path_constraints
SR path constraints
**type**\: :py:class:`SrPathConstraints <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.Paths.SrPathConstraints>`
.. attribute:: index
Index number
**type**\: int
**range:** 0..4294967295
.. attribute:: type
Path option type
**type**\: :py:class:`XtcPolicyPath <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcPolicyPath>`
.. attribute:: name
Explicit path option name
**type**\: str
.. attribute:: active
Whether the path is active (used)
**type**\: bool
.. attribute:: weight
Configured weight of the path\-option
**type**\: int
**range:** 0..4294967295
.. attribute:: metric_type
Configured path metric type
**type**\: int
**range:** 0..255
.. attribute:: metric_value
Path metric value
**type**\: int
**range:** 0..4294967295
.. attribute:: is_valid
True if path is valid
**type**\: bool
.. attribute:: pce_based_path
True if the path is to be computed by PCE
**type**\: bool
.. attribute:: pce_address
Address of the PCE computed the path
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: error
Error (for display only)
**type**\: str
.. attribute:: hops
SR hop list
**type**\: list of :py:class:`Hops <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.Paths.Hops>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.Paths, self).__init__()
self.yang_name = "paths"
self.yang_parent_name = "policy"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("sr-path-constraints", ("sr_path_constraints", Xtc.Policies.Policy.Paths.SrPathConstraints))])
self._child_list_classes = OrderedDict([("hops", ("hops", Xtc.Policies.Policy.Paths.Hops))])
self._leafs = OrderedDict([
('index', YLeaf(YType.uint32, 'index')),
('type', YLeaf(YType.enumeration, 'type')),
('name', YLeaf(YType.str, 'name')),
('active', YLeaf(YType.boolean, 'active')),
('weight', YLeaf(YType.uint32, 'weight')),
('metric_type', YLeaf(YType.uint8, 'metric-type')),
('metric_value', YLeaf(YType.uint32, 'metric-value')),
('is_valid', YLeaf(YType.boolean, 'is-valid')),
('pce_based_path', YLeaf(YType.boolean, 'pce-based-path')),
('pce_address', YLeaf(YType.str, 'pce-address')),
('error', YLeaf(YType.str, 'error')),
])
self.index = None
self.type = None
self.name = None
self.active = None
self.weight = None
self.metric_type = None
self.metric_value = None
self.is_valid = None
self.pce_based_path = None
self.pce_address = None
self.error = None
self.sr_path_constraints = Xtc.Policies.Policy.Paths.SrPathConstraints()
self.sr_path_constraints.parent = self
self._children_name_map["sr_path_constraints"] = "sr-path-constraints"
self._children_yang_names.add("sr-path-constraints")
self.hops = YList(self)
self._segment_path = lambda: "paths"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.Paths, ['index', 'type', 'name', 'active', 'weight', 'metric_type', 'metric_value', 'is_valid', 'pce_based_path', 'pce_address', 'error'], name, value)
class SrPathConstraints(Entity):
"""
SR path constraints
.. attribute:: path_metrics
Path metrics
**type**\: :py:class:`PathMetrics <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.Paths.SrPathConstraints.PathMetrics>`
.. attribute:: affinity_constraint
Affinity constraints list
**type**\: list of :py:class:`AffinityConstraint <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.Paths.SrPathConstraints.AffinityConstraint>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.Paths.SrPathConstraints, self).__init__()
self.yang_name = "sr-path-constraints"
self.yang_parent_name = "paths"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("path-metrics", ("path_metrics", Xtc.Policies.Policy.Paths.SrPathConstraints.PathMetrics))])
self._child_list_classes = OrderedDict([("affinity-constraint", ("affinity_constraint", Xtc.Policies.Policy.Paths.SrPathConstraints.AffinityConstraint))])
self._leafs = OrderedDict()
self.path_metrics = Xtc.Policies.Policy.Paths.SrPathConstraints.PathMetrics()
self.path_metrics.parent = self
self._children_name_map["path_metrics"] = "path-metrics"
self._children_yang_names.add("path-metrics")
self.affinity_constraint = YList(self)
self._segment_path = lambda: "sr-path-constraints"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.Paths.SrPathConstraints, [], name, value)
class PathMetrics(Entity):
"""
Path metrics
.. attribute:: margin_relative
Margin Relative
**type**\: int
**range:** 0..255
.. attribute:: margin_absolute
Margin Absolute
**type**\: int
**range:** 0..255
.. attribute:: maximum_segments
Maximum number of segments
**type**\: int
**range:** 0..65535
.. attribute:: accumulative_te_metric
Accumulative TE metric
**type**\: int
**range:** 0..4294967295
.. attribute:: accumulative_igp_metric
Accumulative IGP metric
**type**\: int
**range:** 0..4294967295
.. attribute:: accumulative_delay
Accumulative delay
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.Paths.SrPathConstraints.PathMetrics, self).__init__()
self.yang_name = "path-metrics"
self.yang_parent_name = "sr-path-constraints"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('margin_relative', YLeaf(YType.uint8, 'margin-relative')),
('margin_absolute', YLeaf(YType.uint8, 'margin-absolute')),
('maximum_segments', YLeaf(YType.uint16, 'maximum-segments')),
('accumulative_te_metric', YLeaf(YType.uint32, 'accumulative-te-metric')),
('accumulative_igp_metric', YLeaf(YType.uint32, 'accumulative-igp-metric')),
('accumulative_delay', YLeaf(YType.uint32, 'accumulative-delay')),
])
self.margin_relative = None
self.margin_absolute = None
self.maximum_segments = None
self.accumulative_te_metric = None
self.accumulative_igp_metric = None
self.accumulative_delay = None
self._segment_path = lambda: "path-metrics"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.Paths.SrPathConstraints.PathMetrics, ['margin_relative', 'margin_absolute', 'maximum_segments', 'accumulative_te_metric', 'accumulative_igp_metric', 'accumulative_delay'], name, value)
class AffinityConstraint(Entity):
"""
Affinity constraints list
.. attribute:: type
Affinity type
**type**\: int
**range:** 0..255
.. attribute:: value
Affinity value
**type**\: int
**range:** 0..4294967295
.. attribute:: color
Colors
**type**\: list of :py:class:`Color <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.Paths.SrPathConstraints.AffinityConstraint.Color>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.Paths.SrPathConstraints.AffinityConstraint, self).__init__()
self.yang_name = "affinity-constraint"
self.yang_parent_name = "sr-path-constraints"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("color", ("color", Xtc.Policies.Policy.Paths.SrPathConstraints.AffinityConstraint.Color))])
self._leafs = OrderedDict([
('type', YLeaf(YType.uint8, 'type')),
('value', YLeaf(YType.uint32, 'value')),
])
self.type = None
self.value = None
self.color = YList(self)
self._segment_path = lambda: "affinity-constraint"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.Paths.SrPathConstraints.AffinityConstraint, ['type', 'value'], name, value)
class Color(Entity):
"""
Colors
.. attribute:: color
An affinity color
**type**\: str
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.Paths.SrPathConstraints.AffinityConstraint.Color, self).__init__()
self.yang_name = "color"
self.yang_parent_name = "affinity-constraint"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('color', YLeaf(YType.str, 'color')),
])
self.color = None
self._segment_path = lambda: "color"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.Paths.SrPathConstraints.AffinityConstraint.Color, ['color'], name, value)
class Hops(Entity):
"""
SR hop list
.. attribute:: sid
SID value
**type**\: :py:class:`Sid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.Paths.Hops.Sid>`
.. attribute:: local_address
Local address
**type**\: :py:class:`LocalAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.Paths.Hops.LocalAddress>`
.. attribute:: remote_address
Remote address
**type**\: :py:class:`RemoteAddress <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Policies.Policy.Paths.Hops.RemoteAddress>`
.. attribute:: sid_type
SID type
**type**\: :py:class:`XtcSrSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcSrSid>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.Paths.Hops, self).__init__()
self.yang_name = "hops"
self.yang_parent_name = "paths"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("sid", ("sid", Xtc.Policies.Policy.Paths.Hops.Sid)), ("local-address", ("local_address", Xtc.Policies.Policy.Paths.Hops.LocalAddress)), ("remote-address", ("remote_address", Xtc.Policies.Policy.Paths.Hops.RemoteAddress))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('sid_type', YLeaf(YType.enumeration, 'sid-type')),
])
self.sid_type = None
self.sid = Xtc.Policies.Policy.Paths.Hops.Sid()
self.sid.parent = self
self._children_name_map["sid"] = "sid"
self._children_yang_names.add("sid")
self.local_address = Xtc.Policies.Policy.Paths.Hops.LocalAddress()
self.local_address.parent = self
self._children_name_map["local_address"] = "local-address"
self._children_yang_names.add("local-address")
self.remote_address = Xtc.Policies.Policy.Paths.Hops.RemoteAddress()
self.remote_address.parent = self
self._children_name_map["remote_address"] = "remote-address"
self._children_yang_names.add("remote-address")
self._segment_path = lambda: "hops"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.Paths.Hops, ['sid_type'], name, value)
class Sid(Entity):
"""
SID value
.. attribute:: sid_type
SIDType
**type**\: :py:class:`XtcSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcSid>`
.. attribute:: label
MPLS label
**type**\: int
**range:** 0..4294967295
.. attribute:: ipv6
IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.Paths.Hops.Sid, self).__init__()
self.yang_name = "sid"
self.yang_parent_name = "hops"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('sid_type', YLeaf(YType.enumeration, 'sid-type')),
('label', YLeaf(YType.uint32, 'label')),
('ipv6', YLeaf(YType.str, 'ipv6')),
])
self.sid_type = None
self.label = None
self.ipv6 = None
self._segment_path = lambda: "sid"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.Paths.Hops.Sid, ['sid_type', 'label', 'ipv6'], name, value)
class LocalAddress(Entity):
"""
Local address
.. attribute:: af_name
AFName
**type**\: :py:class:`XtcAfId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcAfId>`
.. attribute:: ipv4
IPv4 address type
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6
IPv6 address type
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.Paths.Hops.LocalAddress, self).__init__()
self.yang_name = "local-address"
self.yang_parent_name = "hops"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('af_name', YLeaf(YType.enumeration, 'af-name')),
('ipv4', YLeaf(YType.str, 'ipv4')),
('ipv6', YLeaf(YType.str, 'ipv6')),
])
self.af_name = None
self.ipv4 = None
self.ipv6 = None
self._segment_path = lambda: "local-address"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.Paths.Hops.LocalAddress, ['af_name', 'ipv4', 'ipv6'], name, value)
class RemoteAddress(Entity):
"""
Remote address
.. attribute:: af_name
AFName
**type**\: :py:class:`XtcAfId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcAfId>`
.. attribute:: ipv4
IPv4 address type
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6
IPv6 address type
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Policies.Policy.Paths.Hops.RemoteAddress, self).__init__()
self.yang_name = "remote-address"
self.yang_parent_name = "hops"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('af_name', YLeaf(YType.enumeration, 'af-name')),
('ipv4', YLeaf(YType.str, 'ipv4')),
('ipv6', YLeaf(YType.str, 'ipv6')),
])
self.af_name = None
self.ipv4 = None
self.ipv6 = None
self._segment_path = lambda: "remote-address"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Policies.Policy.Paths.Hops.RemoteAddress, ['af_name', 'ipv4', 'ipv6'], name, value)
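# --- Usage sketch (assumption: `xtc` was read via CRUDService as sketched
# near the BindingSid classes above) ------------------------------------------
# Walking the candidate paths and SR hop lists of each policy; the attribute
# names follow the Paths and Hops classes defined above.
#
#   for policy in xtc.policies.policy:
#       for path in policy.paths:
#           print(path.index, path.type, path.active, path.metric_value)
#           for hop in path.hops:
#               print('  hop sid:', hop.sid.label,
#                     'remote:', hop.remote_address.ipv4)
# -----------------------------------------------------------------------------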
class OnDemandColors(Entity):
"""
On\-Demand Color database in XTC Agent
.. attribute:: on_demand_color
On Demand Color information
**type**\: list of :py:class:`OnDemandColor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.OnDemandColors.OnDemandColor>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.OnDemandColors, self).__init__()
self.yang_name = "on-demand-colors"
self.yang_parent_name = "xtc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("on-demand-color", ("on_demand_color", Xtc.OnDemandColors.OnDemandColor))])
self._leafs = OrderedDict()
self.on_demand_color = YList(self)
self._segment_path = lambda: "on-demand-colors"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.OnDemandColors, [], name, value)
class OnDemandColor(Entity):
"""
On Demand Color information
.. attribute:: color (key)
Color
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: disjoint_path_info
Disjoint path information
**type**\: :py:class:`DisjointPathInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.OnDemandColors.OnDemandColor.DisjointPathInfo>`
.. attribute:: color_xr
Color
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.OnDemandColors.OnDemandColor, self).__init__()
self.yang_name = "on-demand-color"
self.yang_parent_name = "on-demand-colors"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['color']
self._child_container_classes = OrderedDict([("disjoint-path-info", ("disjoint_path_info", Xtc.OnDemandColors.OnDemandColor.DisjointPathInfo))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('color', YLeaf(YType.int32, 'color')),
('color_xr', YLeaf(YType.uint32, 'color-xr')),
])
self.color = None
self.color_xr = None
self.disjoint_path_info = Xtc.OnDemandColors.OnDemandColor.DisjointPathInfo()
self.disjoint_path_info.parent = self
self._children_name_map["disjoint_path_info"] = "disjoint-path-info"
self._children_yang_names.add("disjoint-path-info")
self._segment_path = lambda: "on-demand-color" + "[color='" + str(self.color) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/on-demand-colors/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.OnDemandColors.OnDemandColor, ['color', 'color_xr'], name, value)
class DisjointPathInfo(Entity):
"""
Disjoint path information
.. attribute:: disjointness_type
Disjointness type
**type**\: :py:class:`XtcDisjointness <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcDisjointness>`
.. attribute:: group_id
Group ID
**type**\: int
**range:** 0..4294967295
.. attribute:: sub_id
Sub ID
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.OnDemandColors.OnDemandColor.DisjointPathInfo, self).__init__()
self.yang_name = "disjoint-path-info"
self.yang_parent_name = "on-demand-color"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('disjointness_type', YLeaf(YType.enumeration, 'disjointness-type')),
('group_id', YLeaf(YType.uint32, 'group-id')),
('sub_id', YLeaf(YType.uint32, 'sub-id')),
])
self.disjointness_type = None
self.group_id = None
self.sub_id = None
self._segment_path = lambda: "disjoint-path-info"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.OnDemandColors.OnDemandColor.DisjointPathInfo, ['disjointness_type', 'group_id', 'sub_id'], name, value)
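# --- Usage sketch (assumption: `xtc` was read via CRUDService as sketched
# earlier in this module) ------------------------------------------------------
# Listing on-demand colors and their disjoint-path parameters.
#
#   for odc in xtc.on_demand_colors.on_demand_color:
#       dp = odc.disjoint_path_info
#       print(odc.color_xr, dp.disjointness_type, dp.group_id, dp.sub_id)
# -----------------------------------------------------------------------------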
class Forwarding(Entity):
"""
Forwarding information
.. attribute:: policy_forwardings
Forwarding information for policies
**type**\: :py:class:`PolicyForwardings <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Forwarding.PolicyForwardings>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Forwarding, self).__init__()
self.yang_name = "forwarding"
self.yang_parent_name = "xtc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("policy-forwardings", ("policy_forwardings", Xtc.Forwarding.PolicyForwardings))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.policy_forwardings = Xtc.Forwarding.PolicyForwardings()
self.policy_forwardings.parent = self
self._children_name_map["policy_forwardings"] = "policy-forwardings"
self._children_yang_names.add("policy-forwardings")
self._segment_path = lambda: "forwarding"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/%s" % self._segment_path()
class PolicyForwardings(Entity):
"""
Forwarding information for policies
.. attribute:: policy_forwarding
Forwarding information for the policy
**type**\: list of :py:class:`PolicyForwarding <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Forwarding.PolicyForwardings.PolicyForwarding>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Forwarding.PolicyForwardings, self).__init__()
self.yang_name = "policy-forwardings"
self.yang_parent_name = "forwarding"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("policy-forwarding", ("policy_forwarding", Xtc.Forwarding.PolicyForwardings.PolicyForwarding))])
self._leafs = OrderedDict()
self.policy_forwarding = YList(self)
self._segment_path = lambda: "policy-forwardings"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/forwarding/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Forwarding.PolicyForwardings, [], name, value)
class PolicyForwarding(Entity):
"""
Forwarding information for the policy
.. attribute:: name (key)
Policy Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: policy_name
Policy name
**type**\: str
.. attribute:: is_local_label_valid
Is local label valid and allocated?
**type**\: bool
.. attribute:: local_label
Local label for SR MPLS policy
**type**\: int
**range:** 0..4294967295
.. attribute:: are_stats_valid
Are policy stats valid?
**type**\: bool
.. attribute:: forwarding_stats_pkts
Number of packets forwarded
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: forwarding_stats_bytes
Number of bytes forwarded
**type**\: int
**range:** 0..18446744073709551615
**units**\: byte
.. attribute:: paths
Forwarding paths
**type**\: list of :py:class:`Paths <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.Forwarding.PolicyForwardings.PolicyForwarding.Paths>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Forwarding.PolicyForwardings.PolicyForwarding, self).__init__()
self.yang_name = "policy-forwarding"
self.yang_parent_name = "policy-forwardings"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['name']
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("paths", ("paths", Xtc.Forwarding.PolicyForwardings.PolicyForwarding.Paths))])
self._leafs = OrderedDict([
('name', YLeaf(YType.str, 'name')),
('policy_name', YLeaf(YType.str, 'policy-name')),
('is_local_label_valid', YLeaf(YType.boolean, 'is-local-label-valid')),
('local_label', YLeaf(YType.uint32, 'local-label')),
('are_stats_valid', YLeaf(YType.boolean, 'are-stats-valid')),
('forwarding_stats_pkts', YLeaf(YType.uint64, 'forwarding-stats-pkts')),
('forwarding_stats_bytes', YLeaf(YType.uint64, 'forwarding-stats-bytes')),
])
self.name = None
self.policy_name = None
self.is_local_label_valid = None
self.local_label = None
self.are_stats_valid = None
self.forwarding_stats_pkts = None
self.forwarding_stats_bytes = None
self.paths = YList(self)
self._segment_path = lambda: "policy-forwarding" + "[name='" + str(self.name) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/forwarding/policy-forwardings/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Forwarding.PolicyForwardings.PolicyForwarding, ['name', 'policy_name', 'is_local_label_valid', 'local_label', 'are_stats_valid', 'forwarding_stats_pkts', 'forwarding_stats_bytes'], name, value)
class Paths(Entity):
"""
Forwarding paths
.. attribute:: outgoing_interface
Outgoing interface handle
**type**\: str
**pattern:** [a\-zA\-Z0\-9./\-]+
.. attribute:: next_hop_ipv4
IPv4 Next Hop
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: next_hop_table_id
Table ID for nexthop address
**type**\: int
**range:** 0..4294967295
.. attribute:: is_protected
Is this path protected?
**type**\: bool
.. attribute:: is_pure_bkup
Is this path a pure backup ?
**type**\: bool
.. attribute:: load_metric
Path's load metric for load balancing
**type**\: int
**range:** 0..4294967295
.. attribute:: path_id
path Id
**type**\: int
**range:** 0..255
.. attribute:: bkup_path_id
Backup path Id
**type**\: int
**range:** 0..255
.. attribute:: segment_list_name
Associated segment\-list
**type**\: str
.. attribute:: are_stats_valid
Are per path stats valid?
**type**\: bool
.. attribute:: forwarding_stats_pkts
Number of packets forwarded on this path
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: forwarding_stats_bytes
Number of bytes forwarded on this path
**type**\: int
**range:** 0..18446744073709551615
**units**\: byte
.. attribute:: label_stack
Path outgoing labels
**type**\: list of int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.Forwarding.PolicyForwardings.PolicyForwarding.Paths, self).__init__()
self.yang_name = "paths"
self.yang_parent_name = "policy-forwarding"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('outgoing_interface', YLeaf(YType.str, 'outgoing-interface')),
('next_hop_ipv4', YLeaf(YType.str, 'next-hop-ipv4')),
('next_hop_table_id', YLeaf(YType.uint32, 'next-hop-table-id')),
('is_protected', YLeaf(YType.boolean, 'is-protected')),
('is_pure_bkup', YLeaf(YType.boolean, 'is-pure-bkup')),
('load_metric', YLeaf(YType.uint32, 'load-metric')),
('path_id', YLeaf(YType.uint8, 'path-id')),
('bkup_path_id', YLeaf(YType.uint8, 'bkup-path-id')),
('segment_list_name', YLeaf(YType.str, 'segment-list-name')),
('are_stats_valid', YLeaf(YType.boolean, 'are-stats-valid')),
('forwarding_stats_pkts', YLeaf(YType.uint64, 'forwarding-stats-pkts')),
('forwarding_stats_bytes', YLeaf(YType.uint64, 'forwarding-stats-bytes')),
('label_stack', YLeafList(YType.uint32, 'label-stack')),
])
self.outgoing_interface = None
self.next_hop_ipv4 = None
self.next_hop_table_id = None
self.is_protected = None
self.is_pure_bkup = None
self.load_metric = None
self.path_id = None
self.bkup_path_id = None
self.segment_list_name = None
self.are_stats_valid = None
self.forwarding_stats_pkts = None
self.forwarding_stats_bytes = None
self.label_stack = []
self._segment_path = lambda: "paths"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.Forwarding.PolicyForwardings.PolicyForwarding.Paths, ['outgoing_interface', 'next_hop_ipv4', 'next_hop_table_id', 'is_protected', 'is_pure_bkup', 'load_metric', 'path_id', 'bkup_path_id', 'segment_list_name', 'are_stats_valid', 'forwarding_stats_pkts', 'forwarding_stats_bytes', 'label_stack'], name, value)
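# --- Usage sketch (assumption: `xtc` was read via CRUDService as sketched
# earlier in this module) ------------------------------------------------------
# Dumping per-policy forwarding counters and the outgoing label stack of each
# forwarding path.
#
#   for fwd in xtc.forwarding.policy_forwardings.policy_forwarding:
#       print(fwd.name, fwd.local_label,
#             fwd.forwarding_stats_pkts, fwd.forwarding_stats_bytes)
#       for path in fwd.paths:
#           print('  via', path.outgoing_interface, path.next_hop_ipv4,
#                 'labels:', list(path.label_stack))
# -----------------------------------------------------------------------------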
class TopologySummary(Entity):
"""
Node summary database
.. attribute:: nodes
Number of nodes
**type**\: int
**range:** 0..4294967295
.. attribute:: prefixes
Number of prefixes
**type**\: int
**range:** 0..4294967295
.. attribute:: prefix_sids
Number of prefix SIDs
**type**\: int
**range:** 0..4294967295
.. attribute:: links
Number of links
**type**\: int
**range:** 0..4294967295
.. attribute:: adjacency_sids
Number of adjacency SIDs
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologySummary, self).__init__()
self.yang_name = "topology-summary"
self.yang_parent_name = "xtc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('nodes', YLeaf(YType.uint32, 'nodes')),
('prefixes', YLeaf(YType.uint32, 'prefixes')),
('prefix_sids', YLeaf(YType.uint32, 'prefix-sids')),
('links', YLeaf(YType.uint32, 'links')),
('adjacency_sids', YLeaf(YType.uint32, 'adjacency-sids')),
])
self.nodes = None
self.prefixes = None
self.prefix_sids = None
self.links = None
self.adjacency_sids = None
self._segment_path = lambda: "topology-summary"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologySummary, ['nodes', 'prefixes', 'prefix_sids', 'links', 'adjacency_sids'], name, value)
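# --- Usage sketch (assumption: `xtc` was read via CRUDService as sketched
# earlier in this module) ------------------------------------------------------
# The topology summary is a flat set of counters.
#
#   summary = xtc.topology_summary
#   print('nodes:', summary.nodes, 'links:', summary.links,
#         'prefix SIDs:', summary.prefix_sids,
#         'adjacency SIDs:', summary.adjacency_sids)
# -----------------------------------------------------------------------------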
class TopologyNodes(Entity):
"""
Node database in XTC Agent
.. attribute:: topology_node
Node information
**type**\: list of :py:class:`TopologyNode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes, self).__init__()
self.yang_name = "topology-nodes"
self.yang_parent_name = "xtc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("topology-node", ("topology_node", Xtc.TopologyNodes.TopologyNode))])
self._leafs = OrderedDict()
self.topology_node = YList(self)
self._segment_path = lambda: "topology-nodes"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes, [], name, value)
class TopologyNode(Entity):
"""
Node information
.. attribute:: node_identifier (key)
Node Identifier
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: node_protocol_identifier
Node protocol identifier
**type**\: :py:class:`NodeProtocolIdentifier <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier>`
.. attribute:: node_identifier_xr
Node identifier
**type**\: int
**range:** 0..4294967295
.. attribute:: overload
Node Overload Bit
**type**\: bool
.. attribute:: prefix_sid
Prefix SIDs
**type**\: list of :py:class:`PrefixSid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.PrefixSid>`
.. attribute:: ipv4_link
IPv4 Link information
**type**\: list of :py:class:`Ipv4Link <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link>`
.. attribute:: ipv6_link
IPv6 Link information
**type**\: list of :py:class:`Ipv6Link <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode, self).__init__()
self.yang_name = "topology-node"
self.yang_parent_name = "topology-nodes"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['node_identifier']
self._child_container_classes = OrderedDict([("node-protocol-identifier", ("node_protocol_identifier", Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier))])
self._child_list_classes = OrderedDict([("prefix-sid", ("prefix_sid", Xtc.TopologyNodes.TopologyNode.PrefixSid)), ("ipv4-link", ("ipv4_link", Xtc.TopologyNodes.TopologyNode.Ipv4Link)), ("ipv6-link", ("ipv6_link", Xtc.TopologyNodes.TopologyNode.Ipv6Link))])
self._leafs = OrderedDict([
('node_identifier', YLeaf(YType.int32, 'node-identifier')),
('node_identifier_xr', YLeaf(YType.uint32, 'node-identifier-xr')),
('overload', YLeaf(YType.boolean, 'overload')),
])
self.node_identifier = None
self.node_identifier_xr = None
self.overload = None
self.node_protocol_identifier = Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier()
self.node_protocol_identifier.parent = self
self._children_name_map["node_protocol_identifier"] = "node-protocol-identifier"
self._children_yang_names.add("node-protocol-identifier")
self.prefix_sid = YList(self)
self.ipv4_link = YList(self)
self.ipv6_link = YList(self)
self._segment_path = lambda: "topology-node" + "[node-identifier='" + str(self.node_identifier) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/topology-nodes/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode, ['node_identifier', 'node_identifier_xr', 'overload'], name, value)
class NodeProtocolIdentifier(Entity):
"""
Node protocol identifier
.. attribute:: node_name
Node Name
**type**\: str
.. attribute:: ipv4_bgp_router_id_set
True if IPv4 BGP router ID is set
**type**\: bool
.. attribute:: ipv4_bgp_router_id
IPv4 BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv4te_router_id_set
True if IPv4 TE router ID is set
**type**\: bool
.. attribute:: ipv4te_router_id
IPv4 TE router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: igp_information
IGP information
**type**\: list of :py:class:`IgpInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier, self).__init__()
self.yang_name = "node-protocol-identifier"
self.yang_parent_name = "topology-node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("igp-information", ("igp_information", Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation))])
self._leafs = OrderedDict([
('node_name', YLeaf(YType.str, 'node-name')),
('ipv4_bgp_router_id_set', YLeaf(YType.boolean, 'ipv4-bgp-router-id-set')),
('ipv4_bgp_router_id', YLeaf(YType.str, 'ipv4-bgp-router-id')),
('ipv4te_router_id_set', YLeaf(YType.boolean, 'ipv4te-router-id-set')),
('ipv4te_router_id', YLeaf(YType.str, 'ipv4te-router-id')),
])
self.node_name = None
self.ipv4_bgp_router_id_set = None
self.ipv4_bgp_router_id = None
self.ipv4te_router_id_set = None
self.ipv4te_router_id = None
self.igp_information = YList(self)
self._segment_path = lambda: "node-protocol-identifier"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier, ['node_name', 'ipv4_bgp_router_id_set', 'ipv4_bgp_router_id', 'ipv4te_router_id_set', 'ipv4te_router_id'], name, value)
class IgpInformation(Entity):
"""
IGP information
.. attribute:: igp
IGP\-specific information
**type**\: :py:class:`Igp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp>`
.. attribute:: domain_identifier
Domain identifier
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation, self).__init__()
self.yang_name = "igp-information"
self.yang_parent_name = "node-protocol-identifier"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("igp", ("igp", Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('domain_identifier', YLeaf(YType.uint64, 'domain-identifier')),
])
self.domain_identifier = None
self.igp = Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp()
self.igp.parent = self
self._children_name_map["igp"] = "igp"
self._children_yang_names.add("igp")
self._segment_path = lambda: "igp-information"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation, ['domain_identifier'], name, value)
class Igp(Entity):
"""
IGP\-specific information
.. attribute:: isis
ISIS information
**type**\: :py:class:`Isis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Isis>`
.. attribute:: ospf
OSPF information
**type**\: :py:class:`Ospf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Ospf>`
.. attribute:: bgp
BGP information
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Bgp>`
.. attribute:: igp_id
IGP ID
**type**\: :py:class:`XtcIgpInfoId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcIgpInfoId>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp, self).__init__()
self.yang_name = "igp"
self.yang_parent_name = "igp-information"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("isis", ("isis", Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Isis)), ("ospf", ("ospf", Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Ospf)), ("bgp", ("bgp", Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Bgp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('igp_id', YLeaf(YType.enumeration, 'igp-id')),
])
self.igp_id = None
self.isis = Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Isis()
self.isis.parent = self
self._children_name_map["isis"] = "isis"
self._children_yang_names.add("isis")
self.ospf = Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Ospf()
self.ospf.parent = self
self._children_name_map["ospf"] = "ospf"
self._children_yang_names.add("ospf")
self.bgp = Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Bgp()
self.bgp.parent = self
self._children_name_map["bgp"] = "bgp"
self._children_yang_names.add("bgp")
self._segment_path = lambda: "igp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp, ['igp_id'], name, value)
class Isis(Entity):
"""
ISIS information
.. attribute:: system_id
ISIS system ID
**type**\: str
.. attribute:: level
ISIS level
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Isis, self).__init__()
self.yang_name = "isis"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('system_id', YLeaf(YType.str, 'system-id')),
('level', YLeaf(YType.uint32, 'level')),
])
self.system_id = None
self.level = None
self._segment_path = lambda: "isis"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Isis, ['system_id', 'level'], name, value)
class Ospf(Entity):
"""
OSPF information
.. attribute:: router_id
OSPF router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: area
OSPF area
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Ospf, self).__init__()
self.yang_name = "ospf"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
('area', YLeaf(YType.uint32, 'area')),
])
self.router_id = None
self.area = None
self._segment_path = lambda: "ospf"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Ospf, ['router_id', 'area'], name, value)
class Bgp(Entity):
"""
BGP information
.. attribute:: router_id
BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Bgp, self).__init__()
self.yang_name = "bgp"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
])
self.router_id = None
self._segment_path = lambda: "bgp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.NodeProtocolIdentifier.IgpInformation.Igp.Bgp, ['router_id'], name, value)
class PrefixSid(Entity):
"""
Prefix SIDs
.. attribute:: sid_prefix
Prefix
**type**\: :py:class:`SidPrefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.PrefixSid.SidPrefix>`
.. attribute:: sid_type
SID Type
**type**\: :py:class:`XtcSid1 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcSid1>`
.. attribute:: algorithm
Prefix\-SID algorithm number
**type**\: int
**range:** 0..4294967295
.. attribute:: mpls_label
MPLS Label
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.PrefixSid, self).__init__()
self.yang_name = "prefix-sid"
self.yang_parent_name = "topology-node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("sid-prefix", ("sid_prefix", Xtc.TopologyNodes.TopologyNode.PrefixSid.SidPrefix))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('sid_type', YLeaf(YType.enumeration, 'sid-type')),
('algorithm', YLeaf(YType.uint32, 'algorithm')),
('mpls_label', YLeaf(YType.uint32, 'mpls-label')),
])
self.sid_type = None
self.algorithm = None
self.mpls_label = None
self.sid_prefix = Xtc.TopologyNodes.TopologyNode.PrefixSid.SidPrefix()
self.sid_prefix.parent = self
self._children_name_map["sid_prefix"] = "sid-prefix"
self._children_yang_names.add("sid-prefix")
self._segment_path = lambda: "prefix-sid"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.PrefixSid, ['sid_type', 'algorithm', 'mpls_label'], name, value)
class SidPrefix(Entity):
"""
Prefix
.. attribute:: af_name
AFName
**type**\: :py:class:`XtcAfId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcAfId>`
.. attribute:: ipv4
IPv4 address type
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6
IPv6 address type
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.PrefixSid.SidPrefix, self).__init__()
self.yang_name = "sid-prefix"
self.yang_parent_name = "prefix-sid"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('af_name', YLeaf(YType.enumeration, 'af-name')),
('ipv4', YLeaf(YType.str, 'ipv4')),
('ipv6', YLeaf(YType.str, 'ipv6')),
])
self.af_name = None
self.ipv4 = None
self.ipv6 = None
self._segment_path = lambda: "sid-prefix"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.PrefixSid.SidPrefix, ['af_name', 'ipv4', 'ipv6'], name, value)
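# --- Usage sketch (assumption: `xtc` was read via CRUDService as sketched
# earlier in this module) ------------------------------------------------------
# Iterating topology nodes and their advertised prefix SIDs.
#
#   for node in xtc.topology_nodes.topology_node:
#       pid = node.node_protocol_identifier
#       print(node.node_identifier_xr, pid.node_name)
#       for psid in node.prefix_sid:
#           print('  prefix', psid.sid_prefix.ipv4, 'label', psid.mpls_label)
# -----------------------------------------------------------------------------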
class Ipv4Link(Entity):
"""
IPv4 Link information
.. attribute:: local_igp_information
Local node IGP information
**type**\: :py:class:`LocalIgpInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation>`
.. attribute:: remote_node_protocol_identifier
Remote node protocol identifier
**type**\: :py:class:`RemoteNodeProtocolIdentifier <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier>`
.. attribute:: local_ipv4_address
Local IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_ipv4_address
Remote IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: igp_metric
IGP Metric
**type**\: int
**range:** 0..4294967295
.. attribute:: te_metric
TE Metric
**type**\: int
**range:** 0..4294967295
.. attribute:: maximum_link_bandwidth
Max link bandwidth
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: max_reservable_bandwidth
Max Reservable bandwidth
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: administrative_groups
Link admin\-groups
**type**\: int
**range:** 0..4294967295
.. attribute:: srlgs
SRLG Values
**type**\: list of int
**range:** 0..4294967295
.. attribute:: adjacency_sid
Adjacency SIDs
**type**\: list of :py:class:`AdjacencySid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.AdjacencySid>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link, self).__init__()
self.yang_name = "ipv4-link"
self.yang_parent_name = "topology-node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("local-igp-information", ("local_igp_information", Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation)), ("remote-node-protocol-identifier", ("remote_node_protocol_identifier", Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier))])
self._child_list_classes = OrderedDict([("adjacency-sid", ("adjacency_sid", Xtc.TopologyNodes.TopologyNode.Ipv4Link.AdjacencySid))])
self._leafs = OrderedDict([
('local_ipv4_address', YLeaf(YType.str, 'local-ipv4-address')),
('remote_ipv4_address', YLeaf(YType.str, 'remote-ipv4-address')),
('igp_metric', YLeaf(YType.uint32, 'igp-metric')),
('te_metric', YLeaf(YType.uint32, 'te-metric')),
('maximum_link_bandwidth', YLeaf(YType.uint64, 'maximum-link-bandwidth')),
('max_reservable_bandwidth', YLeaf(YType.uint64, 'max-reservable-bandwidth')),
('administrative_groups', YLeaf(YType.uint32, 'administrative-groups')),
('srlgs', YLeafList(YType.uint32, 'srlgs')),
])
self.local_ipv4_address = None
self.remote_ipv4_address = None
self.igp_metric = None
self.te_metric = None
self.maximum_link_bandwidth = None
self.max_reservable_bandwidth = None
self.administrative_groups = None
self.srlgs = []
self.local_igp_information = Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation()
self.local_igp_information.parent = self
self._children_name_map["local_igp_information"] = "local-igp-information"
self._children_yang_names.add("local-igp-information")
self.remote_node_protocol_identifier = Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier()
self.remote_node_protocol_identifier.parent = self
self._children_name_map["remote_node_protocol_identifier"] = "remote-node-protocol-identifier"
self._children_yang_names.add("remote-node-protocol-identifier")
self.adjacency_sid = YList(self)
self._segment_path = lambda: "ipv4-link"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link, ['local_ipv4_address', 'remote_ipv4_address', 'igp_metric', 'te_metric', 'maximum_link_bandwidth', 'max_reservable_bandwidth', 'administrative_groups', 'srlgs'], name, value)
class LocalIgpInformation(Entity):
"""
Local node IGP information
.. attribute:: igp
IGP\-specific information
**type**\: :py:class:`Igp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp>`
.. attribute:: domain_identifier
Domain identifier
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation, self).__init__()
self.yang_name = "local-igp-information"
self.yang_parent_name = "ipv4-link"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("igp", ("igp", Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('domain_identifier', YLeaf(YType.uint64, 'domain-identifier')),
])
self.domain_identifier = None
self.igp = Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp()
self.igp.parent = self
self._children_name_map["igp"] = "igp"
self._children_yang_names.add("igp")
self._segment_path = lambda: "local-igp-information"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation, ['domain_identifier'], name, value)
class Igp(Entity):
"""
IGP\-specific information
.. attribute:: isis
ISIS information
**type**\: :py:class:`Isis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Isis>`
.. attribute:: ospf
OSPF information
**type**\: :py:class:`Ospf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Ospf>`
.. attribute:: bgp
BGP information
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Bgp>`
.. attribute:: igp_id
IGP ID
**type**\: :py:class:`XtcIgpInfoId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcIgpInfoId>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp, self).__init__()
self.yang_name = "igp"
self.yang_parent_name = "local-igp-information"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("isis", ("isis", Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Isis)), ("ospf", ("ospf", Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Ospf)), ("bgp", ("bgp", Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Bgp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('igp_id', YLeaf(YType.enumeration, 'igp-id')),
])
self.igp_id = None
self.isis = Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Isis()
self.isis.parent = self
self._children_name_map["isis"] = "isis"
self._children_yang_names.add("isis")
self.ospf = Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Ospf()
self.ospf.parent = self
self._children_name_map["ospf"] = "ospf"
self._children_yang_names.add("ospf")
self.bgp = Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Bgp()
self.bgp.parent = self
self._children_name_map["bgp"] = "bgp"
self._children_yang_names.add("bgp")
self._segment_path = lambda: "igp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp, ['igp_id'], name, value)
class Isis(Entity):
"""
ISIS information
.. attribute:: system_id
ISIS system ID
**type**\: str
.. attribute:: level
ISIS level
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Isis, self).__init__()
self.yang_name = "isis"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('system_id', YLeaf(YType.str, 'system-id')),
('level', YLeaf(YType.uint32, 'level')),
])
self.system_id = None
self.level = None
self._segment_path = lambda: "isis"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Isis, ['system_id', 'level'], name, value)
class Ospf(Entity):
"""
OSPF information
.. attribute:: router_id
OSPF router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: area
OSPF area
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Ospf, self).__init__()
self.yang_name = "ospf"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
('area', YLeaf(YType.uint32, 'area')),
])
self.router_id = None
self.area = None
self._segment_path = lambda: "ospf"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Ospf, ['router_id', 'area'], name, value)
class Bgp(Entity):
"""
BGP information
.. attribute:: router_id
BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Bgp, self).__init__()
self.yang_name = "bgp"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
])
self.router_id = None
self._segment_path = lambda: "bgp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.LocalIgpInformation.Igp.Bgp, ['router_id'], name, value)
class RemoteNodeProtocolIdentifier(Entity):
"""
Remote node protocol identifier
.. attribute:: node_name
Node Name
**type**\: str
.. attribute:: ipv4_bgp_router_id_set
True if IPv4 BGP router ID is set
**type**\: bool
.. attribute:: ipv4_bgp_router_id
IPv4 BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv4te_router_id_set
True if IPv4 TE router ID is set
**type**\: bool
.. attribute:: ipv4te_router_id
IPv4 TE router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: igp_information
IGP information
**type**\: list of :py:class:`IgpInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier, self).__init__()
self.yang_name = "remote-node-protocol-identifier"
self.yang_parent_name = "ipv4-link"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("igp-information", ("igp_information", Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation))])
self._leafs = OrderedDict([
('node_name', YLeaf(YType.str, 'node-name')),
('ipv4_bgp_router_id_set', YLeaf(YType.boolean, 'ipv4-bgp-router-id-set')),
('ipv4_bgp_router_id', YLeaf(YType.str, 'ipv4-bgp-router-id')),
('ipv4te_router_id_set', YLeaf(YType.boolean, 'ipv4te-router-id-set')),
('ipv4te_router_id', YLeaf(YType.str, 'ipv4te-router-id')),
])
self.node_name = None
self.ipv4_bgp_router_id_set = None
self.ipv4_bgp_router_id = None
self.ipv4te_router_id_set = None
self.ipv4te_router_id = None
self.igp_information = YList(self)
self._segment_path = lambda: "remote-node-protocol-identifier"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier, ['node_name', 'ipv4_bgp_router_id_set', 'ipv4_bgp_router_id', 'ipv4te_router_id_set', 'ipv4te_router_id'], name, value)
class IgpInformation(Entity):
"""
IGP information
.. attribute:: igp
IGP\-specific information
**type**\: :py:class:`Igp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp>`
.. attribute:: domain_identifier
Domain identifier
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation, self).__init__()
self.yang_name = "igp-information"
self.yang_parent_name = "remote-node-protocol-identifier"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("igp", ("igp", Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('domain_identifier', YLeaf(YType.uint64, 'domain-identifier')),
])
self.domain_identifier = None
self.igp = Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp()
self.igp.parent = self
self._children_name_map["igp"] = "igp"
self._children_yang_names.add("igp")
self._segment_path = lambda: "igp-information"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation, ['domain_identifier'], name, value)
class Igp(Entity):
"""
IGP\-specific information
.. attribute:: isis
ISIS information
**type**\: :py:class:`Isis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis>`
.. attribute:: ospf
OSPF information
**type**\: :py:class:`Ospf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf>`
.. attribute:: bgp
BGP information
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp>`
.. attribute:: igp_id
IGP ID
**type**\: :py:class:`XtcIgpInfoId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcIgpInfoId>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp, self).__init__()
self.yang_name = "igp"
self.yang_parent_name = "igp-information"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("isis", ("isis", Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis)), ("ospf", ("ospf", Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf)), ("bgp", ("bgp", Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('igp_id', YLeaf(YType.enumeration, 'igp-id')),
])
self.igp_id = None
self.isis = Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis()
self.isis.parent = self
self._children_name_map["isis"] = "isis"
self._children_yang_names.add("isis")
self.ospf = Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf()
self.ospf.parent = self
self._children_name_map["ospf"] = "ospf"
self._children_yang_names.add("ospf")
self.bgp = Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp()
self.bgp.parent = self
self._children_name_map["bgp"] = "bgp"
self._children_yang_names.add("bgp")
self._segment_path = lambda: "igp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp, ['igp_id'], name, value)
class Isis(Entity):
"""
ISIS information
.. attribute:: system_id
ISIS system ID
**type**\: str
.. attribute:: level
ISIS level
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis, self).__init__()
self.yang_name = "isis"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('system_id', YLeaf(YType.str, 'system-id')),
('level', YLeaf(YType.uint32, 'level')),
])
self.system_id = None
self.level = None
self._segment_path = lambda: "isis"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis, ['system_id', 'level'], name, value)
class Ospf(Entity):
"""
OSPF information
.. attribute:: router_id
OSPF router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: area
OSPF area
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf, self).__init__()
self.yang_name = "ospf"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
('area', YLeaf(YType.uint32, 'area')),
])
self.router_id = None
self.area = None
self._segment_path = lambda: "ospf"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf, ['router_id', 'area'], name, value)
class Bgp(Entity):
"""
BGP information
.. attribute:: router_id
BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp, self).__init__()
self.yang_name = "bgp"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
])
self.router_id = None
self._segment_path = lambda: "bgp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp, ['router_id'], name, value)
class AdjacencySid(Entity):
"""
Adjacency SIDs
.. attribute:: sid_prefix
Prefix
**type**\: :py:class:`SidPrefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv4Link.AdjacencySid.SidPrefix>`
.. attribute:: sid_type
SID Type
**type**\: :py:class:`XtcSid1 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcSid1>`
.. attribute:: algorithm
Prefix\-SID algorithm number
**type**\: int
**range:** 0..4294967295
.. attribute:: mpls_label
MPLS Label
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.AdjacencySid, self).__init__()
self.yang_name = "adjacency-sid"
self.yang_parent_name = "ipv4-link"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("sid-prefix", ("sid_prefix", Xtc.TopologyNodes.TopologyNode.Ipv4Link.AdjacencySid.SidPrefix))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('sid_type', YLeaf(YType.enumeration, 'sid-type')),
('algorithm', YLeaf(YType.uint32, 'algorithm')),
('mpls_label', YLeaf(YType.uint32, 'mpls-label')),
])
self.sid_type = None
self.algorithm = None
self.mpls_label = None
self.sid_prefix = Xtc.TopologyNodes.TopologyNode.Ipv4Link.AdjacencySid.SidPrefix()
self.sid_prefix.parent = self
self._children_name_map["sid_prefix"] = "sid-prefix"
self._children_yang_names.add("sid-prefix")
self._segment_path = lambda: "adjacency-sid"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.AdjacencySid, ['sid_type', 'algorithm', 'mpls_label'], name, value)
class SidPrefix(Entity):
"""
Prefix
.. attribute:: af_name
AFName
**type**\: :py:class:`XtcAfId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcAfId>`
.. attribute:: ipv4
IPv4 address type
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6
IPv6 address type
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv4Link.AdjacencySid.SidPrefix, self).__init__()
self.yang_name = "sid-prefix"
self.yang_parent_name = "adjacency-sid"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('af_name', YLeaf(YType.enumeration, 'af-name')),
('ipv4', YLeaf(YType.str, 'ipv4')),
('ipv6', YLeaf(YType.str, 'ipv6')),
])
self.af_name = None
self.ipv4 = None
self.ipv6 = None
self._segment_path = lambda: "sid-prefix"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv4Link.AdjacencySid.SidPrefix, ['af_name', 'ipv4', 'ipv6'], name, value)
class Ipv6Link(Entity):
"""
IPv6 Link information
.. attribute:: local_igp_information
Local node IGP information
**type**\: :py:class:`LocalIgpInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation>`
.. attribute:: remote_node_protocol_identifier
Remote node protocol identifier
**type**\: :py:class:`RemoteNodeProtocolIdentifier <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier>`
.. attribute:: local_ipv6_address
Local IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: remote_ipv6_address
Remote IPv6 address
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
.. attribute:: igp_metric
IGP Metric
**type**\: int
**range:** 0..4294967295
.. attribute:: te_metric
TE Metric
**type**\: int
**range:** 0..4294967295
.. attribute:: maximum_link_bandwidth
Max link bandwidth
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: max_reservable_bandwidth
Max Reservable bandwidth
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: adjacency_sid
Adjacency SIDs
**type**\: list of :py:class:`AdjacencySid <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.AdjacencySid>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link, self).__init__()
self.yang_name = "ipv6-link"
self.yang_parent_name = "topology-node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("local-igp-information", ("local_igp_information", Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation)), ("remote-node-protocol-identifier", ("remote_node_protocol_identifier", Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier))])
self._child_list_classes = OrderedDict([("adjacency-sid", ("adjacency_sid", Xtc.TopologyNodes.TopologyNode.Ipv6Link.AdjacencySid))])
self._leafs = OrderedDict([
('local_ipv6_address', YLeaf(YType.str, 'local-ipv6-address')),
('remote_ipv6_address', YLeaf(YType.str, 'remote-ipv6-address')),
('igp_metric', YLeaf(YType.uint32, 'igp-metric')),
('te_metric', YLeaf(YType.uint32, 'te-metric')),
('maximum_link_bandwidth', YLeaf(YType.uint64, 'maximum-link-bandwidth')),
('max_reservable_bandwidth', YLeaf(YType.uint64, 'max-reservable-bandwidth')),
])
self.local_ipv6_address = None
self.remote_ipv6_address = None
self.igp_metric = None
self.te_metric = None
self.maximum_link_bandwidth = None
self.max_reservable_bandwidth = None
self.local_igp_information = Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation()
self.local_igp_information.parent = self
self._children_name_map["local_igp_information"] = "local-igp-information"
self._children_yang_names.add("local-igp-information")
self.remote_node_protocol_identifier = Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier()
self.remote_node_protocol_identifier.parent = self
self._children_name_map["remote_node_protocol_identifier"] = "remote-node-protocol-identifier"
self._children_yang_names.add("remote-node-protocol-identifier")
self.adjacency_sid = YList(self)
self._segment_path = lambda: "ipv6-link"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link, ['local_ipv6_address', 'remote_ipv6_address', 'igp_metric', 'te_metric', 'maximum_link_bandwidth', 'max_reservable_bandwidth'], name, value)
class LocalIgpInformation(Entity):
"""
Local node IGP information
.. attribute:: igp
IGP\-specific information
**type**\: :py:class:`Igp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp>`
.. attribute:: domain_identifier
Domain identifier
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation, self).__init__()
self.yang_name = "local-igp-information"
self.yang_parent_name = "ipv6-link"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("igp", ("igp", Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('domain_identifier', YLeaf(YType.uint64, 'domain-identifier')),
])
self.domain_identifier = None
self.igp = Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp()
self.igp.parent = self
self._children_name_map["igp"] = "igp"
self._children_yang_names.add("igp")
self._segment_path = lambda: "local-igp-information"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation, ['domain_identifier'], name, value)
class Igp(Entity):
"""
IGP\-specific information
.. attribute:: isis
ISIS information
**type**\: :py:class:`Isis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Isis>`
.. attribute:: ospf
OSPF information
**type**\: :py:class:`Ospf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Ospf>`
.. attribute:: bgp
BGP information
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Bgp>`
.. attribute:: igp_id
IGP ID
**type**\: :py:class:`XtcIgpInfoId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcIgpInfoId>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp, self).__init__()
self.yang_name = "igp"
self.yang_parent_name = "local-igp-information"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("isis", ("isis", Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Isis)), ("ospf", ("ospf", Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Ospf)), ("bgp", ("bgp", Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Bgp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('igp_id', YLeaf(YType.enumeration, 'igp-id')),
])
self.igp_id = None
self.isis = Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Isis()
self.isis.parent = self
self._children_name_map["isis"] = "isis"
self._children_yang_names.add("isis")
self.ospf = Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Ospf()
self.ospf.parent = self
self._children_name_map["ospf"] = "ospf"
self._children_yang_names.add("ospf")
self.bgp = Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Bgp()
self.bgp.parent = self
self._children_name_map["bgp"] = "bgp"
self._children_yang_names.add("bgp")
self._segment_path = lambda: "igp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp, ['igp_id'], name, value)
class Isis(Entity):
"""
ISIS information
.. attribute:: system_id
ISIS system ID
**type**\: str
.. attribute:: level
ISIS level
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Isis, self).__init__()
self.yang_name = "isis"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('system_id', YLeaf(YType.str, 'system-id')),
('level', YLeaf(YType.uint32, 'level')),
])
self.system_id = None
self.level = None
self._segment_path = lambda: "isis"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Isis, ['system_id', 'level'], name, value)
class Ospf(Entity):
"""
OSPF information
.. attribute:: router_id
OSPF router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: area
OSPF area
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Ospf, self).__init__()
self.yang_name = "ospf"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
('area', YLeaf(YType.uint32, 'area')),
])
self.router_id = None
self.area = None
self._segment_path = lambda: "ospf"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Ospf, ['router_id', 'area'], name, value)
class Bgp(Entity):
"""
BGP information
.. attribute:: router_id
BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Bgp, self).__init__()
self.yang_name = "bgp"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
])
self.router_id = None
self._segment_path = lambda: "bgp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.LocalIgpInformation.Igp.Bgp, ['router_id'], name, value)
class RemoteNodeProtocolIdentifier(Entity):
"""
Remote node protocol identifier
.. attribute:: node_name
Node Name
**type**\: str
.. attribute:: ipv4_bgp_router_id_set
True if IPv4 BGP router ID is set
**type**\: bool
.. attribute:: ipv4_bgp_router_id
IPv4 BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv4te_router_id_set
True if IPv4 TE router ID is set
**type**\: bool
.. attribute:: ipv4te_router_id
IPv4 TE router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: igp_information
IGP information
**type**\: list of :py:class:`IgpInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier, self).__init__()
self.yang_name = "remote-node-protocol-identifier"
self.yang_parent_name = "ipv6-link"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("igp-information", ("igp_information", Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation))])
self._leafs = OrderedDict([
('node_name', YLeaf(YType.str, 'node-name')),
('ipv4_bgp_router_id_set', YLeaf(YType.boolean, 'ipv4-bgp-router-id-set')),
('ipv4_bgp_router_id', YLeaf(YType.str, 'ipv4-bgp-router-id')),
('ipv4te_router_id_set', YLeaf(YType.boolean, 'ipv4te-router-id-set')),
('ipv4te_router_id', YLeaf(YType.str, 'ipv4te-router-id')),
])
self.node_name = None
self.ipv4_bgp_router_id_set = None
self.ipv4_bgp_router_id = None
self.ipv4te_router_id_set = None
self.ipv4te_router_id = None
self.igp_information = YList(self)
self._segment_path = lambda: "remote-node-protocol-identifier"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier, ['node_name', 'ipv4_bgp_router_id_set', 'ipv4_bgp_router_id', 'ipv4te_router_id_set', 'ipv4te_router_id'], name, value)
class IgpInformation(Entity):
"""
IGP information
.. attribute:: igp
IGP\-specific information
**type**\: :py:class:`Igp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp>`
.. attribute:: domain_identifier
Domain identifier
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation, self).__init__()
self.yang_name = "igp-information"
self.yang_parent_name = "remote-node-protocol-identifier"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("igp", ("igp", Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('domain_identifier', YLeaf(YType.uint64, 'domain-identifier')),
])
self.domain_identifier = None
self.igp = Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp()
self.igp.parent = self
self._children_name_map["igp"] = "igp"
self._children_yang_names.add("igp")
self._segment_path = lambda: "igp-information"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation, ['domain_identifier'], name, value)
class Igp(Entity):
"""
IGP\-specific information
.. attribute:: isis
ISIS information
**type**\: :py:class:`Isis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis>`
.. attribute:: ospf
OSPF information
**type**\: :py:class:`Ospf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf>`
.. attribute:: bgp
BGP information
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp>`
.. attribute:: igp_id
IGP ID
**type**\: :py:class:`XtcIgpInfoId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcIgpInfoId>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp, self).__init__()
self.yang_name = "igp"
self.yang_parent_name = "igp-information"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("isis", ("isis", Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis)), ("ospf", ("ospf", Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf)), ("bgp", ("bgp", Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('igp_id', YLeaf(YType.enumeration, 'igp-id')),
])
self.igp_id = None
self.isis = Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis()
self.isis.parent = self
self._children_name_map["isis"] = "isis"
self._children_yang_names.add("isis")
self.ospf = Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf()
self.ospf.parent = self
self._children_name_map["ospf"] = "ospf"
self._children_yang_names.add("ospf")
self.bgp = Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp()
self.bgp.parent = self
self._children_name_map["bgp"] = "bgp"
self._children_yang_names.add("bgp")
self._segment_path = lambda: "igp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp, ['igp_id'], name, value)
class Isis(Entity):
"""
ISIS information
.. attribute:: system_id
ISIS system ID
**type**\: str
.. attribute:: level
ISIS level
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis, self).__init__()
self.yang_name = "isis"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('system_id', YLeaf(YType.str, 'system-id')),
('level', YLeaf(YType.uint32, 'level')),
])
self.system_id = None
self.level = None
self._segment_path = lambda: "isis"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Isis, ['system_id', 'level'], name, value)
class Ospf(Entity):
"""
OSPF information
.. attribute:: router_id
OSPF router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: area
OSPF area
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf, self).__init__()
self.yang_name = "ospf"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
('area', YLeaf(YType.uint32, 'area')),
])
self.router_id = None
self.area = None
self._segment_path = lambda: "ospf"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Ospf, ['router_id', 'area'], name, value)
class Bgp(Entity):
"""
BGP information
.. attribute:: router_id
BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp, self).__init__()
self.yang_name = "bgp"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
])
self.router_id = None
self._segment_path = lambda: "bgp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.RemoteNodeProtocolIdentifier.IgpInformation.Igp.Bgp, ['router_id'], name, value)
class AdjacencySid(Entity):
"""
Adjacency SIDs
.. attribute:: sid_prefix
Prefix
**type**\: :py:class:`SidPrefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.TopologyNodes.TopologyNode.Ipv6Link.AdjacencySid.SidPrefix>`
.. attribute:: sid_type
SID Type
**type**\: :py:class:`XtcSid1 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcSid1>`
.. attribute:: algorithm
Prefix\-SID algorithm number
**type**\: int
**range:** 0..4294967295
.. attribute:: mpls_label
MPLS Label
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.AdjacencySid, self).__init__()
self.yang_name = "adjacency-sid"
self.yang_parent_name = "ipv6-link"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("sid-prefix", ("sid_prefix", Xtc.TopologyNodes.TopologyNode.Ipv6Link.AdjacencySid.SidPrefix))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('sid_type', YLeaf(YType.enumeration, 'sid-type')),
('algorithm', YLeaf(YType.uint32, 'algorithm')),
('mpls_label', YLeaf(YType.uint32, 'mpls-label')),
])
self.sid_type = None
self.algorithm = None
self.mpls_label = None
self.sid_prefix = Xtc.TopologyNodes.TopologyNode.Ipv6Link.AdjacencySid.SidPrefix()
self.sid_prefix.parent = self
self._children_name_map["sid_prefix"] = "sid-prefix"
self._children_yang_names.add("sid-prefix")
self._segment_path = lambda: "adjacency-sid"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.AdjacencySid, ['sid_type', 'algorithm', 'mpls_label'], name, value)
class SidPrefix(Entity):
"""
Prefix
.. attribute:: af_name
AFName
**type**\: :py:class:`XtcAfId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcAfId>`
.. attribute:: ipv4
IPv4 address type
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6
IPv6 address type
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.TopologyNodes.TopologyNode.Ipv6Link.AdjacencySid.SidPrefix, self).__init__()
self.yang_name = "sid-prefix"
self.yang_parent_name = "adjacency-sid"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('af_name', YLeaf(YType.enumeration, 'af-name')),
('ipv4', YLeaf(YType.str, 'ipv4')),
('ipv6', YLeaf(YType.str, 'ipv6')),
])
self.af_name = None
self.ipv4 = None
self.ipv6 = None
self._segment_path = lambda: "sid-prefix"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.TopologyNodes.TopologyNode.Ipv6Link.AdjacencySid.SidPrefix, ['af_name', 'ipv4', 'ipv6'], name, value)
class PrefixInfos(Entity):
"""
Prefixes database in XTC Agent
.. attribute:: prefix_info
Prefix information
**type**\: list of :py:class:`PrefixInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.PrefixInfos.PrefixInfo>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.PrefixInfos, self).__init__()
self.yang_name = "prefix-infos"
self.yang_parent_name = "xtc"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("prefix-info", ("prefix_info", Xtc.PrefixInfos.PrefixInfo))])
self._leafs = OrderedDict()
self.prefix_info = YList(self)
self._segment_path = lambda: "prefix-infos"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.PrefixInfos, [], name, value)
class PrefixInfo(Entity):
"""
Prefix information
.. attribute:: node_identifier (key)
Node ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: node_protocol_identifier
Node protocol identifier
**type**\: :py:class:`NodeProtocolIdentifier <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier>`
.. attribute:: node_identifier_xr
Node identifier
**type**\: int
**range:** 0..4294967295
.. attribute:: address
Prefix address
**type**\: list of :py:class:`Address <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.PrefixInfos.PrefixInfo.Address>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.PrefixInfos.PrefixInfo, self).__init__()
self.yang_name = "prefix-info"
self.yang_parent_name = "prefix-infos"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['node_identifier']
self._child_container_classes = OrderedDict([("node-protocol-identifier", ("node_protocol_identifier", Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier))])
self._child_list_classes = OrderedDict([("address", ("address", Xtc.PrefixInfos.PrefixInfo.Address))])
self._leafs = OrderedDict([
('node_identifier', YLeaf(YType.int32, 'node-identifier')),
('node_identifier_xr', YLeaf(YType.uint32, 'node-identifier-xr')),
])
self.node_identifier = None
self.node_identifier_xr = None
self.node_protocol_identifier = Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier()
self.node_protocol_identifier.parent = self
self._children_name_map["node_protocol_identifier"] = "node-protocol-identifier"
self._children_yang_names.add("node-protocol-identifier")
self.address = YList(self)
self._segment_path = lambda: "prefix-info" + "[node-identifier='" + str(self.node_identifier) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-xtc-agent-oper:xtc/prefix-infos/%s" % self._segment_path()
def __setattr__(self, name, value):
self._perform_setattr(Xtc.PrefixInfos.PrefixInfo, ['node_identifier', 'node_identifier_xr'], name, value)
class NodeProtocolIdentifier(Entity):
"""
Node protocol identifier
.. attribute:: node_name
Node Name
**type**\: str
.. attribute:: ipv4_bgp_router_id_set
True if IPv4 BGP router ID is set
**type**\: bool
.. attribute:: ipv4_bgp_router_id
IPv4 BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv4te_router_id_set
True if IPv4 TE router ID is set
**type**\: bool
.. attribute:: ipv4te_router_id
IPv4 TE router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: igp_information
IGP information
**type**\: list of :py:class:`IgpInformation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier, self).__init__()
self.yang_name = "node-protocol-identifier"
self.yang_parent_name = "prefix-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("igp-information", ("igp_information", Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation))])
self._leafs = OrderedDict([
('node_name', YLeaf(YType.str, 'node-name')),
('ipv4_bgp_router_id_set', YLeaf(YType.boolean, 'ipv4-bgp-router-id-set')),
('ipv4_bgp_router_id', YLeaf(YType.str, 'ipv4-bgp-router-id')),
('ipv4te_router_id_set', YLeaf(YType.boolean, 'ipv4te-router-id-set')),
('ipv4te_router_id', YLeaf(YType.str, 'ipv4te-router-id')),
])
self.node_name = None
self.ipv4_bgp_router_id_set = None
self.ipv4_bgp_router_id = None
self.ipv4te_router_id_set = None
self.ipv4te_router_id = None
self.igp_information = YList(self)
self._segment_path = lambda: "node-protocol-identifier"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier, ['node_name', 'ipv4_bgp_router_id_set', 'ipv4_bgp_router_id', 'ipv4te_router_id_set', 'ipv4te_router_id'], name, value)
class IgpInformation(Entity):
"""
IGP information
.. attribute:: igp
IGP\-specific information
**type**\: :py:class:`Igp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp>`
.. attribute:: domain_identifier
Domain identifier
**type**\: int
**range:** 0..18446744073709551615
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation, self).__init__()
self.yang_name = "igp-information"
self.yang_parent_name = "node-protocol-identifier"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("igp", ("igp", Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('domain_identifier', YLeaf(YType.uint64, 'domain-identifier')),
])
self.domain_identifier = None
self.igp = Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp()
self.igp.parent = self
self._children_name_map["igp"] = "igp"
self._children_yang_names.add("igp")
self._segment_path = lambda: "igp-information"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation, ['domain_identifier'], name, value)
class Igp(Entity):
"""
IGP\-specific information
.. attribute:: isis
ISIS information
**type**\: :py:class:`Isis <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Isis>`
.. attribute:: ospf
OSPF information
**type**\: :py:class:`Ospf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Ospf>`
.. attribute:: bgp
BGP information
**type**\: :py:class:`Bgp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Bgp>`
.. attribute:: igp_id
IGP ID
**type**\: :py:class:`XtcIgpInfoId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcIgpInfoId>`
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp, self).__init__()
self.yang_name = "igp"
self.yang_parent_name = "igp-information"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("isis", ("isis", Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Isis)), ("ospf", ("ospf", Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Ospf)), ("bgp", ("bgp", Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Bgp))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('igp_id', YLeaf(YType.enumeration, 'igp-id')),
])
self.igp_id = None
self.isis = Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Isis()
self.isis.parent = self
self._children_name_map["isis"] = "isis"
self._children_yang_names.add("isis")
self.ospf = Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Ospf()
self.ospf.parent = self
self._children_name_map["ospf"] = "ospf"
self._children_yang_names.add("ospf")
self.bgp = Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Bgp()
self.bgp.parent = self
self._children_name_map["bgp"] = "bgp"
self._children_yang_names.add("bgp")
self._segment_path = lambda: "igp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp, ['igp_id'], name, value)
class Isis(Entity):
"""
ISIS information
.. attribute:: system_id
ISIS system ID
**type**\: str
.. attribute:: level
ISIS level
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Isis, self).__init__()
self.yang_name = "isis"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('system_id', YLeaf(YType.str, 'system-id')),
('level', YLeaf(YType.uint32, 'level')),
])
self.system_id = None
self.level = None
self._segment_path = lambda: "isis"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Isis, ['system_id', 'level'], name, value)
class Ospf(Entity):
"""
OSPF information
.. attribute:: router_id
OSPF router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: area
OSPF area
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Ospf, self).__init__()
self.yang_name = "ospf"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
('area', YLeaf(YType.uint32, 'area')),
])
self.router_id = None
self.area = None
self._segment_path = lambda: "ospf"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Ospf, ['router_id', 'area'], name, value)
class Bgp(Entity):
"""
BGP information
.. attribute:: router_id
BGP router ID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Bgp, self).__init__()
self.yang_name = "bgp"
self.yang_parent_name = "igp"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('router_id', YLeaf(YType.str, 'router-id')),
])
self.router_id = None
self._segment_path = lambda: "bgp"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.PrefixInfos.PrefixInfo.NodeProtocolIdentifier.IgpInformation.Igp.Bgp, ['router_id'], name, value)
class Address(Entity):
"""
Prefix address
.. attribute:: af_name
AFName
**type**\: :py:class:`XtcAfId <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_xtc_agent_oper.XtcAfId>`
.. attribute:: ipv4
IPv4 address type
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
.. attribute:: ipv6
IPv6 address type
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'infra-xtc-agent-oper'
_revision = '2017-09-11'
def __init__(self):
super(Xtc.PrefixInfos.PrefixInfo.Address, self).__init__()
self.yang_name = "address"
self.yang_parent_name = "prefix-info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('af_name', YLeaf(YType.enumeration, 'af-name')),
('ipv4', YLeaf(YType.str, 'ipv4')),
('ipv6', YLeaf(YType.str, 'ipv6')),
])
self.af_name = None
self.ipv4 = None
self.ipv6 = None
self._segment_path = lambda: "address"
def __setattr__(self, name, value):
self._perform_setattr(Xtc.PrefixInfos.PrefixInfo.Address, ['af_name', 'ipv4', 'ipv6'], name, value)
def clone_ptr(self):
self._top_entity = Xtc()
return self._top_entity
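# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the generated model).
# It reads the operational XTC data over NETCONF with ydk-py's CRUDService and
# walks the prefix database modeled above; the device address and credentials
# are placeholders.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from ydk.services import CRUDService
    from ydk.providers import NetconfServiceProvider

    provider = NetconfServiceProvider(address="192.0.2.1",  # placeholder device
                                      port=830,
                                      username="admin",
                                      password="admin")
    crud = CRUDService()

    xtc_state = crud.read(provider, Xtc())
    for prefix in xtc_state.prefix_infos.prefix_info:
        print(prefix.node_identifier,
              prefix.node_protocol_identifier.node_name)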
|
py | b41641713a4148dd08a8e1e1e3938d990ffee92f | from collections import abc
import os
import torch
from torch.nn import functional as F
from torch.autograd import Function
from torch.utils.cpp_extension import load
module_path = os.path.dirname(__file__)
upfirdn2d_op = load(
"upfirdn2d",
sources=[
os.path.join(module_path, "upfirdn2d.cpp"),
os.path.join(module_path, "upfirdn2d_kernel.cu"),
],
)
class UpFirDn2dBackward(Function):
@staticmethod
def forward(
ctx, grad_output, kernel, grad_kernel, up, down, pad, g_pad, in_size, out_size
):
up_x, up_y = up
down_x, down_y = down
g_pad_x0, g_pad_x1, g_pad_y0, g_pad_y1 = g_pad
grad_output = grad_output.reshape(-1, out_size[0], out_size[1], 1)
grad_input = upfirdn2d_op.upfirdn2d(
grad_output.contiguous(),
grad_kernel,
down_x,
down_y,
up_x,
up_y,
g_pad_x0,
g_pad_x1,
g_pad_y0,
g_pad_y1,
)
grad_input = grad_input.view(in_size[0], in_size[1], in_size[2], in_size[3])
ctx.save_for_backward(kernel)
pad_x0, pad_x1, pad_y0, pad_y1 = pad
ctx.up_x = up_x
ctx.up_y = up_y
ctx.down_x = down_x
ctx.down_y = down_y
ctx.pad_x0 = pad_x0
ctx.pad_x1 = pad_x1
ctx.pad_y0 = pad_y0
ctx.pad_y1 = pad_y1
ctx.in_size = in_size
ctx.out_size = out_size
return grad_input
@staticmethod
def backward(ctx, gradgrad_input):
kernel, = ctx.saved_tensors
gradgrad_input = gradgrad_input.reshape(-1, ctx.in_size[2], ctx.in_size[3], 1)
gradgrad_out = upfirdn2d_op.upfirdn2d(
gradgrad_input,
kernel,
ctx.up_x,
ctx.up_y,
ctx.down_x,
ctx.down_y,
ctx.pad_x0,
ctx.pad_x1,
ctx.pad_y0,
ctx.pad_y1,
)
# gradgrad_out = gradgrad_out.view(ctx.in_size[0], ctx.out_size[0], ctx.out_size[1], ctx.in_size[3])
gradgrad_out = gradgrad_out.view(
ctx.in_size[0], ctx.in_size[1], ctx.out_size[0], ctx.out_size[1]
)
return gradgrad_out, None, None, None, None, None, None, None, None
class UpFirDn2d(Function):
@staticmethod
def forward(ctx, input, kernel, up, down, pad):
up_x, up_y = up
down_x, down_y = down
pad_x0, pad_x1, pad_y0, pad_y1 = pad
kernel_h, kernel_w = kernel.shape
batch, channel, in_h, in_w = input.shape
ctx.in_size = input.shape
input = input.reshape(-1, in_h, in_w, 1)
ctx.save_for_backward(kernel, torch.flip(kernel, [0, 1]))
out_h = (in_h * up_y + pad_y0 + pad_y1 - kernel_h + down_y) // down_y
out_w = (in_w * up_x + pad_x0 + pad_x1 - kernel_w + down_x) // down_x
ctx.out_size = (out_h, out_w)
ctx.up = (up_x, up_y)
ctx.down = (down_x, down_y)
ctx.pad = (pad_x0, pad_x1, pad_y0, pad_y1)
g_pad_x0 = kernel_w - pad_x0 - 1
g_pad_y0 = kernel_h - pad_y0 - 1
g_pad_x1 = in_w * up_x - out_w * down_x + pad_x0 - up_x + 1
g_pad_y1 = in_h * up_y - out_h * down_y + pad_y0 - up_y + 1
ctx.g_pad = (g_pad_x0, g_pad_x1, g_pad_y0, g_pad_y1)
out = upfirdn2d_op.upfirdn2d(
input, kernel, up_x, up_y, down_x, down_y, pad_x0, pad_x1, pad_y0, pad_y1
)
# out = out.view(major, out_h, out_w, minor)
out = out.view(-1, channel, out_h, out_w)
return out
@staticmethod
def backward(ctx, grad_output):
kernel, grad_kernel = ctx.saved_tensors
grad_input = None
if ctx.needs_input_grad[0]:
grad_input = UpFirDn2dBackward.apply(
grad_output,
kernel,
grad_kernel,
ctx.up,
ctx.down,
ctx.pad,
ctx.g_pad,
ctx.in_size,
ctx.out_size,
)
return grad_input, None, None, None, None
def upfirdn2d(input, kernel, up=1, down=1, pad=(0, 0)):
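    """Upsample, FIR-filter, and downsample a batch of 2D maps in one pass.

    `input` is (N, C, H, W) and `kernel` is a 2D FIR filter.  The input is
    upsampled by zero insertion with factors `up`, padded by `pad`
    ((x0, x1) or (x0, x1, y0, y1)), convolved with `kernel`, then decimated
    by `down`.  Output height is (H*up + pad_y0 + pad_y1 - kernel_h + down)
    // down, and analogously for width.  GPU tensors go through the compiled
    CUDA extension; CPU tensors fall back to `upfirdn2d_native`.
    """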
if not isinstance(up, abc.Iterable):
up = (up, up)
if not isinstance(down, abc.Iterable):
down = (down, down)
if len(pad) == 2:
pad = (pad[0], pad[1], pad[0], pad[1])
if input.device.type == "cpu":
out = upfirdn2d_native(input, kernel, *up, *down, *pad)
else:
out = UpFirDn2d.apply(input, kernel, up, down, pad)
return out
def upfirdn2d_native(
input, kernel, up_x, up_y, down_x, down_y, pad_x0, pad_x1, pad_y0, pad_y1
):
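    """Pure-PyTorch reference implementation of upfirdn2d, used on CPU tensors."""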
_, channel, in_h, in_w = input.shape
input = input.reshape(-1, in_h, in_w, 1)
_, in_h, in_w, minor = input.shape
kernel_h, kernel_w = kernel.shape
out = input.view(-1, in_h, 1, in_w, 1, minor)
out = F.pad(out, [0, 0, 0, up_x - 1, 0, 0, 0, up_y - 1])
out = out.view(-1, in_h * up_y, in_w * up_x, minor)
out = F.pad(
out, [0, 0, max(pad_x0, 0), max(pad_x1, 0), max(pad_y0, 0), max(pad_y1, 0)]
)
out = out[
:,
max(-pad_y0, 0) : out.shape[1] - max(-pad_y1, 0),
max(-pad_x0, 0) : out.shape[2] - max(-pad_x1, 0),
:,
]
out = out.permute(0, 3, 1, 2)
out = out.reshape(
[-1, 1, in_h * up_y + pad_y0 + pad_y1, in_w * up_x + pad_x0 + pad_x1]
)
w = torch.flip(kernel, [0, 1]).view(1, 1, kernel_h, kernel_w)
out = F.conv2d(out, w)
out = out.reshape(
-1,
minor,
in_h * up_y + pad_y0 + pad_y1 - kernel_h + 1,
in_w * up_x + pad_x0 + pad_x1 - kernel_w + 1,
)
out = out.permute(0, 2, 3, 1)
out = out[:, ::down_y, ::down_x, :]
out_h = (in_h * up_y + pad_y0 + pad_y1 - kernel_h + down_y) // down_y
out_w = (in_w * up_x + pad_x0 + pad_x1 - kernel_w + down_x) // down_x
return out.view(-1, channel, out_h, out_w)
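# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative; the kernel values and shapes below are
# assumptions, not part of this module): 2x upsampling with a normalized
# 4-tap binomial blur, similar to how the StyleGAN2 wrappers call upfirdn2d
# (their upsample path additionally scales the kernel by up**2, omitted here).
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    k = torch.tensor([1.0, 3.0, 3.0, 1.0])
    k = k[None, :] * k[:, None]
    k = k / k.sum()              # normalized separable blur kernel
    x = torch.randn(1, 3, 8, 8)  # (N, C, H, W) toy feature map
    p = k.shape[0] - 2           # extra padding budget for a 4-tap kernel
    y = upfirdn2d(x, k, up=2, down=1, pad=((p + 1) // 2 + 1, p // 2))
    print(y.shape)               # -> torch.Size([1, 3, 16, 16])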
|
py | b416426ec87b4378a2e4bd05a18f0d04cc981264 | #!/usr/bin/env python
import os
from setuptools import find_packages
from setuptools import setup
VERSION = 0.3
version = os.path.join('bfly', '__init__.py')
README = open('README.md').read()
INSTALL_REQ = [
'h5py>=2.6.0',
'scipy>=0.17.0',
'numpy>=1.12.0',
'tornado>=4.4.2',
'futures>=3.0.5',
'pyaml>=16.12.2',
'tifffile>=0.11.1',
'pymongo>=3.4.0',
]
setup(
version=VERSION,
name='bfly',
author='Daniel Haehn',
packages=find_packages(),
author_email='[email protected]',
url="https://github.com/Rhoana/butterfly",
description="butterfly dense image server",
# Installation requirements
install_requires= INSTALL_REQ,
# Allows command line execution
entry_points=dict(console_scripts=[
'bfly = bfly.cli:main',
'bfly_query = bfly.cli:query',
])
)
|
py | b416431948fd6953183493dbd943137874aac7f3 | # Victoria Island Explorer Medal | Used for that Explore Medal Quest
if sm.getFieldID() == 105000000 and sm.hasQuest(30004):
sm.setPlayerAsSpeaker()
sm.sendNext("Welp, this thing is ancient, but seems to be working. Guess I should head back.")
sm.warp(910700200, 0) # Root Abyss Quest Line Map
sm.completeQuest(30004)
elif sm.getFieldID() == 104000000:
sm.showEffect("Map/Effect.img/maplemap/enter/104000000")
if sm.getChr().getJob() == 0 and chr.getLevel() == 10:
sm.setDirectionMode(False, 0)
sm.createQuestWithQRValue(17901, "1")
sm.completeQuest(17901)
sm.systemMessage("You cleared the Adventure Journal mission.")
sm.avatarOriented("Effect/OnUserEff.img/RecordClear_BT/clear")
elif sm.getFieldID() == 220080000 and sm.hasQuest(1662):
sm.chatScript("Enter papulatus.")
elif sm.getFieldID() == 120000100 and "1" in sm.getQRValue(5675):
sm.createQuestWithQRValue(2945, "1")
sm.createQuestWithQRValue(17902, "1")
sm.completeQuest(17902)
sm.systemMessage("You cleared the Adventure Journal mission.")
sm.avatarOriented("Effect/OnUserEff.img/RecordClear_BT/clear")
# Update Quest Record EX | Quest ID: [51236] | Data: StageKey=0
elif sm.getFieldID() == 100000000:
sm.setMapTaggedObjectVisible("2018TreeBuff", False, 0, 0)
elif sm.getFieldID() == 250000100:
sm.warp(932200005) |
py | b41643293d26f54e8571949723dad07824fd0c82 | import pytest
from . import unit
from chain_caller import this, resolve
@pytest.fixture
def _obj():
class Some:
string = 'string'
integer = 10
lst = [1, 1.5, 'String']
dct = {
'dict': {},
'string': 'String',
'integer': 10,
}
def chained(self):
return self
def __getattr__(self, name):
return name
def int_val(self, some):
return self.integer * some
@property
def some(self):
return self.lst
return Some()
@unit
def test_magic(_obj):
assert 10 == resolve(this.dct['integer'], _obj)
assert 'string' == resolve(this.dct['string'].lower(), _obj)
assert 1 == resolve(this.lst[0], _obj)
assert 1.5 == resolve(this.some[1], _obj)
assert None is resolve(this.dct['dict'].get('http', None), _obj)
assert 'name' == resolve(this.name, _obj)
assert True == resolve(this.name == 'name', _obj)
@unit
def test_func_calls(_obj):
assert 100 == resolve(this.int_val(10), _obj)
assert 100 == resolve(this.int_val(this.integer), _obj)
assert _obj == resolve(this.chained().chained(), _obj)
@unit
def test_math(_obj):
assert 100 == resolve(this.integer + 90, _obj)
assert 100 == resolve(this.integer * 10, _obj)
assert 100 == resolve(this.dct['integer'] * 10, _obj)
assert 100 == resolve(this.dct['integer'] * 10, _obj)
assert 100 == resolve(90 + this.integer, _obj)
assert 100 == resolve(10 * this.integer, _obj)
assert 100 == resolve(10 * this, 10)
@unit
def test_immutability(_obj):
assert resolve(this, _obj) is resolve(this, _obj) is _obj
assert resolve(this.integer, _obj) is resolve(this.integer, _obj)
assert resolve(this.chained().chained(), _obj) \
is resolve(this.chained(), _obj)
|
py | b41643873167b00f1d17e66fa12b81c323dab481 | """
Main module
"""
import app.api
import app.commands
from app.app import APP
|
py | b416459e73db1fabe63408ecb17454736bab8fb8 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: IM.Server.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from IM import BaseDefine_pb2 as IM_dot_BaseDefine__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='IM.Server.proto',
package='IM.Server',
syntax='proto3',
serialized_pb=_b('\n\x0fIM.Server.proto\x12\tIM.Server\x1a\x13IM.BaseDefine.proto\"%\n\x13IMStopReceivePacket\x12\x0e\n\x06result\x18\x01 \x01(\r\"I\n\rIMValidateReq\x12\x11\n\tuser_name\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x13\n\x0b\x61ttach_data\x18\x14 \x01(\x0c\"\x8f\x01\n\rIMValidateRsp\x12\x11\n\tuser_name\x18\x01 \x01(\t\x12\x13\n\x0bresult_code\x18\x02 \x01(\r\x12\x15\n\rresult_string\x18\x03 \x01(\t\x12*\n\tuser_info\x18\x04 \x01(\x0b\x32\x17.IM.BaseDefine.UserInfo\x12\x13\n\x0b\x61ttach_data\x18\x14 \x01(\x0c\";\n\x13IMGetDeviceTokenReq\x12\x0f\n\x07user_id\x18\x01 \x03(\r\x12\x13\n\x0b\x61ttach_data\x18\x14 \x01(\x0c\"a\n\x13IMGetDeviceTokenRsp\x12\x35\n\x0fuser_token_info\x18\x01 \x03(\x0b\x32\x1c.IM.BaseDefine.UserTokenInfo\x12\x13\n\x0b\x61ttach_data\x18\x14 \x01(\x0c\"\x1b\n\tIMRoleSet\x12\x0e\n\x06master\x18\x01 \x01(\r\"I\n\x10IMOnlineUserInfo\x12\x35\n\x0euser_stat_list\x18\x01 \x03(\x0b\x32\x1d.IM.BaseDefine.ServerUserStat\"v\n\rIMMsgServInfo\x12\x0b\n\x03ip1\x18\x01 \x01(\t\x12\x0b\n\x03ip2\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\x12\x14\n\x0cmax_conn_cnt\x18\x04 \x01(\r\x12\x14\n\x0c\x63ur_conn_cnt\x18\x05 \x01(\r\x12\x11\n\thost_name\x18\x06 \x01(\t\"j\n\x12IMUserStatusUpdate\x12\x13\n\x0buser_status\x18\x01 \x01(\r\x12\x0f\n\x07user_id\x18\x02 \x01(\r\x12.\n\x0b\x63lient_type\x18\x03 \x01(\x0e\x32\x19.IM.BaseDefine.ClientType\"7\n\x0fIMUserCntUpdate\x12\x13\n\x0buser_action\x18\x01 \x01(\r\x12\x0f\n\x07user_id\x18\x02 \x01(\r\"c\n\x10IMServerKickUser\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12.\n\x0b\x63lient_type\x18\x02 \x01(\x0e\x32\x19.IM.BaseDefine.ClientType\x12\x0e\n\x06reason\x18\x03 \x01(\r\"D\n\x1bIMServerPCLoginStatusNotify\x12\x0f\n\x07user_id\x18\x01 \x01(\r\x12\x14\n\x0clogin_status\x18\x02 \x01(\r\"e\n\x0fIMPushToUserReq\x12\r\n\x05\x66lash\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\t\x12\x35\n\x0fuser_token_list\x18\x03 \x03(\x0b\x32\x1c.IM.BaseDefine.UserTokenInfo\"F\n\x0fIMPushToUserRsp\x12\x33\n\x10push_result_list\x18\x01 \x03(\x0b\x32\x19.IM.BaseDefine.PushResult\"M\n\x13IMGroupGetShieldReq\x12\x10\n\x08group_id\x18\x01 \x01(\r\x12\x0f\n\x07user_id\x18\x02 \x03(\r\x12\x13\n\x0b\x61ttach_data\x18\x14 \x01(\x0c\"u\n\x13IMGroupGetShieldRsp\x12\x10\n\x08group_id\x18\x01 \x01(\r\x12\x37\n\x12shield_status_list\x18\x02 \x03(\x0b\x32\x1b.IM.BaseDefine.ShieldStatus\x12\x13\n\x0b\x61ttach_data\x18\x14 \x01(\x0c\"\xad\x01\n\x11IMFileTransferReq\x12\x14\n\x0c\x66rom_user_id\x18\x01 \x01(\r\x12\x12\n\nto_user_id\x18\x02 \x01(\r\x12\x11\n\tfile_name\x18\x03 \x01(\t\x12\x11\n\tfile_size\x18\x04 \x01(\r\x12\x33\n\ntrans_mode\x18\x05 \x01(\x0e\x32\x1f.IM.BaseDefine.TransferFileType\x12\x13\n\x0b\x61ttach_data\x18\x14 \x01(\x0c\"\xd3\x01\n\x11IMFileTransferRsp\x12\x13\n\x0bresult_code\x18\x01 \x01(\r\x12\x14\n\x0c\x66rom_user_id\x18\x02 \x01(\r\x12\x12\n\nto_user_id\x18\x03 \x01(\r\x12\x11\n\tfile_name\x18\x04 \x01(\t\x12\x11\n\tfile_size\x18\x05 \x01(\r\x12\x0f\n\x07task_id\x18\x06 \x01(\t\x12\x33\n\ntrans_mode\x18\x07 \x01(\x0e\x32\x1f.IM.BaseDefine.TransferFileType\x12\x13\n\x0b\x61ttach_data\x18\x14 \x01(\x0c\"\x13\n\x11IMFileServerIPReq\"@\n\x11IMFileServerIPRsp\x12+\n\x0cip_addr_list\x18\x01 \x03(\x0b\x32\x15.IM.BaseDefine.IpAddrB\x02H\x03\x62\x06proto3')
,
dependencies=[IM_dot_BaseDefine__pb2.DESCRIPTOR,])
_IMSTOPRECEIVEPACKET = _descriptor.Descriptor(
name='IMStopReceivePacket',
full_name='IM.Server.IMStopReceivePacket',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='IM.Server.IMStopReceivePacket.result', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=51,
serialized_end=88,
)
_IMVALIDATEREQ = _descriptor.Descriptor(
name='IMValidateReq',
full_name='IM.Server.IMValidateReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_name', full_name='IM.Server.IMValidateReq.user_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='password', full_name='IM.Server.IMValidateReq.password', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attach_data', full_name='IM.Server.IMValidateReq.attach_data', index=2,
number=20, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=90,
serialized_end=163,
)
_IMVALIDATERSP = _descriptor.Descriptor(
name='IMValidateRsp',
full_name='IM.Server.IMValidateRsp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_name', full_name='IM.Server.IMValidateRsp.user_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='result_code', full_name='IM.Server.IMValidateRsp.result_code', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='result_string', full_name='IM.Server.IMValidateRsp.result_string', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_info', full_name='IM.Server.IMValidateRsp.user_info', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attach_data', full_name='IM.Server.IMValidateRsp.attach_data', index=4,
number=20, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=166,
serialized_end=309,
)
_IMGETDEVICETOKENREQ = _descriptor.Descriptor(
name='IMGetDeviceTokenReq',
full_name='IM.Server.IMGetDeviceTokenReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_id', full_name='IM.Server.IMGetDeviceTokenReq.user_id', index=0,
number=1, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attach_data', full_name='IM.Server.IMGetDeviceTokenReq.attach_data', index=1,
number=20, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=311,
serialized_end=370,
)
_IMGETDEVICETOKENRSP = _descriptor.Descriptor(
name='IMGetDeviceTokenRsp',
full_name='IM.Server.IMGetDeviceTokenRsp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_token_info', full_name='IM.Server.IMGetDeviceTokenRsp.user_token_info', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attach_data', full_name='IM.Server.IMGetDeviceTokenRsp.attach_data', index=1,
number=20, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=372,
serialized_end=469,
)
_IMROLESET = _descriptor.Descriptor(
name='IMRoleSet',
full_name='IM.Server.IMRoleSet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='master', full_name='IM.Server.IMRoleSet.master', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=471,
serialized_end=498,
)
_IMONLINEUSERINFO = _descriptor.Descriptor(
name='IMOnlineUserInfo',
full_name='IM.Server.IMOnlineUserInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_stat_list', full_name='IM.Server.IMOnlineUserInfo.user_stat_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=500,
serialized_end=573,
)
_IMMSGSERVINFO = _descriptor.Descriptor(
name='IMMsgServInfo',
full_name='IM.Server.IMMsgServInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ip1', full_name='IM.Server.IMMsgServInfo.ip1', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ip2', full_name='IM.Server.IMMsgServInfo.ip2', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='port', full_name='IM.Server.IMMsgServInfo.port', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='max_conn_cnt', full_name='IM.Server.IMMsgServInfo.max_conn_cnt', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='cur_conn_cnt', full_name='IM.Server.IMMsgServInfo.cur_conn_cnt', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='host_name', full_name='IM.Server.IMMsgServInfo.host_name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=575,
serialized_end=693,
)
_IMUSERSTATUSUPDATE = _descriptor.Descriptor(
name='IMUserStatusUpdate',
full_name='IM.Server.IMUserStatusUpdate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_status', full_name='IM.Server.IMUserStatusUpdate.user_status', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='IM.Server.IMUserStatusUpdate.user_id', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='client_type', full_name='IM.Server.IMUserStatusUpdate.client_type', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=695,
serialized_end=801,
)
_IMUSERCNTUPDATE = _descriptor.Descriptor(
name='IMUserCntUpdate',
full_name='IM.Server.IMUserCntUpdate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_action', full_name='IM.Server.IMUserCntUpdate.user_action', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='IM.Server.IMUserCntUpdate.user_id', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=803,
serialized_end=858,
)
_IMSERVERKICKUSER = _descriptor.Descriptor(
name='IMServerKickUser',
full_name='IM.Server.IMServerKickUser',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_id', full_name='IM.Server.IMServerKickUser.user_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='client_type', full_name='IM.Server.IMServerKickUser.client_type', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='reason', full_name='IM.Server.IMServerKickUser.reason', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=860,
serialized_end=959,
)
_IMSERVERPCLOGINSTATUSNOTIFY = _descriptor.Descriptor(
name='IMServerPCLoginStatusNotify',
full_name='IM.Server.IMServerPCLoginStatusNotify',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='user_id', full_name='IM.Server.IMServerPCLoginStatusNotify.user_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='login_status', full_name='IM.Server.IMServerPCLoginStatusNotify.login_status', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=961,
serialized_end=1029,
)
_IMPUSHTOUSERREQ = _descriptor.Descriptor(
name='IMPushToUserReq',
full_name='IM.Server.IMPushToUserReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='flash', full_name='IM.Server.IMPushToUserReq.flash', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='IM.Server.IMPushToUserReq.data', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_token_list', full_name='IM.Server.IMPushToUserReq.user_token_list', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1031,
serialized_end=1132,
)
_IMPUSHTOUSERRSP = _descriptor.Descriptor(
name='IMPushToUserRsp',
full_name='IM.Server.IMPushToUserRsp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='push_result_list', full_name='IM.Server.IMPushToUserRsp.push_result_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1134,
serialized_end=1204,
)
_IMGROUPGETSHIELDREQ = _descriptor.Descriptor(
name='IMGroupGetShieldReq',
full_name='IM.Server.IMGroupGetShieldReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='group_id', full_name='IM.Server.IMGroupGetShieldReq.group_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='user_id', full_name='IM.Server.IMGroupGetShieldReq.user_id', index=1,
number=2, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attach_data', full_name='IM.Server.IMGroupGetShieldReq.attach_data', index=2,
number=20, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1206,
serialized_end=1283,
)
_IMGROUPGETSHIELDRSP = _descriptor.Descriptor(
name='IMGroupGetShieldRsp',
full_name='IM.Server.IMGroupGetShieldRsp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='group_id', full_name='IM.Server.IMGroupGetShieldRsp.group_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='shield_status_list', full_name='IM.Server.IMGroupGetShieldRsp.shield_status_list', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attach_data', full_name='IM.Server.IMGroupGetShieldRsp.attach_data', index=2,
number=20, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1285,
serialized_end=1402,
)
_IMFILETRANSFERREQ = _descriptor.Descriptor(
name='IMFileTransferReq',
full_name='IM.Server.IMFileTransferReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='from_user_id', full_name='IM.Server.IMFileTransferReq.from_user_id', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='to_user_id', full_name='IM.Server.IMFileTransferReq.to_user_id', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file_name', full_name='IM.Server.IMFileTransferReq.file_name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file_size', full_name='IM.Server.IMFileTransferReq.file_size', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trans_mode', full_name='IM.Server.IMFileTransferReq.trans_mode', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attach_data', full_name='IM.Server.IMFileTransferReq.attach_data', index=5,
number=20, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1405,
serialized_end=1578,
)
_IMFILETRANSFERRSP = _descriptor.Descriptor(
name='IMFileTransferRsp',
full_name='IM.Server.IMFileTransferRsp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='result_code', full_name='IM.Server.IMFileTransferRsp.result_code', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='from_user_id', full_name='IM.Server.IMFileTransferRsp.from_user_id', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='to_user_id', full_name='IM.Server.IMFileTransferRsp.to_user_id', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file_name', full_name='IM.Server.IMFileTransferRsp.file_name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='file_size', full_name='IM.Server.IMFileTransferRsp.file_size', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='task_id', full_name='IM.Server.IMFileTransferRsp.task_id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='trans_mode', full_name='IM.Server.IMFileTransferRsp.trans_mode', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attach_data', full_name='IM.Server.IMFileTransferRsp.attach_data', index=7,
number=20, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1581,
serialized_end=1792,
)
_IMFILESERVERIPREQ = _descriptor.Descriptor(
name='IMFileServerIPReq',
full_name='IM.Server.IMFileServerIPReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1794,
serialized_end=1813,
)
_IMFILESERVERIPRSP = _descriptor.Descriptor(
name='IMFileServerIPRsp',
full_name='IM.Server.IMFileServerIPRsp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ip_addr_list', full_name='IM.Server.IMFileServerIPRsp.ip_addr_list', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1815,
serialized_end=1879,
)
_IMVALIDATERSP.fields_by_name['user_info'].message_type = IM_dot_BaseDefine__pb2._USERINFO
_IMGETDEVICETOKENRSP.fields_by_name['user_token_info'].message_type = IM_dot_BaseDefine__pb2._USERTOKENINFO
_IMONLINEUSERINFO.fields_by_name['user_stat_list'].message_type = IM_dot_BaseDefine__pb2._SERVERUSERSTAT
_IMUSERSTATUSUPDATE.fields_by_name['client_type'].enum_type = IM_dot_BaseDefine__pb2._CLIENTTYPE
_IMSERVERKICKUSER.fields_by_name['client_type'].enum_type = IM_dot_BaseDefine__pb2._CLIENTTYPE
_IMPUSHTOUSERREQ.fields_by_name['user_token_list'].message_type = IM_dot_BaseDefine__pb2._USERTOKENINFO
_IMPUSHTOUSERRSP.fields_by_name['push_result_list'].message_type = IM_dot_BaseDefine__pb2._PUSHRESULT
_IMGROUPGETSHIELDRSP.fields_by_name['shield_status_list'].message_type = IM_dot_BaseDefine__pb2._SHIELDSTATUS
_IMFILETRANSFERREQ.fields_by_name['trans_mode'].enum_type = IM_dot_BaseDefine__pb2._TRANSFERFILETYPE
_IMFILETRANSFERRSP.fields_by_name['trans_mode'].enum_type = IM_dot_BaseDefine__pb2._TRANSFERFILETYPE
_IMFILESERVERIPRSP.fields_by_name['ip_addr_list'].message_type = IM_dot_BaseDefine__pb2._IPADDR
DESCRIPTOR.message_types_by_name['IMStopReceivePacket'] = _IMSTOPRECEIVEPACKET
DESCRIPTOR.message_types_by_name['IMValidateReq'] = _IMVALIDATEREQ
DESCRIPTOR.message_types_by_name['IMValidateRsp'] = _IMVALIDATERSP
DESCRIPTOR.message_types_by_name['IMGetDeviceTokenReq'] = _IMGETDEVICETOKENREQ
DESCRIPTOR.message_types_by_name['IMGetDeviceTokenRsp'] = _IMGETDEVICETOKENRSP
DESCRIPTOR.message_types_by_name['IMRoleSet'] = _IMROLESET
DESCRIPTOR.message_types_by_name['IMOnlineUserInfo'] = _IMONLINEUSERINFO
DESCRIPTOR.message_types_by_name['IMMsgServInfo'] = _IMMSGSERVINFO
DESCRIPTOR.message_types_by_name['IMUserStatusUpdate'] = _IMUSERSTATUSUPDATE
DESCRIPTOR.message_types_by_name['IMUserCntUpdate'] = _IMUSERCNTUPDATE
DESCRIPTOR.message_types_by_name['IMServerKickUser'] = _IMSERVERKICKUSER
DESCRIPTOR.message_types_by_name['IMServerPCLoginStatusNotify'] = _IMSERVERPCLOGINSTATUSNOTIFY
DESCRIPTOR.message_types_by_name['IMPushToUserReq'] = _IMPUSHTOUSERREQ
DESCRIPTOR.message_types_by_name['IMPushToUserRsp'] = _IMPUSHTOUSERRSP
DESCRIPTOR.message_types_by_name['IMGroupGetShieldReq'] = _IMGROUPGETSHIELDREQ
DESCRIPTOR.message_types_by_name['IMGroupGetShieldRsp'] = _IMGROUPGETSHIELDRSP
DESCRIPTOR.message_types_by_name['IMFileTransferReq'] = _IMFILETRANSFERREQ
DESCRIPTOR.message_types_by_name['IMFileTransferRsp'] = _IMFILETRANSFERRSP
DESCRIPTOR.message_types_by_name['IMFileServerIPReq'] = _IMFILESERVERIPREQ
DESCRIPTOR.message_types_by_name['IMFileServerIPRsp'] = _IMFILESERVERIPRSP
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
IMStopReceivePacket = _reflection.GeneratedProtocolMessageType('IMStopReceivePacket', (_message.Message,), dict(
DESCRIPTOR = _IMSTOPRECEIVEPACKET,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMStopReceivePacket)
))
_sym_db.RegisterMessage(IMStopReceivePacket)
IMValidateReq = _reflection.GeneratedProtocolMessageType('IMValidateReq', (_message.Message,), dict(
DESCRIPTOR = _IMVALIDATEREQ,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMValidateReq)
))
_sym_db.RegisterMessage(IMValidateReq)
IMValidateRsp = _reflection.GeneratedProtocolMessageType('IMValidateRsp', (_message.Message,), dict(
DESCRIPTOR = _IMVALIDATERSP,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMValidateRsp)
))
_sym_db.RegisterMessage(IMValidateRsp)
IMGetDeviceTokenReq = _reflection.GeneratedProtocolMessageType('IMGetDeviceTokenReq', (_message.Message,), dict(
DESCRIPTOR = _IMGETDEVICETOKENREQ,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMGetDeviceTokenReq)
))
_sym_db.RegisterMessage(IMGetDeviceTokenReq)
IMGetDeviceTokenRsp = _reflection.GeneratedProtocolMessageType('IMGetDeviceTokenRsp', (_message.Message,), dict(
DESCRIPTOR = _IMGETDEVICETOKENRSP,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMGetDeviceTokenRsp)
))
_sym_db.RegisterMessage(IMGetDeviceTokenRsp)
IMRoleSet = _reflection.GeneratedProtocolMessageType('IMRoleSet', (_message.Message,), dict(
DESCRIPTOR = _IMROLESET,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMRoleSet)
))
_sym_db.RegisterMessage(IMRoleSet)
IMOnlineUserInfo = _reflection.GeneratedProtocolMessageType('IMOnlineUserInfo', (_message.Message,), dict(
DESCRIPTOR = _IMONLINEUSERINFO,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMOnlineUserInfo)
))
_sym_db.RegisterMessage(IMOnlineUserInfo)
IMMsgServInfo = _reflection.GeneratedProtocolMessageType('IMMsgServInfo', (_message.Message,), dict(
DESCRIPTOR = _IMMSGSERVINFO,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMMsgServInfo)
))
_sym_db.RegisterMessage(IMMsgServInfo)
IMUserStatusUpdate = _reflection.GeneratedProtocolMessageType('IMUserStatusUpdate', (_message.Message,), dict(
DESCRIPTOR = _IMUSERSTATUSUPDATE,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMUserStatusUpdate)
))
_sym_db.RegisterMessage(IMUserStatusUpdate)
IMUserCntUpdate = _reflection.GeneratedProtocolMessageType('IMUserCntUpdate', (_message.Message,), dict(
DESCRIPTOR = _IMUSERCNTUPDATE,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMUserCntUpdate)
))
_sym_db.RegisterMessage(IMUserCntUpdate)
IMServerKickUser = _reflection.GeneratedProtocolMessageType('IMServerKickUser', (_message.Message,), dict(
DESCRIPTOR = _IMSERVERKICKUSER,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMServerKickUser)
))
_sym_db.RegisterMessage(IMServerKickUser)
IMServerPCLoginStatusNotify = _reflection.GeneratedProtocolMessageType('IMServerPCLoginStatusNotify', (_message.Message,), dict(
DESCRIPTOR = _IMSERVERPCLOGINSTATUSNOTIFY,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMServerPCLoginStatusNotify)
))
_sym_db.RegisterMessage(IMServerPCLoginStatusNotify)
IMPushToUserReq = _reflection.GeneratedProtocolMessageType('IMPushToUserReq', (_message.Message,), dict(
DESCRIPTOR = _IMPUSHTOUSERREQ,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMPushToUserReq)
))
_sym_db.RegisterMessage(IMPushToUserReq)
IMPushToUserRsp = _reflection.GeneratedProtocolMessageType('IMPushToUserRsp', (_message.Message,), dict(
DESCRIPTOR = _IMPUSHTOUSERRSP,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMPushToUserRsp)
))
_sym_db.RegisterMessage(IMPushToUserRsp)
IMGroupGetShieldReq = _reflection.GeneratedProtocolMessageType('IMGroupGetShieldReq', (_message.Message,), dict(
DESCRIPTOR = _IMGROUPGETSHIELDREQ,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMGroupGetShieldReq)
))
_sym_db.RegisterMessage(IMGroupGetShieldReq)
IMGroupGetShieldRsp = _reflection.GeneratedProtocolMessageType('IMGroupGetShieldRsp', (_message.Message,), dict(
DESCRIPTOR = _IMGROUPGETSHIELDRSP,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMGroupGetShieldRsp)
))
_sym_db.RegisterMessage(IMGroupGetShieldRsp)
IMFileTransferReq = _reflection.GeneratedProtocolMessageType('IMFileTransferReq', (_message.Message,), dict(
DESCRIPTOR = _IMFILETRANSFERREQ,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMFileTransferReq)
))
_sym_db.RegisterMessage(IMFileTransferReq)
IMFileTransferRsp = _reflection.GeneratedProtocolMessageType('IMFileTransferRsp', (_message.Message,), dict(
DESCRIPTOR = _IMFILETRANSFERRSP,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMFileTransferRsp)
))
_sym_db.RegisterMessage(IMFileTransferRsp)
IMFileServerIPReq = _reflection.GeneratedProtocolMessageType('IMFileServerIPReq', (_message.Message,), dict(
DESCRIPTOR = _IMFILESERVERIPREQ,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMFileServerIPReq)
))
_sym_db.RegisterMessage(IMFileServerIPReq)
IMFileServerIPRsp = _reflection.GeneratedProtocolMessageType('IMFileServerIPRsp', (_message.Message,), dict(
DESCRIPTOR = _IMFILESERVERIPRSP,
__module__ = 'IM.Server_pb2'
# @@protoc_insertion_point(class_scope:IM.Server.IMFileServerIPRsp)
))
_sym_db.RegisterMessage(IMFileServerIPRsp)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('H\003'))
# @@protoc_insertion_point(module_scope)
|
py | b41645a5fdaf06ef89f6c80ad6c436130c2d6667 | # classification.py
"""
Description: This file contains a set of python functions for conducting
machine learning classification on remote sensing data contained in an
Open Data Cube instance.
License: The code in this notebook is licensed under the Apache License,
Version 2.0 (https://www.apache.org/licenses/LICENSE-2.0).
Contact: If you need assistance, please post a question on the Open Data
Cube Slack channel (http://slack.opendatacube.org/) or on the GIS Stack
Exchange (https://gis.stackexchange.com/questions/ask?tags=open-data-cube)
using the `open-data-cube` tag (you can view previously asked questions
here: https://gis.stackexchange.com/questions/tagged/open-data-cube).
If you would like to report an issue with this script, you can file one on
Github https://github.com/GeoscienceAustralia/dea-notebooks/issues
Last modified: May 2021
"""
import os
import sys
import joblib
import datacube
import rasterio
import numpy as np
import pandas as pd
import xarray as xr
import time
from tqdm.auto import tqdm
import dask.array as da
import geopandas as gpd
from copy import deepcopy
import multiprocessing as mp
import dask.distributed as dd
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn.utils import check_random_state
from abc import ABCMeta, abstractmethod
from datacube.utils import geometry
from sklearn.base import ClusterMixin
from dask.diagnostics import ProgressBar
from rasterio.features import rasterize
from dask_ml.wrappers import ParallelPostFit
from sklearn.mixture import GaussianMixture
from datacube.utils.geometry import assign_crs
from sklearn.cluster import AgglomerativeClustering
from sklearn.model_selection import KFold, ShuffleSplit
from sklearn.model_selection import BaseCrossValidator
import warnings
from dea_tools.spatial import xr_rasterize
def sklearn_flatten(input_xr):
"""
Reshape a DataArray or Dataset with spatial (and optionally
temporal) structure into an np.array with the spatial and temporal
dimensions flattened into one dimension.
This flattening procedure enables DataArrays and Datasets to be used
to train and predict
with sklearn models.
Last modified: September 2019
Parameters
----------
input_xr : xarray.DataArray or xarray.Dataset
Must have dimensions 'x' and 'y', may have dimension 'time'.
Dimensions other than 'x', 'y' and 'time' are unaffected by the
flattening.
Returns
----------
input_np : numpy.array
A numpy array corresponding to input_xr.data (or
input_xr.to_array().data), with dimensions 'x','y' and 'time'
flattened into a single dimension, which is the first axis of
the returned array. input_np contains no NaNs.
"""
# cast input Datasets to DataArray
if isinstance(input_xr, xr.Dataset):
input_xr = input_xr.to_array()
# stack across pixel dimensions, handling timeseries if necessary
if "time" in input_xr.dims:
stacked = input_xr.stack(z=["x", "y", "time"])
else:
stacked = input_xr.stack(z=["x", "y"])
# finding 'bands' dimensions in each pixel - these will not be
# flattened as their context is important for sklearn
pxdims = []
for dim in stacked.dims:
if dim != "z":
pxdims.append(dim)
# mask NaNs - we mask pixels with NaNs in *any* band, because
# sklearn cannot accept NaNs as input
mask = np.isnan(stacked)
if len(pxdims) != 0:
mask = mask.any(dim=pxdims)
    # turn the mask into a numpy array (boolean indexing with xarray
    # objects can behave unexpectedly)
mask = mask.data
# the dimension we are masking along ('z') needs to be the first
# dimension in the underlying np array for the boolean indexing to work
stacked = stacked.transpose("z", *pxdims)
input_np = stacked.data[~mask]
return input_np
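# Illustrative sketch only (not part of the original module): assuming `ds` is
# an xarray.Dataset of feature bands with 'x' and 'y' (and optionally 'time')
# dimensions, sklearn_flatten returns a 2D array of shape
# (n_valid_pixels, n_bands) that any sklearn estimator can consume directly.
def _example_flatten_for_sklearn(ds, estimator):
    # Flatten the spatial (and temporal) dimensions; NaN pixels are dropped.
    flat = sklearn_flatten(ds)
    # `flat` is now a plain 2D numpy array suitable for .fit()/.predict().
    return estimator.fit(flat)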
def sklearn_unflatten(output_np, input_xr):
"""
Reshape a numpy array with no 'missing' elements (NaNs) and
    'flattened' spatiotemporal structure into a DataArray matching the
    spatiotemporal structure of the original input DataArray.
This enables an sklearn model's prediction to be remapped to the
correct pixels in the input DataArray or Dataset.
Last modified: September 2019
Parameters
----------
output_np : numpy.array
The first dimension's length should correspond to the number of
valid (non-NaN) pixels in input_xr.
input_xr : xarray.DataArray or xarray.Dataset
Must have dimensions 'x' and 'y', may have dimension 'time'.
Dimensions other than 'x', 'y' and 'time' are unaffected by the
flattening.
Returns
----------
output_xr : xarray.DataArray
An xarray.DataArray with the same dimensions 'x', 'y' and 'time'
as input_xr, and the same valid (non-NaN) pixels. These pixels
are set to match the data in output_np.
"""
# the output of a sklearn model prediction should just be a numpy array
# with size matching x*y*time for the input DataArray/Dataset.
# cast input Datasets to DataArray
if isinstance(input_xr, xr.Dataset):
input_xr = input_xr.to_array()
# generate the same mask we used to create the input to the sklearn model
if "time" in input_xr.dims:
stacked = input_xr.stack(z=["x", "y", "time"])
else:
stacked = input_xr.stack(z=["x", "y"])
pxdims = []
for dim in stacked.dims:
if dim != "z":
pxdims.append(dim)
mask = np.isnan(stacked)
if len(pxdims) != 0:
mask = mask.any(dim=pxdims)
# handle multivariable output
output_px_shape = ()
if len(output_np.shape[1:]):
output_px_shape = output_np.shape[1:]
# use the mask to put the data in all the right places
output_ma = np.ma.empty((len(stacked.z), *output_px_shape))
output_ma[~mask] = output_np
output_ma[mask] = np.ma.masked
# set the stacked coordinate to match the input
output_xr = xr.DataArray(
output_ma,
coords={"z": stacked["z"]},
dims=["z", *["output_dim_" + str(idx) for idx in range(len(output_px_shape))]],
)
output_xr = output_xr.unstack()
return output_xr
def fit_xr(model, input_xr):
"""
Utilise our wrappers to fit a vanilla sklearn model.
Last modified: September 2019
Parameters
----------
model : scikit-learn model or compatible object
Must have a fit() method that takes numpy arrays.
input_xr : xarray.DataArray or xarray.Dataset.
Must have dimensions 'x' and 'y', may have dimension 'time'.
Returns
----------
model : a scikit-learn model which has been fitted to the data in
the pixels of input_xr.
"""
model = model.fit(sklearn_flatten(input_xr))
return model
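# Illustrative round-trip sketch (not part of the original module): fit a
# vanilla sklearn clusterer with fit_xr, then map its per-pixel labels back
# onto the input grid with sklearn_flatten/sklearn_unflatten. `ds` is an
# assumed xarray.Dataset of feature layers with 'x'/'y' dimensions.
def _example_cluster_roundtrip(ds, n_clusters=5):
    # KMeans is already imported at the top of this module.
    model = fit_xr(KMeans(n_clusters=n_clusters), ds)
    # Predict on the same flattened pixels, then restore the x/y structure.
    labels = model.predict(sklearn_flatten(ds))
    return sklearn_unflatten(labels, ds)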
def predict_xr(
model,
input_xr,
chunk_size=None,
persist=False,
proba=False,
clean=False,
return_input=False,
):
"""
    Using dask-ml ParallelPostFit(), runs the parallel
    predict and predict_proba methods of sklearn
    estimators. Useful for running predictions
    on larger-than-RAM datasets.
Last modified: September 2020
Parameters
----------
model : scikit-learn model or compatible object
Must have a .predict() method that takes numpy arrays.
input_xr : xarray.DataArray or xarray.Dataset.
Must have dimensions 'x' and 'y'
chunk_size : int
The dask chunk size to use on the flattened array. If this
        is left as None, then the chunk size is inferred from the
        .chunks method on the `input_xr`.
persist : bool
If True, and proba=True, then 'input_xr' data will be
loaded into distributed memory. This will ensure data
is not loaded twice for the prediction of probabilities,
but this will only work if the data is not larger than
distributed RAM.
proba : bool
If True, predict probabilities
clean : bool
If True, remove Infs and NaNs from input and output arrays
return_input : bool
If True, then the data variables in the 'input_xr' dataset will
be appended to the output xarray dataset.
Returns
----------
output_xr : xarray.Dataset
An xarray.Dataset containing the prediction output from model.
        if proba=True then the dataset will also contain probabilities, and
if return_input=True then dataset will have the input feature layers.
Has the same spatiotemporal structure as input_xr.
"""
# if input_xr isn't dask, coerce it
dask = True
if not bool(input_xr.chunks):
dask = False
input_xr = input_xr.chunk({"x": len(input_xr.x), "y": len(input_xr.y)})
# set chunk size if not supplied
if chunk_size is None:
chunk_size = int(input_xr.chunks["x"][0]) * int(input_xr.chunks["y"][0])
def _predict_func(model, input_xr, persist, proba, clean, return_input):
x, y, crs = input_xr.x, input_xr.y, input_xr.geobox.crs
input_data = []
for var_name in input_xr.data_vars:
input_data.append(input_xr[var_name])
input_data_flattened = []
for arr in input_data:
data = arr.data.flatten().rechunk(chunk_size)
input_data_flattened.append(data)
# reshape for prediction
input_data_flattened = da.array(input_data_flattened).transpose()
if clean == True:
input_data_flattened = da.where(
da.isfinite(input_data_flattened), input_data_flattened, 0
)
if (proba == True) & (persist == True):
# persisting data so we don't require loading all the data twice
input_data_flattened = input_data_flattened.persist()
# apply the classification
print("predicting...")
out_class = model.predict(input_data_flattened)
# Mask out NaN or Inf values in results
if clean == True:
out_class = da.where(da.isfinite(out_class), out_class, 0)
# Reshape when writing out
out_class = out_class.reshape(len(y), len(x))
# stack back into xarray
output_xr = xr.DataArray(out_class, coords={"x": x, "y": y}, dims=["y", "x"])
output_xr = output_xr.to_dataset(name="Predictions")
if proba == True:
print(" probabilities...")
out_proba = model.predict_proba(input_data_flattened)
# convert to %
out_proba = da.max(out_proba, axis=1) * 100.0
if clean == True:
out_proba = da.where(da.isfinite(out_proba), out_proba, 0)
out_proba = out_proba.reshape(len(y), len(x))
out_proba = xr.DataArray(
out_proba, coords={"x": x, "y": y}, dims=["y", "x"]
)
output_xr["Probabilities"] = out_proba
if return_input == True:
print(" input features...")
# unflatten the input_data_flattened array and append
            # to the output_xr containing the predictions
arr = input_xr.to_array()
stacked = arr.stack(z=["y", "x"])
# handle multivariable output
output_px_shape = ()
if len(input_data_flattened.shape[1:]):
output_px_shape = input_data_flattened.shape[1:]
output_features = input_data_flattened.reshape(
(len(stacked.z), *output_px_shape)
)
# set the stacked coordinate to match the input
output_features = xr.DataArray(
output_features,
coords={"z": stacked["z"]},
dims=[
"z",
*["output_dim_" + str(idx) for idx in range(len(output_px_shape))],
],
).unstack()
# convert to dataset and rename arrays
output_features = output_features.to_dataset(dim="output_dim_0")
data_vars = list(input_xr.data_vars)
output_features = output_features.rename(
{i: j for i, j in zip(output_features.data_vars, data_vars)}
)
# merge with predictions
output_xr = xr.merge([output_xr, output_features], compat="override")
return assign_crs(output_xr, str(crs))
if dask == True:
# convert model to dask predict
model = ParallelPostFit(model)
with joblib.parallel_backend("dask"):
output_xr = _predict_func(
model, input_xr, persist, proba, clean, return_input
)
else:
output_xr = _predict_func(
model, input_xr, persist, proba, clean, return_input
).compute()
return output_xr
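# Illustrative usage sketch (not part of the original module): `model` is an
# assumed, already-fitted sklearn classifier and `feature_ds` an assumed
# xarray.Dataset of feature layers (dask-backed or in-memory) matching the
# bands the model was trained on.
def _example_predict_over_dataset(model, feature_ds):
    # clean=True zeroes NaNs/Infs before prediction; proba=True also returns
    # the winning-class probability (as a percentage) per pixel.
    predicted = predict_xr(model, feature_ds, proba=True, clean=True)
    # The result is an xarray.Dataset with 'Predictions' and 'Probabilities'.
    return predicted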
class HiddenPrints:
"""
For concealing unwanted print statements called by other functions
"""
def __enter__(self):
self._original_stdout = sys.stdout
sys.stdout = open(os.devnull, "w")
def __exit__(self, exc_type, exc_val, exc_tb):
sys.stdout.close()
sys.stdout = self._original_stdout
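# Illustrative usage sketch (not part of the original module): because
# HiddenPrints restores sys.stdout on exit, it can wrap any chatty call.
def _example_silence_prints(func, *args, **kwargs):
    # Anything `func` prints inside the block is sent to os.devnull.
    with HiddenPrints():
        return func(*args, **kwargs)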
def _get_training_data_for_shp(
gdf,
index,
row,
out_arrs,
out_vars,
dc_query,
return_coords,
feature_func=None,
field=None,
zonal_stats=None,
):
"""
This is the core function that is triggered by `collect_training_data`.
The `collect_training_data` function loops through geometries in a geopandas
geodataframe and runs the code within `_get_training_data_for_shp`.
Parameters are inherited from `collect_training_data`.
See that function for information on the other params not listed below.
Parameters
----------
index, row : iterables inherited from geopandas object
out_arrs : list
An empty list into which the training data arrays are stored.
out_vars : list
        An empty list into which the data variable names are stored.
Returns
--------
Two lists, a list of numpy.arrays containing classes and extracted data for
each pixel or polygon, and another containing the data variable names.
"""
# prevent function altering dictionary kwargs
dc_query = deepcopy(dc_query)
    # remove dask chunks if supplied, as we are using
    # multiprocessing for parallelization
if "dask_chunks" in dc_query.keys():
dc_query.pop("dask_chunks", None)
# set up query based on polygon
geom = geometry.Geometry(geom=gdf.iloc[index].geometry, crs=gdf.crs)
q = {"geopolygon": geom}
# merge polygon query with user supplied query params
dc_query.update(q)
# Use input feature function
data = feature_func(dc_query)
# create polygon mask
mask = xr_rasterize(gdf.iloc[[index]], data)
data = data.where(mask)
# Check that feature_func has removed time
if "time" in data.dims:
t = data.dims["time"]
if t > 1:
raise ValueError(
"After running the feature_func, the dataset still has "
+ str(t)
+ " time-steps, dataset must only have"
+ " x and y dimensions."
)
if return_coords == True:
# turn coords into a variable in the ds
data["x_coord"] = data.x + 0 * data.y
data["y_coord"] = data.y + 0 * data.x
# append ID measurement to dataset for tracking failures
band = [m for m in data.data_vars][0]
_id = xr.zeros_like(data[band])
data["id"] = _id
data["id"] = data["id"] + gdf.iloc[index]["id"]
# If no zonal stats were requested then extract all pixel values
if zonal_stats is None:
flat_train = sklearn_flatten(data)
flat_val = np.repeat(row[field], flat_train.shape[0])
stacked = np.hstack((np.expand_dims(flat_val, axis=1), flat_train))
elif zonal_stats in ["mean", "median", "max", "min"]:
method_to_call = getattr(data, zonal_stats)
flat_train = method_to_call()
flat_train = flat_train.to_array()
stacked = np.hstack((row[field], flat_train))
else:
raise Exception(
zonal_stats
+ " is not one of the supported"
+ " reduce functions ('mean','median','max','min')"
)
out_arrs.append(stacked)
out_vars.append([field] + list(data.data_vars))
def _get_training_data_parallel(
gdf, dc_query, ncpus, return_coords, feature_func=None, field=None, zonal_stats=None
):
"""
Function passing the '_get_training_data_for_shp' function
    to a multiprocessing.Pool.
Inherits variables from 'collect_training_data()'.
"""
# Check if dask-client is running
try:
zx = None
zx = dd.get_client()
except:
pass
if zx is not None:
raise ValueError(
"You have a Dask Client running, which prevents \n"
"this function from multiprocessing. Close the client."
)
# instantiate lists that can be shared across processes
manager = mp.Manager()
results = manager.list()
column_names = manager.list()
# progress bar
pbar = tqdm(total=len(gdf))
def update(*a):
pbar.update()
with mp.Pool(ncpus) as pool:
for index, row in gdf.iterrows():
pool.apply_async(
_get_training_data_for_shp,
[
gdf,
index,
row,
results,
column_names,
dc_query,
return_coords,
feature_func,
field,
zonal_stats,
],
callback=update,
)
pool.close()
pool.join()
pbar.close()
return column_names, results
def collect_training_data(
gdf,
dc_query,
ncpus=1,
return_coords=False,
feature_func=None,
field=None,
zonal_stats=None,
clean=True,
fail_threshold=0.02,
fail_ratio=0.5,
max_retries=3,
):
"""
This function provides methods for gathering training data from the ODC over
geometries stored within a geopandas geodataframe. The function will return a
'model_input' array containing stacked training data arrays with all NaNs & Infs removed.
In the instance where ncpus > 1, a parallel version of the function will be run
(functions are passed to a mp.Pool()). This function can conduct zonal statistics if
the supplied shapefile contains polygons. The 'feature_func' parameter defines what
features to produce.
Parameters
----------
gdf : geopandas geodataframe
geometry data in the form of a geopandas geodataframe
dc_query : dictionary
Datacube query object, should not contain lat and long (x or y)
variables as these are supplied by the 'gdf' variable
ncpus : int
The number of cpus/processes over which to parallelize the gathering
of training data (only if ncpus is > 1). Use 'mp.cpu_count()' to determine the number of
cpus available on a machine. Defaults to 1.
return_coords : bool
If True, then the training data will contain two extra columns 'x_coord' and
'y_coord' corresponding to the x,y coordinate of each sample. This variable can
be useful for handling spatial autocorrelation between samples later in the ML workflow.
feature_func : function
A function for generating feature layers that is applied to the data within
the bounds of the input geometry. The 'feature_func' must accept a 'dc_query'
object, and return a single xarray.Dataset or xarray.DataArray containing
2D coordinates (i.e x, y - no time dimension).
e.g.
def feature_function(query):
dc = datacube.Datacube(app='feature_layers')
ds = dc.load(**query)
ds = ds.mean('time')
return ds
field : str
Name of the column in the gdf that contains the class labels
zonal_stats : string, optional
An optional string giving the names of zonal statistics to calculate
for each polygon. Default is None (all pixel values are returned). Supported
values are 'mean', 'median', 'max', 'min'.
clean : bool
Whether or not to remove missing values in the training dataset. If True,
training labels with any NaNs or Infs in the feature layers will be dropped
from the dataset.
fail_threshold : float, default 0.02
Silent read fails on S3 can result in some rows of the returned data containing NaN values.
        The 'fail_threshold' fraction specifies a percentage of acceptable fails.
        e.g. Setting 'fail_threshold' to 0.05 means if >5% of the samples in the training dataset
        fail then those samples will be returned to the multiprocessing queue. Below this fraction
the function will accept the failures and return the results.
fail_ratio: float
A float between 0 and 1 that defines if a given training sample has failed.
        Default is 0.5, which means if 50% of the measurements in a given sample return null
        values, and the number of total fails is more than the fail_threshold, the sample will be
passed to the retry queue.
max_retries: int, default 3
Maximum number of times to retry collecting samples. This number is invoked
if the 'fail_threshold' is not reached.
Returns
--------
Two lists, a list of numpy.arrays containing classes and extracted data for
each pixel or polygon, and another containing the data variable names.
"""
# check the dtype of the class field
if gdf[field].dtype != np.int:
raise ValueError(
'The "field" column of the input vector must contain integer dtypes'
)
# check for feature_func
if feature_func is None:
raise ValueError(
"Please supply a feature layer function through the "
+"parameter 'feature_func'"
)
if zonal_stats is not None:
print("Taking zonal statistic: " + zonal_stats)
# add unique id to gdf to help with indexing failed rows
# during multiprocessing
# if zonal_stats is not None:
gdf["id"] = range(0, len(gdf))
if ncpus == 1:
# progress indicator
print("Collecting training data in serial mode")
i = 0
# list to store results
results = []
column_names = []
# loop through polys and extract training data
for index, row in gdf.iterrows():
print(" Feature {:04}/{:04}\r".format(i + 1, len(gdf)), end="")
_get_training_data_for_shp(
gdf,
index,
row,
results,
column_names,
dc_query,
return_coords,
feature_func,
field,
zonal_stats,
)
i += 1
else:
print("Collecting training data in parallel mode")
column_names, results = _get_training_data_parallel(
gdf=gdf,
dc_query=dc_query,
ncpus=ncpus,
return_coords=return_coords,
feature_func=feature_func,
field=field,
zonal_stats=zonal_stats,
)
# column names are appended during each iteration
# but they are identical, grab only the first instance
column_names = column_names[0]
# Stack the extracted training data for each feature into a single array
model_input = np.vstack(results)
# this code block below iteratively retries failed rows
# up to max_retries or until fail_threshold is
# reached - whichever occurs first
if ncpus > 1:
i = 1
while i <= max_retries:
# Find % of fails (null values) in data. Use Pandas for simplicity
df = pd.DataFrame(data=model_input[:, 0:-1], index=model_input[:, -1])
# how many nan values per id?
num_nans = df.isnull().sum(axis=1)
num_nans = num_nans.groupby(num_nans.index).sum()
# how many valid values per id?
num_valid = df.notnull().sum(axis=1)
num_valid = num_valid.groupby(num_valid.index).sum()
# find fail rate
perc_fail = num_nans / (num_nans + num_valid)
fail_ids = perc_fail[perc_fail > fail_ratio]
fail_rate = len(fail_ids) / len(gdf)
print(
"Percentage of possible fails after run "
+ str(i)
+ " = "
+ str(round(fail_rate * 100, 2))
+ " %"
)
if fail_rate > fail_threshold:
print("Recollecting samples that failed")
fail_ids = list(fail_ids.index)
# keep only the ids in model_input object that didn't fail
model_input = model_input[~np.isin(model_input[:, -1], fail_ids)]
# index out the fail_ids from the original gdf
gdf_rerun = gdf.loc[gdf["id"].isin(fail_ids)]
gdf_rerun = gdf_rerun.reset_index(drop=True)
time.sleep(5) # sleep for 5s to rest api
# recollect failed rows
column_names_again, results_again = _get_training_data_parallel(
gdf=gdf_rerun,
dc_query=dc_query,
ncpus=ncpus,
return_coords=return_coords,
feature_func=feature_func,
field=field,
zonal_stats=zonal_stats,
)
# Stack the extracted training data for each feature into a single array
model_input_again = np.vstack(results_again)
# merge results of the re-run with original run
model_input = np.vstack((model_input, model_input_again))
i += 1
else:
break
# -----------------------------------------------
# remove id column
idx_var = column_names[0:-1]
model_col_indices = [column_names.index(var_name) for var_name in idx_var]
model_input = model_input[:, model_col_indices]
if clean == True:
num = np.count_nonzero(np.isnan(model_input).any(axis=1))
model_input = model_input[~np.isnan(model_input).any(axis=1)]
model_input = model_input[~np.isinf(model_input).any(axis=1)]
print("Removed " + str(num) + " rows wth NaNs &/or Infs")
print("Output shape: ", model_input.shape)
else:
print("Returning data without cleaning")
print("Output shape: ", model_input.shape)
return column_names[0:-1], model_input
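# Example (added for illustration, not part of the original module): a minimal,
# hedged sketch of how collect_training_data might be called. The geodataframe
# 'gdf', the class column 'class_id', the query values, and 'feature_function'
# below are assumed/hypothetical names, following the docstring above.
#
#     def feature_function(query):
#         dc = datacube.Datacube(app="feature_layers")
#         ds = dc.load(**query)
#         return ds.mean("time")
#
#     query = {"measurements": ["red", "nir"], "resolution": (-30, 30),
#              "output_crs": "EPSG:6933", "time": ("2019-01", "2019-12")}
#
#     column_names, model_input = collect_training_data(
#         gdf=gdf, dc_query=query, ncpus=4, field="class_id",
#         zonal_stats="median", feature_func=feature_function)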
class KMeans_tree(ClusterMixin):
"""
A hierarchical KMeans unsupervised clustering model. This class is
a clustering model, so it inherits scikit-learn's ClusterMixin
base class.
Parameters
----------
n_levels : integer, default 2
number of levels in the tree of clustering models.
n_clusters : integer, default 3
Number of clusters in each of the constituent KMeans models in
the tree.
**kwargs : optional
Other keyword arguments to be passed directly to the KMeans
initialiser.
"""
def __init__(self, n_levels=2, n_clusters=3, **kwargs):
assert n_levels >= 1
        self.base_model = KMeans(n_clusters=n_clusters, **kwargs)
self.n_levels = n_levels
self.n_clusters = n_clusters
# make child models
if n_levels > 1:
self.branches = [
KMeans_tree(n_levels=n_levels - 1, n_clusters=n_clusters, **kwargs)
for _ in range(n_clusters)
]
def fit(self, X, y=None, sample_weight=None):
"""
Fit the tree of KMeans models. All parameters mimic those
of KMeans.fit().
Parameters
----------
X : array-like or sparse matrix, shape=(n_samples, n_features)
Training instances to cluster. It must be noted that the
data will be converted to C ordering, which will cause a
memory copy if the given data is not C-contiguous.
y : Ignored
not used, present here for API consistency by convention.
sample_weight : array-like, shape (n_samples,), optional
The weights for each observation in X. If None, all
observations are assigned equal weight (default: None)
"""
self.labels_ = self.base_model.fit(X, sample_weight=sample_weight).labels_
if self.n_levels > 1:
labels_old = np.copy(self.labels_)
# make room to add the sub-cluster labels
self.labels_ *= (self.n_clusters) ** (self.n_levels - 1)
for clu in range(self.n_clusters):
# fit child models on their corresponding partition of the training set
self.branches[clu].fit(
X[labels_old == clu],
sample_weight=(
sample_weight[labels_old == clu]
if sample_weight is not None
else None
),
)
self.labels_[labels_old == clu] += self.branches[clu].labels_
return self
def predict(self, X, sample_weight=None):
"""
Send X through the KMeans tree and predict the resultant
cluster. Compatible with KMeans.predict().
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
New data to predict.
sample_weight : array-like, shape (n_samples,), optional
The weights for each observation in X. If None, all
observations are assigned equal weight (default: None)
Returns
-------
labels : array, shape [n_samples,]
Index of the cluster each sample belongs to.
"""
result = self.base_model.predict(X, sample_weight=sample_weight)
if self.n_levels > 1:
rescpy = np.copy(result)
# make room to add the sub-cluster labels
result *= (self.n_clusters) ** (self.n_levels - 1)
for clu in range(self.n_clusters):
result[rescpy == clu] += self.branches[clu].predict(
X[rescpy == clu],
sample_weight=(
sample_weight[rescpy == clu]
if sample_weight is not None
else None
),
)
return result
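# Example (illustrative sketch, not part of the original module): fitting a
# two-level KMeans tree on random 2D points. With n_levels=2 and n_clusters=3
# the predicted labels fall in the range 0..8 (n_clusters ** n_levels - 1).
#
#     X = np.random.RandomState(0).rand(100, 2)
#     model = KMeans_tree(n_levels=2, n_clusters=3, random_state=0)
#     labels = model.fit(X).labels_
#     new_labels = model.predict(X)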
def spatial_clusters(
coordinates,
method="Hierarchical",
max_distance=None,
n_groups=None,
verbose=False,
**kwargs
):
"""
    Create spatial groups on coordinate data using KMeans clustering,
    a Gaussian Mixture model, or Agglomerative (hierarchical) clustering
Last modified: September 2020
Parameters
----------
n_groups : int
The number of groups to create. This is passed as 'n_clusters=n_groups'
for the KMeans algo, and 'n_components=n_groups' for the GMM. If using
        method='Hierarchical' then this parameter is ignored.
coordinates : np.array
A numpy array of coordinate values e.g.
np.array([[3337270., 262400.],
[3441390., -273060.], ...])
method : str
        Which algorithm to use to separate data points. Either 'KMeans', 'GMM', or
'Hierarchical'. If using 'Hierarchical' then must set max_distance.
max_distance : int
        If method is set to 'Hierarchical' then maximum distance describes the
maximum euclidean distances between all observations in a cluster. 'n_groups'
is ignored in this case.
**kwargs : optional,
        Additional keyword arguments to pass to sklearn.cluster.KMeans or
        sklearn.mixture.GaussianMixture depending on the 'method' argument.
Returns
-------
labels : array, shape [n_samples,]
Index of the cluster each sample belongs to.
"""
if method not in ["Hierarchical", "KMeans", "GMM"]:
raise ValueError("method must be one of: 'Hierarchical','KMeans' or 'GMM'")
if (method in ["GMM", "KMeans"]) & (n_groups is None):
raise ValueError(
"The 'GMM' and 'KMeans' methods requires explicitly setting 'n_groups'"
)
if (method == "Hierarchical") & (max_distance is None):
raise ValueError("The 'Hierarchical' method requires setting max_distance")
if method == "Hierarchical":
cluster_label = AgglomerativeClustering(
n_clusters=None,
linkage="complete",
distance_threshold=max_distance,
**kwargs
).fit_predict(coordinates)
if method == "KMeans":
cluster_label = KMeans(n_clusters=n_groups, **kwargs).fit_predict(coordinates)
if method == "GMM":
cluster_label = GaussianMixture(n_components=n_groups, **kwargs).fit_predict(
coordinates
)
if verbose:
print("n clusters = " + str(len(np.unique(cluster_label))))
return cluster_label
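# Example (illustrative sketch, not part of the original module): clustering a
# hypothetical array of projected easting/northing coordinates. With
# method='Hierarchical', 'max_distance' (in the units of the coordinates, e.g.
# metres) controls the grouping instead of 'n_groups'.
#
#     coords = np.array([[3337270., 262400.],
#                        [3441390., -273060.],
#                        [3337300., 262500.]])
#     labels_km = spatial_clusters(coords, method="KMeans", n_groups=2)
#     labels_hc = spatial_clusters(coords, method="Hierarchical", max_distance=50000)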
def SKCV(
coordinates,
n_splits,
cluster_method,
kfold_method,
test_size,
balance,
n_groups=None,
max_distance=None,
train_size=None,
random_state=None,
**kwargs
):
"""
Generate spatial k-fold cross validation indices using coordinate data.
This function wraps the 'SpatialShuffleSplit' and 'SpatialKFold' classes.
These classes ingest coordinate data in the form of an
np.array([[Eastings, northings]]) and assign samples to a spatial cluster
    using either a KMeans, Gaussian Mixture, or Agglomerative Clustering algorithm.
This cross-validator is preferred over other sklearn.model_selection methods
for spatial data to avoid overestimating cross-validation scores.
This can happen because of the inherent spatial autocorrelation that is usually
associated with this type of data.
Last modified: Dec 2020
Parameters
----------
coordinates : np.array
A numpy array of coordinate values e.g.
np.array([[3337270., 262400.],
[3441390., -273060.], ...])
n_splits : int
The number of test-train cross validation splits to generate.
cluster_method : str
        Which algorithm to use to separate data points. Either 'KMeans', 'GMM', or
'Hierarchical'
kfold_method : str
One of either 'SpatialShuffleSplit' or 'SpatialKFold'. See the docs
under class:_SpatialShuffleSplit and class: _SpatialKFold for more
information on these options.
test_size : float, int, None
If float, should be between 0.0 and 1.0 and represent the proportion
of the dataset to include in the test split. If int, represents the
absolute number of test samples. If None, the value is set to the
complement of the train size. If ``train_size`` is also None, it will
be set to 0.15.
balance : int or bool
if setting kfold_method to 'SpatialShuffleSplit': int
The number of splits generated per iteration to try to balance the
amount of data in each set so that *test_size* and *train_size* are
respected. If 1, then no extra splits are generated (essentially
        disabling the balancing). Must be >= 1.
if setting kfold_method to 'SpatialKFold': bool
        Whether or not to split clusters into folds with approximately equal
number of data points. If False, each fold will have the same number of
clusters (which can have different number of data points in them).
n_groups : int
The number of groups to create. This is passed as 'n_clusters=n_groups'
for the KMeans algo, and 'n_components=n_groups' for the GMM. If using
cluster_method='Hierarchical' then this parameter is ignored.
max_distance : int
        If method is set to 'Hierarchical' then maximum distance describes the
maximum euclidean distances between all observations in a cluster. 'n_groups'
is ignored in this case.
train_size : float, int, or None
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the train split. If
int, represents the absolute number of train samples. If None,
the value is automatically set to the complement of the test size.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
**kwargs : optional,
        Additional keyword arguments to pass to sklearn.cluster.KMeans or
        sklearn.mixture.GaussianMixture depending on the cluster_method argument.
Returns
--------
generator object _BaseSpatialCrossValidator.split
"""
    # initiate the chosen k-fold splitting method
if kfold_method == "SpatialShuffleSplit":
splitter = _SpatialShuffleSplit(
n_groups=n_groups,
method=cluster_method,
coordinates=coordinates,
max_distance=max_distance,
test_size=test_size,
train_size=train_size,
n_splits=n_splits,
random_state=random_state,
balance=balance,
**kwargs
)
if kfold_method == "SpatialKFold":
splitter = _SpatialKFold(
n_groups=n_groups,
coordinates=coordinates,
max_distance=max_distance,
method=cluster_method,
test_size=test_size,
n_splits=n_splits,
random_state=random_state,
balance=balance,
**kwargs
)
return splitter
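# Example (illustrative sketch, not part of the original module): the splitter
# returned by SKCV yields train/test index splits that can be passed as the 'cv'
# argument to scikit-learn utilities such as cross_val_score. 'coords', 'X', 'y',
# and the classifier below are assumed placeholders.
#
#     skcv = SKCV(coordinates=coords, n_splits=5, cluster_method="Hierarchical",
#                 kfold_method="SpatialKFold", test_size=0.2, balance=True,
#                 max_distance=25000)
#     scores = cross_val_score(RandomForestClassifier(), X, y, cv=skcv.split(coords))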
def spatial_train_test_split(
X,
y,
coordinates,
cluster_method,
kfold_method,
balance,
test_size=None,
n_splits=None,
n_groups=None,
max_distance=None,
train_size=None,
random_state=None,
**kwargs
):
"""
Split arrays into random train and test subsets. Similar to
`sklearn.model_selection.train_test_split` but instead works on
spatial coordinate data. Coordinate data is grouped according
    to either a KMeans, Gaussian Mixture, or Agglomerative Clustering algorithm.
Grouping by spatial clusters is preferred over plain random splits for
spatial data to avoid overestimating validation scores due to spatial
autocorrelation.
Parameters
----------
X : np.array
Training data features
y : np.array
Training data labels
coordinates : np.array
A numpy array of coordinate values e.g.
np.array([[3337270., 262400.],
[3441390., -273060.], ...])
cluster_method : str
        Which algorithm to use to separate data points. Either 'KMeans', 'GMM', or
'Hierarchical'
kfold_method : str
One of either 'SpatialShuffleSplit' or 'SpatialKFold'. See the docs
under class:_SpatialShuffleSplit and class: _SpatialKFold for more
information on these options.
balance : int or bool
if setting kfold_method to 'SpatialShuffleSplit': int
The number of splits generated per iteration to try to balance the
amount of data in each set so that *test_size* and *train_size* are
respected. If 1, then no extra splits are generated (essentially
        disabling the balancing). Must be >= 1.
if setting kfold_method to 'SpatialKFold': bool
        Whether or not to split clusters into folds with approximately equal
number of data points. If False, each fold will have the same number of
clusters (which can have different number of data points in them).
test_size : float, int, None
If float, should be between 0.0 and 1.0 and represent the proportion
of the dataset to include in the test split. If int, represents the
absolute number of test samples. If None, the value is set to the
complement of the train size. If ``train_size`` is also None, it will
be set to 0.15.
n_splits : int
This parameter is invoked for the 'SpatialKFold' folding method, use this
number to satisfy the train-test size ratio desired, as the 'test_size'
parameter for the KFold method often fails to get the ratio right.
n_groups : int
The number of groups to create. This is passed as 'n_clusters=n_groups'
for the KMeans algo, and 'n_components=n_groups' for the GMM. If using
cluster_method='Hierarchical' then this parameter is ignored.
max_distance : int
        If method is set to 'Hierarchical' then maximum distance describes the
maximum euclidean distances between all observations in a cluster. 'n_groups'
is ignored in this case.
train_size : float, int, or None
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the train split. If
int, represents the absolute number of train samples. If None,
the value is automatically set to the complement of the test size.
random_state : int,
RandomState instance or None, optional
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
**kwargs : optional,
        Additional keyword arguments to pass to sklearn.cluster.KMeans or
        sklearn.mixture.GaussianMixture depending on the cluster_method argument.
Returns
-------
Tuple :
Contains four arrays in the following order:
X_train, X_test, y_train, y_test
"""
if kfold_method == "SpatialShuffleSplit":
splitter = _SpatialShuffleSplit(
n_groups=n_groups,
method=cluster_method,
coordinates=coordinates,
max_distance=max_distance,
test_size=test_size,
train_size=train_size,
n_splits=1 if n_splits is None else n_splits,
random_state=random_state,
balance=balance,
**kwargs
)
if kfold_method == "SpatialKFold":
if n_splits is None:
raise ValueError(
"n_splits parameter requires an integer value, eg. 'n_splits=5'"
)
if (test_size is not None) or (train_size is not None):
warnings.warn(
"With the 'SpatialKFold' method, controlling the test/train ratio "
"is better achieved using the 'n_splits' parameter"
)
splitter = _SpatialKFold(
n_groups=n_groups,
coordinates=coordinates,
max_distance=max_distance,
method=cluster_method,
n_splits=n_splits,
random_state=random_state,
balance=balance,
**kwargs
)
lst = []
for train, test in splitter.split(coordinates):
X_tr, X_tt = X[train, :], X[test, :]
y_tr, y_tt = y[train], y[test]
lst.extend([X_tr, X_tt, y_tr, y_tt])
return (lst[0], lst[1], lst[2], lst[3])
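# Example (illustrative sketch, not part of the original module): a spatially
# aware alternative to sklearn's train_test_split. 'X', 'y', and 'coords' are
# assumed placeholders with matching first dimensions.
#
#     X_train, X_test, y_train, y_test = spatial_train_test_split(
#         X=X, y=y, coordinates=coords,
#         cluster_method="Hierarchical", max_distance=25000,
#         kfold_method="SpatialShuffleSplit", balance=10, test_size=0.2)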
def _partition_by_sum(array, parts):
"""
Partition an array into parts of approximately equal sum.
Does not change the order of the array elements.
Produces the partition indices on the array. Use :func:`numpy.split` to
divide the array along these indices.
Parameters
----------
array : array or array-like
The 1D array that will be partitioned. The array will be raveled before
computations.
parts : int
Number of parts to split the array. Can be at most the number of
elements in the array.
Returns
-------
indices : array
The indices in which the array should be split.
Notes
-----
Solution from https://stackoverflow.com/a/54024280
"""
array = np.atleast_1d(array).ravel()
if parts > array.size:
raise ValueError(
"Cannot partition an array of size {} into {} parts of equal sum.".format(
array.size, parts
)
)
cumulative_sum = array.cumsum()
# Ideally, we want each part to have the same number of points (total /
# parts).
ideal_sum = cumulative_sum[-1] // parts
# If the parts are ideal, the cumulative sum of each part will be this
ideal_cumsum = np.arange(1, parts) * ideal_sum
indices = np.searchsorted(cumulative_sum, ideal_cumsum, side="right")
# Check for repeated split points, which indicates that there is no way to
# split the array.
if np.unique(indices).size != indices.size:
raise ValueError(
"Could not find partition points to split the array into {} parts "
"of equal sum.".format(parts)
)
return indices
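# Worked example (added for illustration): partitioning [2, 3, 4, 5, 6] into two
# parts of roughly equal sum gives a split index of 3, i.e. [2, 3, 4] (sum 9)
# and [5, 6] (sum 11):
#
#     idx = _partition_by_sum(np.array([2, 3, 4, 5, 6]), parts=2)   # -> array([3])
#     parts = np.split(np.array([2, 3, 4, 5, 6]), idx)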
class _BaseSpatialCrossValidator(BaseCrossValidator, metaclass=ABCMeta):
"""
Base class for spatial cross-validators.
Parameters
----------
n_groups : int
The number of groups to create. This is passed as 'n_clusters=n_groups'
for the KMeans algo, and 'n_components=n_groups' for the GMM.
coordinates : np.array
A numpy array of coordinate values e.g.
np.array([[3337270., 262400.],
                  [3441390., -273060.], ...])
    method : str
        Which algorithm to use to separate data points. Either 'KMeans', 'GMM', or 'Hierarchical'
n_splits : int
Number of splitting iterations.
"""
def __init__(
self,
n_groups=None,
coordinates=None,
method=None,
max_distance=None,
n_splits=None,
):
self.n_groups = n_groups
self.coordinates = coordinates
self.method = method
self.max_distance = max_distance
self.n_splits = n_splits
def split(self, X, y=None, groups=None):
"""
Generate indices to split data into training and test set.
Parameters
----------
X : array-like, shape (n_samples, 2)
Columns should be the easting and northing coordinates of data
points, respectively.
y : array-like, shape (n_samples,)
The target variable for supervised learning problems. Always
ignored.
groups : array-like, with shape (n_samples,), optional
Group labels for the samples used while splitting the dataset into
train/test set. Always ignored.
Yields
------
train : ndarray
The training set indices for that split.
test : ndarray
The testing set indices for that split.
"""
if X.shape[1] != 2:
raise ValueError(
"X (the coordinate data) must have exactly 2 columns ({} given).".format(
X.shape[1]
)
)
for train, test in super().split(X, y, groups):
yield train, test
def get_n_splits(self, X=None, y=None, groups=None):
"""
Returns the number of splitting iterations in the cross-validator
Parameters
----------
X : object
Always ignored, exists for compatibility.
y : object
Always ignored, exists for compatibility.
groups : object
Always ignored, exists for compatibility.
Returns
-------
n_splits : int
Returns the number of splitting iterations in the cross-validator.
"""
return self.n_splits
@abstractmethod
def _iter_test_indices(self, X=None, y=None, groups=None):
"""
Generates integer indices corresponding to test sets.
MUST BE IMPLEMENTED BY DERIVED CLASSES.
Parameters
----------
X : array-like, shape (n_samples, 2)
Columns should be the easting and northing coordinates of data
points, respectively.
y : array-like, shape (n_samples,)
The target variable for supervised learning problems. Always
ignored.
groups : array-like, with shape (n_samples,), optional
Group labels for the samples used while splitting the dataset into
train/test set. Always ignored.
Yields
------
test : ndarray
The testing set indices for that split.
"""
class _SpatialShuffleSplit(_BaseSpatialCrossValidator):
"""
Random permutation of spatial cross-validator.
Yields indices to split data into training and test sets. Data are first
    grouped into clusters using a KMeans, GMM, or Agglomerative Clustering algorithm
and are then split into testing and training sets randomly.
The proportion of clusters assigned to each set is controlled by *test_size*
and/or *train_size*. However, the total amount of actual data points in
each set could be different from these values since clusters can have
a different number of data points inside them. To guarantee that the
proportion of actual data is as close as possible to the proportion of
clusters, this cross-validator generates an extra number of splits and
selects the one with proportion of data points in each set closer to the
desired amount. The number of balance splits per
iteration is controlled by the *balance* argument.
This cross-validator is preferred over `sklearn.model_selection.ShuffleSplit`
for spatial data to avoid overestimating cross-validation scores.
This can happen because of the inherent spatial autocorrelation.
Parameters
----------
n_groups : int
The number of groups to create. This is passed as 'n_clusters=n_groups'
for the KMeans algo, and 'n_components=n_groups' for the GMM. If using
cluster_method='Hierarchical' then this parameter is ignored.
coordinates : np.array
A numpy array of coordinate values e.g.
np.array([[3337270., 262400.],
[3441390., -273060.], ...])
    method : str
        Which algorithm to use to separate data points. Either 'KMeans', 'GMM', or
'Hierarchical'
max_distance : int
        If method is set to 'Hierarchical' then maximum distance describes the
maximum euclidean distances between all observations in a cluster. 'n_groups'
is ignored in this case.
n_splits : int,
Number of re-shuffling & splitting iterations.
test_size : float, int, None
If float, should be between 0.0 and 1.0 and represent the proportion
of the dataset to include in the test split. If int, represents the
absolute number of test samples. If None, the value is set to the
complement of the train size. If ``train_size`` is also None, it will
be set to 0.1.
train_size : float, int, or None
If float, should be between 0.0 and 1.0 and represent the
proportion of the dataset to include in the train split. If
int, represents the absolute number of train samples. If None,
the value is automatically set to the complement of the test size.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
balance : int
The number of splits generated per iteration to try to balance the
amount of data in each set so that *test_size* and *train_size* are
respected. If 1, then no extra splits are generated (essentially
        disabling the balancing). Must be >= 1.
**kwargs : optional,
        Additional keyword arguments to pass to sklearn.cluster.KMeans or
        sklearn.mixture.GaussianMixture depending on the 'method' argument.
Returns
--------
generator
containing indices to split data into training and test sets
"""
def __init__(
self,
n_groups=None,
coordinates=None,
method="Heirachical",
max_distance=None,
n_splits=None,
test_size=0.15,
train_size=None,
random_state=None,
balance=10,
**kwargs
):
super().__init__(
n_groups=n_groups,
coordinates=coordinates,
method=method,
max_distance=max_distance,
n_splits=n_splits,
**kwargs
)
if balance < 1:
raise ValueError(
"The *balance* argument must be >= 1. To disable balance, use 1."
)
self.test_size = test_size
self.train_size = train_size
self.random_state = random_state
self.balance = balance
self.kwargs = kwargs
def _iter_test_indices(self, X=None, y=None, groups=None):
"""
Generates integer indices corresponding to test sets.
Runs several iterations until a split is found that yields clusters with
the right amount of data points in it.
Parameters
----------
X : array-like, shape (n_samples, 2)
Columns should be the easting and northing coordinates of data
points, respectively.
y : array-like, shape (n_samples,)
The target variable for supervised learning problems. Always
ignored.
groups : array-like, with shape (n_samples,), optional
Group labels for the samples used while splitting the dataset into
train/test set. Always ignored.
Yields
------
test : ndarray
The testing set indices for that split.
"""
labels = spatial_clusters(
n_groups=self.n_groups,
coordinates=self.coordinates,
method=self.method,
max_distance=self.max_distance,
**self.kwargs
)
cluster_ids = np.unique(labels)
# Generate many more splits so that we can pick and choose the ones
# that have the right balance of training and testing data.
shuffle = ShuffleSplit(
n_splits=self.n_splits * self.balance,
test_size=self.test_size,
train_size=self.train_size,
random_state=self.random_state,
).split(cluster_ids)
for _ in range(self.n_splits):
test_sets, balance = [], []
for _ in range(self.balance):
# This is a false positive in pylint which is why the warning
# is disabled at the top of this file:
# https://github.com/PyCQA/pylint/issues/1830
# pylint: disable=stop-iteration-return
train_clusters, test_clusters = next(shuffle)
# pylint: enable=stop-iteration-return
train_points = np.where(np.isin(labels, cluster_ids[train_clusters]))[0]
test_points = np.where(np.isin(labels, cluster_ids[test_clusters]))[0]
# The proportion of data points assigned to each group should
                # be close to the proportion of clusters assigned to each group.
balance.append(
abs(
train_points.size / test_points.size
- train_clusters.size / test_clusters.size
)
)
test_sets.append(test_points)
best = np.argmin(balance)
yield test_sets[best]
class _SpatialKFold(_BaseSpatialCrossValidator):
"""
Spatial K-Folds cross-validator.
Yields indices to split data into training and test sets. Data are first
    grouped into clusters using a KMeans, GMM, or Agglomerative Clustering
    algorithm. The clusters are then split into testing and training sets iteratively
along k folds of the data (k is given by *n_splits*).
By default, the clusters are split into folds in a way that makes each fold
have approximately the same number of data points. Sometimes this might not
be possible, which can happen if the number of splits is close to the
number of clusters. In these cases, each fold will have the same number of
clusters regardless of how many data points are in each cluster. This
behaviour can also be disabled by setting ``balance=False``.
This cross-validator is preferred over `sklearn.model_selection.KFold` for
spatial data to avoid overestimating cross-validation scores. This can happen
because of the inherent autocorrelation that is usually associated with
this type of data.
Parameters
----------
n_groups : int
The number of groups to create. This is passed as 'n_clusters=n_groups'
for the KMeans algo, and 'n_components=n_groups' for the GMM. If using
cluster_method='Hierarchical' then this parameter is ignored.
coordinates : np.array
A numpy array of coordinate values e.g.
np.array([[3337270., 262400.],
[3441390., -273060.], ...])
    method : str
        Which algorithm to use to separate data points. Either 'KMeans', 'GMM', or
'Hierarchical'
max_distance : int
        If method is set to 'Hierarchical' then maximum distance describes the
maximum euclidean distances between all observations in a cluster. 'n_groups'
is ignored in this case.
n_splits : int
Number of folds. Must be at least 2.
shuffle : bool
Whether to shuffle the data before splitting into batches.
    random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
balance : bool
        Whether or not to split clusters into folds with approximately equal
number of data points. If False, each fold will have the same number of
clusters (which can have different number of data points in them).
**kwargs : optional,
        Additional keyword arguments to pass to sklearn.cluster.KMeans or
        sklearn.mixture.GaussianMixture depending on the 'method' argument.
"""
def __init__(
self,
n_groups=None,
coordinates=None,
method="Heirachical",
max_distance=None,
n_splits=5,
test_size=0.15,
train_size=None,
shuffle=True,
random_state=None,
balance=True,
**kwargs
):
super().__init__(
n_groups=n_groups,
coordinates=coordinates,
method=method,
max_distance=max_distance,
n_splits=n_splits,
**kwargs
)
if n_splits < 2:
raise ValueError(
"Number of splits must be >=2 for clusterKFold. Given {}.".format(
n_splits
)
)
self.test_size = test_size
self.shuffle = shuffle
self.random_state = random_state
self.balance = balance
self.kwargs = kwargs
def _iter_test_indices(self, X=None, y=None, groups=None):
"""
Generates integer indices corresponding to test sets.
Parameters
----------
X : array-like, shape (n_samples, 2)
Columns should be the easting and northing coordinates of data
points, respectively.
y : array-like, shape (n_samples,)
The target variable for supervised learning problems. Always
ignored.
groups : array-like, with shape (n_samples,), optional
Group labels for the samples used while splitting the dataset into
train/test set. Always ignored.
Yields
------
test : ndarray
The testing set indices for that split.
"""
labels = spatial_clusters(
n_groups=self.n_groups,
coordinates=self.coordinates,
method=self.method,
max_distance=self.max_distance,
**self.kwargs
)
cluster_ids = np.unique(labels)
if self.n_splits > cluster_ids.size:
raise ValueError(
"Number of k-fold splits ({}) cannot be greater than the number of "
"clusters ({}). Either decrease n_splits or increase the number of "
"clusters.".format(self.n_splits, cluster_ids.size)
)
if self.shuffle:
check_random_state(self.random_state).shuffle(cluster_ids)
if self.balance:
cluster_sizes = [np.isin(labels, i).sum() for i in cluster_ids]
try:
split_points = _partition_by_sum(cluster_sizes, parts=self.n_splits)
folds = np.split(np.arange(cluster_ids.size), split_points)
except ValueError:
warnings.warn(
"Could not balance folds to have approximately the same "
"number of data points. Dividing into folds with equal "
"number of clusters instead. Decreasing n_splits or increasing "
"the number of clusters may help.",
UserWarning,
)
folds = [i for _, i in KFold(n_splits=self.n_splits).split(cluster_ids)]
else:
folds = [i for _, i in KFold(n_splits=self.n_splits).split(cluster_ids)]
for test_clusters in folds:
test_points = np.where(np.isin(labels, cluster_ids[test_clusters]))[0]
yield test_points
|
py | b41647734670431ad866bc43ae52ba1aa02cdb95 | import random
from sympy.mpmath import *
from sympy.mpmath.libmp import *
def test_basic_string():
"""
Test basic string conversion
"""
mp.dps = 15
assert mpf('3') == mpf('3.0') == mpf('0003.') == mpf('0.03e2') == mpf(3.0)
assert mpf('30') == mpf('30.0') == mpf('00030.') == mpf(30.0)
for i in range(10):
for j in range(10):
assert mpf('%ie%i' % (i,j)) == i * 10**j
assert str(mpf('25000.0')) == '25000.0'
assert str(mpf('2500.0')) == '2500.0'
assert str(mpf('250.0')) == '250.0'
assert str(mpf('25.0')) == '25.0'
assert str(mpf('2.5')) == '2.5'
assert str(mpf('0.25')) == '0.25'
assert str(mpf('0.025')) == '0.025'
assert str(mpf('0.0025')) == '0.0025'
assert str(mpf('0.00025')) == '0.00025'
assert str(mpf('0.000025')) == '2.5e-5'
assert str(mpf(0)) == '0.0'
assert str(mpf('2.5e1000000000000000000000')) == '2.5e+1000000000000000000000'
assert str(mpf('2.6e-1000000000000000000000')) == '2.6e-1000000000000000000000'
assert str(mpf(1.23402834e-15)) == '1.23402834e-15'
assert str(mpf(-1.23402834e-15)) == '-1.23402834e-15'
assert str(mpf(-1.2344e-15)) == '-1.2344e-15'
assert repr(mpf(-1.2344e-15)) == "mpf('-1.2343999999999999e-15')"
def test_pretty():
mp.pretty = True
assert repr(mpf(2.5)) == '2.5'
assert repr(mpc(2.5,3.5)) == '(2.5 + 3.5j)'
mp.pretty = False
iv.pretty = True
assert repr(mpi(2.5,3.5)) == '[2.5, 3.5]'
iv.pretty = False
def test_str_whitespace():
assert mpf('1.26 ') == 1.26
def test_unicode():
mp.dps = 15
assert mpf(u'2.76') == 2.76
assert mpf(u'inf') == inf
def test_str_format():
assert to_str(from_float(0.1),15,strip_zeros=False) == '0.100000000000000'
assert to_str(from_float(0.0),15,show_zero_exponent=True) == '0.0e+0'
assert to_str(from_float(0.0),0,show_zero_exponent=True) == '.0e+0'
assert to_str(from_float(0.0),0,show_zero_exponent=False) == '.0'
assert to_str(from_float(0.0),1,show_zero_exponent=True) == '0.0e+0'
assert to_str(from_float(0.0),1,show_zero_exponent=False) == '0.0'
assert to_str(from_float(1.23),3,show_zero_exponent=True) == '1.23e+0'
assert to_str(from_float(1.23456789000000e-2),15,strip_zeros=False,min_fixed=0,max_fixed=0) == '1.23456789000000e-2'
assert to_str(from_float(1.23456789000000e+2),15,strip_zeros=False,min_fixed=0,max_fixed=0) == '1.23456789000000e+2'
assert to_str(from_float(2.1287e14), 15, max_fixed=1000) == '212870000000000.0'
assert to_str(from_float(2.1287e15), 15, max_fixed=1000) == '2128700000000000.0'
assert to_str(from_float(2.1287e16), 15, max_fixed=1000) == '21287000000000000.0'
assert to_str(from_float(2.1287e30), 15, max_fixed=1000) == '2128700000000000000000000000000.0'
def test_tight_string_conversion():
mp.dps = 15
# In an old version, '0.5' wasn't recognized as representing
# an exact binary number and was erroneously rounded up or down
assert from_str('0.5', 10, round_floor) == fhalf
assert from_str('0.5', 10, round_ceiling) == fhalf
def test_eval_repr_invariant():
"""Test that eval(repr(x)) == x"""
random.seed(123)
for dps in [10, 15, 20, 50, 100]:
mp.dps = dps
for i in xrange(1000):
a = mpf(random.random())**0.5 * 10**random.randint(-100, 100)
assert eval(repr(a)) == a
mp.dps = 15
def test_str_bugs():
mp.dps = 15
# Decimal rounding used to give the wrong exponent in some cases
assert str(mpf('1e600')) == '1.0e+600'
assert str(mpf('1e10000')) == '1.0e+10000'
def test_str_prec0():
assert to_str(from_float(1.234), 0) == '.0e+0'
assert to_str(from_float(1e-15), 0) == '.0e-15'
assert to_str(from_float(1e+15), 0) == '.0e+15'
assert to_str(from_float(-1e-15), 0) == '-.0e-15'
assert to_str(from_float(-1e+15), 0) == '-.0e+15'
def test_convert_rational():
mp.dps = 15
assert from_rational(30, 5, 53, round_nearest) == (0, 3, 1, 2)
assert from_rational(-7, 4, 53, round_nearest) == (1, 7, -2, 3)
assert to_rational((0, 1, -1, 1)) == (1, 2)
def test_custom_class():
class mympf:
@property
def _mpf_(self):
return mpf(3.5)._mpf_
class mympc:
@property
def _mpc_(self):
return mpf(3.5)._mpf_, mpf(2.5)._mpf_
assert mpf(2) + mympf() == 5.5
assert mympf() + mpf(2) == 5.5
assert mpf(mympf()) == 3.5
assert mympc() + mpc(2) == mpc(5.5, 2.5)
assert mpc(2) + mympc() == mpc(5.5, 2.5)
assert mpc(mympc()) == (3.5+2.5j)
def test_conversion_methods():
class SomethingRandom:
pass
class SomethingReal:
def _mpmath_(self, prec, rounding):
return mp.make_mpf(from_str('1.3', prec, rounding))
class SomethingComplex:
def _mpmath_(self, prec, rounding):
return mp.make_mpc((from_str('1.3', prec, rounding), \
from_str('1.7', prec, rounding)))
x = mpf(3)
z = mpc(3)
a = SomethingRandom()
y = SomethingReal()
w = SomethingComplex()
for d in [15, 45]:
mp.dps = d
assert (x+y).ae(mpf('4.3'))
assert (y+x).ae(mpf('4.3'))
assert (x+w).ae(mpc('4.3', '1.7'))
assert (w+x).ae(mpc('4.3', '1.7'))
assert (z+y).ae(mpc('4.3'))
assert (y+z).ae(mpc('4.3'))
assert (z+w).ae(mpc('4.3', '1.7'))
assert (w+z).ae(mpc('4.3', '1.7'))
x-y; y-x; x-w; w-x; z-y; y-z; z-w; w-z
x*y; y*x; x*w; w*x; z*y; y*z; z*w; w*z
x/y; y/x; x/w; w/x; z/y; y/z; z/w; w/z
x**y; y**x; x**w; w**x; z**y; y**z; z**w; w**z
x==y; y==x; x==w; w==x; z==y; y==z; z==w; w==z
mp.dps = 15
assert x.__add__(a) is NotImplemented
assert x.__radd__(a) is NotImplemented
assert x.__lt__(a) is NotImplemented
assert x.__gt__(a) is NotImplemented
assert x.__le__(a) is NotImplemented
assert x.__ge__(a) is NotImplemented
assert x.__eq__(a) is NotImplemented
assert x.__ne__(a) is NotImplemented
# implementation detail
if hasattr(x, "__cmp__"):
assert x.__cmp__(a) is NotImplemented
assert x.__sub__(a) is NotImplemented
assert x.__rsub__(a) is NotImplemented
assert x.__mul__(a) is NotImplemented
assert x.__rmul__(a) is NotImplemented
assert x.__div__(a) is NotImplemented
assert x.__rdiv__(a) is NotImplemented
assert x.__mod__(a) is NotImplemented
assert x.__rmod__(a) is NotImplemented
assert x.__pow__(a) is NotImplemented
assert x.__rpow__(a) is NotImplemented
assert z.__add__(a) is NotImplemented
assert z.__radd__(a) is NotImplemented
assert z.__eq__(a) is NotImplemented
assert z.__ne__(a) is NotImplemented
assert z.__sub__(a) is NotImplemented
assert z.__rsub__(a) is NotImplemented
assert z.__mul__(a) is NotImplemented
assert z.__rmul__(a) is NotImplemented
assert z.__div__(a) is NotImplemented
assert z.__rdiv__(a) is NotImplemented
assert z.__pow__(a) is NotImplemented
assert z.__rpow__(a) is NotImplemented
def test_mpmathify():
assert mpmathify('1/2') == 0.5
assert mpmathify('(1.0+1.0j)') == mpc(1, 1)
assert mpmathify('(1.2e-10 - 3.4e5j)') == mpc('1.2e-10', '-3.4e5')
assert mpmathify('1j') == mpc(1j)
|
py | b4164935817e0423dcc0f426843782af9a83ab76 | from pytorch_pretrained_bert.file_utils import cached_path
import os
import shutil
files = [
'https://raw.githubusercontent.com/glample/tagger/master/dataset/eng.testa',
'https://raw.githubusercontent.com/glample/tagger/master/dataset/eng.testb',
'https://raw.githubusercontent.com/glample/tagger/master/dataset/eng.train',
]
target_dir = 'data/conll2003'
os.makedirs(target_dir, exist_ok=True)
for fname in files:
name = os.path.basename(fname)
shutil.copyfile(cached_path(fname), os.path.join(target_dir, name))
print(fname, '=>', name)
|
py | b416493dcebfdf34989f7c91545d38700df8e3dd | # basic REPL tests
print(1)
[A
|
py | b41649f6368b5a63774067510e98c217142ca5bf | """
Contributed by Wenbin Li & Jinglin Xu
"""
import torch
import torch.nn as nn
from torch.nn import init
import functools
def weights_init_normal(m):
classname = m.__class__.__name__
# print(classname)
if classname.find('Conv') != -1:
init.normal_(m.weight.data, 0.0, 0.02)
elif classname.find('Linear') != -1:
init.normal_(m.weight.data, 0.0, 0.02)
elif classname.find('BatchNorm2d') != -1:
init.normal_(m.weight.data, 1.0, 0.02)
init.constant_(m.bias.data, 0.0)
def weights_init_xavier(m):
classname = m.__class__.__name__
# print(classname)
if classname.find('Conv') != -1:
init.xavier_normal_(m.weight.data, gain=0.02)
elif classname.find('Linear') != -1:
init.xavier_normal_(m.weight.data, gain=0.02)
elif classname.find('BatchNorm2d') != -1:
init.normal_(m.weight.data, 1.0, 0.02)
init.constant_(m.bias.data, 0.0)
def weights_init_kaiming(m):
classname = m.__class__.__name__
# print(classname)
if classname.find('Conv') != -1:
init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
elif classname.find('Linear') != -1:
init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
elif classname.find('BatchNorm2d') != -1:
init.normal_(m.weight.data, 1.0, 0.02)
init.constant_(m.bias.data, 0.0)
def weights_init_orthogonal(m):
classname = m.__class__.__name__
print(classname)
if classname.find('Conv') != -1:
init.orthogonal_(m.weight.data, gain=1)
elif classname.find('Linear') != -1:
init.orthogonal_(m.weight.data, gain=1)
elif classname.find('BatchNorm2d') != -1:
init.normal_(m.weight.data, 1.0, 0.02)
init.constant_(m.bias.data, 0.0)
def init_weights(net, init_type='normal'):
print('initialization method [%s]' % init_type)
if init_type == 'normal':
net.apply(weights_init_normal)
elif init_type == 'xavier':
net.apply(weights_init_xavier)
elif init_type == 'kaiming':
net.apply(weights_init_kaiming)
elif init_type == 'orthogonal':
net.apply(weights_init_orthogonal)
else:
raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
def get_norm_layer(norm_type='instance'):
if norm_type == 'batch':
norm_layer = functools.partial(nn.BatchNorm2d, affine=True)
elif norm_type == 'instance':
norm_layer = functools.partial(nn.InstanceNorm2d, affine=False)
elif norm_type == 'none':
norm_layer = None
else:
raise NotImplementedError('normalization layer [%s] is not found' % norm_type)
return norm_layer
def define_MultiViewNet(pretrained=False, model_root=None, which_model='multiviewNet', norm='batch', init_type='normal',
use_gpu=True, num_classes=6, num_view=5, view_list=None, fea_out=200, fea_com=300, **kwargs):
MultiviewNet = None
norm_layer = get_norm_layer(norm_type=norm)
if use_gpu:
assert(torch.cuda.is_available())
if which_model == 'multiviewNet':
MultiviewNet = MultiViewNet(num_classes=num_classes, num_view=num_view, view_list=view_list,
fea_out=fea_out, fea_com=fea_com, **kwargs)
else:
raise NotImplementedError('Model name [%s] is not recognized' % which_model)
init_weights(MultiviewNet, init_type=init_type)
if use_gpu:
MultiviewNet.cuda()
if pretrained:
MultiviewNet.load_state_dict(model_root)
return MultiviewNet
def print_network(net):
num_params = 0
for param in net.parameters():
num_params += param.numel()
print(net)
print('Total number of parameters: %d' % num_params)
class AttrProxy(object):
"""Translates index lookups into attribute lookups."""
def __init__(self, module, prefix):
self.module = module
self.prefix = prefix
def __getitem__(self, i):
return getattr(self.module, self.prefix + str(i))
class MultiViewNet(nn.Module):
def __init__(self, num_classes, num_view, view_list, fea_out, fea_com):
super(MultiViewNet, self).__init__()
# list of the linear layer
self.linear = []
for i in range(len(view_list)):
self.add_module('linear_'+str(i), nn.Sequential(
nn.Linear(view_list[i], 2 * fea_out).cuda(),
nn.BatchNorm1d(2 * fea_out).cuda(),
nn.ReLU(inplace=True).cuda(),
nn.Dropout().cuda(),
nn.Linear(2 * fea_out, fea_out).cuda(),
nn.BatchNorm1d(fea_out).cuda(),
nn.ReLU(inplace=True).cuda()
)
)
self.linear = AttrProxy(self, 'linear_')
self.relation_out = RelationBlock_Out()
self.classifier_out = nn.Sequential(
nn.Linear(num_view * fea_out, fea_com),
nn.BatchNorm1d(fea_com),
nn.ReLU(inplace=True),
nn.Dropout(),
nn.Linear(fea_com, num_classes),
nn.BatchNorm1d(num_classes)
)
def forward(self, input):
# extract features of inputs
Fea_list = []
for input_item, linear_item in zip(input, self.linear):
fea_temp = linear_item(input_item)
Fea_list.append(fea_temp)
Relation_fea = self.relation_out(Fea_list)
Fea_Relation_list = []
for k in range(len(Fea_list)):
Fea_Relation_temp = torch.cat((Fea_list[k], Relation_fea[k]), 1)
Fea_Relation_list.append(self.classifier_out(Fea_Relation_temp))
return Fea_Relation_list
class RelationBlock_Out(nn.Module):
def __init__(self):
super(RelationBlock_Out, self).__init__()
self.linear_out = nn.Sequential(
nn.Linear(200*200, 200),
nn.BatchNorm1d(200),
nn.ReLU(inplace=True)
)
def cal_relation(self, input1, input2):
input1 = input1.unsqueeze(2)
input2 = input2.unsqueeze(1)
outproduct = torch.bmm(input1, input2)
return outproduct
def forward(self, x):
relation_eachview_list = []
for i in range(len(x)):
relation_list = []
for j in range(len(x)):
relation_temp = self.cal_relation(x[i], x[j])
relation_temp = relation_temp.view(relation_temp.size(0), 200*200)
relation_temp = self.linear_out(relation_temp)
relation_list.append(relation_temp)
relation_list.pop(i)
relation_eachview_temp = torch.cat(relation_list, 1)
relation_eachview_list.append(relation_eachview_temp)
return relation_eachview_list
|
py | b4164a0e929880987a9374b7030e0e61e721f71e | #!/usr/bin/env python
__copyright__ = "Copyright 2013-2014, http://radical.rutgers.edu"
__license__ = "MIT"
import os
import radical.pilot as rp
verbose = os.environ.get('RADICAL_PILOT_VERBOSE', 'REPORT')
os.environ['RADICAL_PILOT_VERBOSE'] = verbose
""" DESCRIPTION: Tutorial 2: Chaining Tasks.
For every task A_n a task B_n is started consecutively.
"""
# READ: The RADICAL-Pilot documentation:
# https://radicalpilot.readthedocs.io/en/stable/
#
# Try running this example with RADICAL_PILOT_VERBOSE=debug set if
# you want to see what happens behind the scenes!
# ------------------------------------------------------------------------------
#
if __name__ == "__main__":
RESOURCE_LABEL = None
PILOT_CORES = None
NUMBER_CHAINS = None
CU_A_EXECUTABLE = None
CU_B_EXECUTABLE = None
QUEUE = None
# Create a new session. No need to try/except this: if session creation
# fails, there is not much we can do anyways...
session = rp.Session()
# all other pilot code is now tried/excepted. If an exception is caught, we
# can rely on the session object to exist and be valid, and we can thus tear
# the whole RP stack down via a 'session.close()' call in the 'finally'
# clause...
try:
# ----- CHANGE THIS -- CHANGE THIS -- CHANGE THIS -- CHANGE THIS ------
#
# Change the user name below if you are using a remote resource
# and your username on that resource is different from the username
# on your local machine.
#
# Add a Pilot Manager. Pilot managers manage one or more ComputePilots.
print("Initializing Pilot Manager ...")
pmgr = rp.PilotManager(session=session)
# ----- CHANGE THIS -- CHANGE THIS -- CHANGE THIS -- CHANGE THIS ------
#
# If you want to run this example on your local machine, you don't have
# to change anything here.
#
# Change the resource below if you want to run on a remote resource.
# You also might have to set the 'project' to your allocation ID if
# your remote resource does compute time accounting.
#
# A list of preconfigured resources can be found at:
# https://radicalpilot.readthedocs.io/en/stable/ \
# machconf.html#preconfigured-resources
#
pdesc = rp.ComputePilotDescription ()
pdesc.resource = RESOURCE_LABEL
pdesc.runtime = 30
pdesc.cores = PILOT_CORES
pdesc.cleanup = True
# submit the pilot.
print("Submitting Compute Pilot to Pilot Manager ...")
pilot = pmgr.submit_pilots(pdesc)
# Combine the ComputePilot, the ComputeUnits and a scheduler via
# a UnitManager object.
print("Initializing Unit Manager ...")
umgr = rp.UnitManager (session=session)
# Add the created ComputePilot to the UnitManager.
print("Registering Compute Pilot with Unit Manager ...")
umgr.add_pilots(pilot)
# submit A cus to pilot job
cudesc_list_A = []
for i in range(NUMBER_CHAINS):
# -------- BEGIN USER DEFINED CU A_n DESCRIPTION --------- #
cudesc = rp.ComputeUnitDescription()
cudesc.environment = {"CU_LIST": "A", "CU_NO": "%02d" % i}
cudesc.executable = CU_A_EXECUTABLE
cudesc.arguments = ['"$CU_LIST CU with id $CU_NO"']
cudesc.cores = 1
# -------- END USER DEFINED CU A_n DESCRIPTION --------- #
cudesc_list_A.append(cudesc)
# Submit the previously created ComputeUnit descriptions to the
# PilotManager. This will trigger the selected scheduler to start
# assigning ComputeUnits to the ComputePilots.
print("Submit 'A' Compute Units to Unit Manager ...")
cu_list_A = umgr.submit_units(cudesc_list_A)
# Chaining cus i.e submit a compute unit, when compute unit from A is
# successfully executed. A B CU reads the content of the output file of
# an A CU and writes it into its own output file.
cu_list_B = []
# We create a copy of cu_list_A so that we can remove elements from it,
# and still reference to the original index.
cu_list_A_copy = cu_list_A[:]
while cu_list_A:
for cu_a in cu_list_A:
idx = cu_list_A_copy.index(cu_a)
cu_a.wait ()
print("'A' Compute Unit '%s' done. Submitting 'B' CU ..." % idx)
# -------- BEGIN USER DEFINED CU B_n DESCRIPTION --------- #
cudesc = rp.ComputeUnitDescription()
cudesc.environment = {'CU_LIST': 'B', 'CU_NO': "%02d" % idx}
cudesc.executable = CU_B_EXECUTABLE
cudesc.arguments = ['"$CU_LIST CU with id $CU_NO"']
cudesc.cores = 1
# -------- END USER DEFINED CU B_n DESCRIPTION --------- #
# Submit CU to Pilot Job
cu_b = umgr.submit_units(cudesc)
cu_list_B.append(cu_b)
cu_list_A.remove(cu_a)
print("Waiting for 'B' Compute Units to complete ...")
for cu_b in cu_list_B :
cu_b.wait ()
print("'B' Compute Unit '%s' finished with output:" % (cu_b.uid))
print(cu_b.stdout)
print("All Compute Units completed successfully!")
except Exception as e:
# Something unexpected happened in the pilot code above
print("caught Exception: %s" % e)
raise
except (KeyboardInterrupt, SystemExit) as e:
# the callback called sys.exit(), and we can here catch the
# corresponding KeyboardInterrupt exception for shutdown. We also catch
# SystemExit (which gets raised if the main threads exits for some other
# reason).
print("need to exit now: %s" % e)
finally:
# always clean up the session, no matter if we caught an exception or
# not.
print("closing session")
session.close ()
# the above is equivalent to
#
# session.close (cleanup=True, terminate=True)
#
# it will thus both clean out the session's database record, and kill
# all remaining pilots (none in our example).
# ------------------------------------------------------------------------------
|
py | b4164a9ff8e7314d6b7ec82cfa6b46c1b9c2a037 | import os
import numpy as np
from pandas import DataFrame, Series
from autogluon import try_import_lightgbm
from ...constants import BINARY, MULTICLASS, REGRESSION
# Mapping to specialized LightGBM metrics that are much faster than the standard metric computation
_ag_to_lgbm_metric_dict = {
BINARY: dict(
accuracy='binary_error',
log_loss='binary_logloss',
roc_auc='auc',
),
MULTICLASS: dict(
accuracy='multi_error',
log_loss='multi_logloss',
),
REGRESSION: dict(
mean_absolute_error='l1',
mean_squared_error='l2',
root_mean_squared_error='rmse',
),
}
def convert_ag_metric_to_lgbm(ag_metric_name, problem_type):
return _ag_to_lgbm_metric_dict.get(problem_type, dict()).get(ag_metric_name, None)
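# Example (added for illustration): mapping an AutoGluon metric name to the
# equivalent native LightGBM metric string, based on the dictionary above.
#
#     convert_ag_metric_to_lgbm('roc_auc', BINARY)       # -> 'auc'
#     convert_ag_metric_to_lgbm('accuracy', REGRESSION)  # -> None (no mapping)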
def func_generator(metric, is_higher_better, needs_pred_proba, problem_type):
if needs_pred_proba:
if problem_type == MULTICLASS:
def function_template(y_hat, data):
y_true = data.get_label()
y_hat = y_hat.reshape(len(np.unique(y_true)), -1).T
return metric.name, metric(y_true, y_hat), is_higher_better
else:
def function_template(y_hat, data):
y_true = data.get_label()
return metric.name, metric(y_true, y_hat), is_higher_better
else:
if problem_type == MULTICLASS:
def function_template(y_hat, data):
y_true = data.get_label()
y_hat = y_hat.reshape(len(np.unique(y_true)), -1)
y_hat = y_hat.argmax(axis=0)
return metric.name, metric(y_true, y_hat), is_higher_better
else:
def function_template(y_hat, data):
y_true = data.get_label()
y_hat = np.round(y_hat)
return metric.name, metric(y_true, y_hat), is_higher_better
return function_template
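# Example (illustrative sketch, not part of the original module): the callable
# returned by func_generator follows LightGBM's custom eval-function protocol
# (returning (name, value, is_higher_better)), so it can be passed to
# lightgbm.train via the 'feval' argument. 'ag_metric' below is an assumed
# AutoGluon Scorer-like object that is callable and exposes a 'name' attribute;
# 'params', 'train_set', and 'valid_set' are assumed placeholders.
#
#     custom_eval = func_generator(ag_metric, is_higher_better=True,
#                                  needs_pred_proba=True, problem_type=BINARY)
#     booster = lgb.train(params, train_set, valid_sets=[valid_set], feval=custom_eval)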
def construct_dataset(x: DataFrame, y: Series, location=None, reference=None, params=None, save=False, weight=None):
try_import_lightgbm()
import lightgbm as lgb
dataset = lgb.Dataset(data=x, label=y, reference=reference, free_raw_data=True, params=params, weight=weight)
if save:
assert location is not None
saving_path = f'{location}.bin'
if os.path.exists(saving_path):
os.remove(saving_path)
os.makedirs(os.path.dirname(saving_path), exist_ok=True)
dataset.save_binary(saving_path)
# dataset_binary = lgb.Dataset(location + '.bin', reference=reference, free_raw_data=False)# .construct()
return dataset
|
py | b4164bd67a721dbc832bec04d5f2efd329f561f3 | import os
from pyngrok.installer import get_ngrok_bin
__author__ = "Alex Laird"
__copyright__ = "Copyright 2022, Alex Laird"
__version__ = "5.1.0"
BIN_DIR = os.path.normpath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "bin"))
DEFAULT_NGROK_PATH = os.path.join(BIN_DIR, get_ngrok_bin())
DEFAULT_CONFIG_PATH = None
DEFAULT_NGROK_CONFIG_PATH = os.path.join(os.path.expanduser("~"), ".ngrok2", "ngrok.yml")
_default_pyngrok_config = None
class PyngrokConfig:
"""
An object containing ``pyngrok``'s configuration for interacting with the ``ngrok`` binary. All values are
optional when it is instantiated, and default values will be used for parameters not passed.
Use :func:`~pyngrok.conf.get_default` and :func:`~pyngrok.conf.set_default` to interact with the default
``pyngrok_config``, or pass another instance of this object as the ``pyngrok_config`` keyword arg to most
methods in the :mod:`~pyngrok.ngrok` module to override the default.
.. code-block:: python
from pyngrok import conf, ngrok
# Here we update the entire default config
pyngrok_config = conf.PyngrokConfig(ngrok_path="/usr/local/bin/ngrok")
conf.set_default(pyngrok_config)
# Here we update just one variable in the default config
conf.get_default().ngrok_path = "/usr/local/bin/ngrok"
# Here we leave the default config as-is and pass an override
pyngrok_config = conf.PyngrokConfig(ngrok_path="/usr/local/bin/ngrok")
ngrok.connect(pyngrok_config=pyngrok_config)
:var ngrok_path: The path to the ``ngrok`` binary, defaults to the value in
`conf.DEFAULT_NGROK_PATH <index.html#config-file>`_
:vartype ngrok_path: str
:var config_path: The path to the ``ngrok`` config, defaults to ``None`` and ``ngrok`` manages it.
:vartype config_path: str
:var auth_token: An authtoken to pass to commands (overrides what is in the config).
:vartype auth_token: str
:var region: The region in which ``ngrok`` should start.
:vartype region: str
:var monitor_thread: Whether ``ngrok`` should continue to be monitored (for logs, etc.) after startup
is complete.
:vartype monitor_thread: bool
:var log_event_callback: A callback that will be invoked each time ``ngrok`` emits a log. ``monitor_thread``
must be set to ``True`` or the function will stop being called after ``ngrok`` finishes starting.
:vartype log_event_callback: types.FunctionType
:var startup_timeout: The max number of seconds to wait for ``ngrok`` to start before timing out.
:vartype startup_timeout: int
:var max_logs: The max number of logs to store in :class:`~pyngrok.process.NgrokProcess`'s ``logs`` variable.
:vartype max_logs: int
:var request_timeout: The max timeout when making requests to ``ngrok``'s API.
:vartype request_timeout: float
:var start_new_session: Passed to :py:class:`subprocess.Popen` when launching ``ngrok``. (Python 3 and POSIX only)
:vartype start_new_session: bool
"""
def __init__(self,
ngrok_path=None,
config_path=None,
auth_token=None,
region=None,
monitor_thread=True,
log_event_callback=None,
startup_timeout=15,
max_logs=100,
request_timeout=4,
start_new_session=False):
self.ngrok_path = DEFAULT_NGROK_PATH if ngrok_path is None else ngrok_path
self.config_path = DEFAULT_CONFIG_PATH if config_path is None else config_path
self.auth_token = auth_token
self.region = region
self.monitor_thread = monitor_thread
self.log_event_callback = log_event_callback
self.startup_timeout = startup_timeout
self.max_logs = max_logs
self.request_timeout = request_timeout
self.start_new_session = start_new_session
def get_default():
"""
Get the default config to be used with methods in the :mod:`~pyngrok.ngrok` module. To override the
default individually, the ``pyngrok_config`` keyword arg can also be passed to most of these methods,
or set a new default config with :func:`~pyngrok.conf.set_default`.
:return: The default ``pyngrok_config``.
:rtype: PyngrokConfig
"""
if _default_pyngrok_config is None:
set_default(PyngrokConfig())
return _default_pyngrok_config
def set_default(pyngrok_config):
"""
Set a new default config to be used with methods in the :mod:`~pyngrok.ngrok` module. To override the
default individually, the ``pyngrok_config`` keyword arg can also be passed to most of these methods.
:param pyngrok_config: The new ``pyngrok_config`` to be used by default.
:type pyngrok_config: PyngrokConfig
"""
global _default_pyngrok_config
_default_pyngrok_config = pyngrok_config
|
py | b4164d767e9aca8ddf61620b922c99c04d69ee45 | def get_larger_numbers(a, b):
return map(max, a, b) |
py | b4164eb95c38481904a52373ebb395c4ae88a39d | '''
Bi-directional JsonRPC Server and Client for Kamaelia.
Copyright (c) 2009 Rasjid Wilcox and CDG Computer Services.
Licensed to the BBC under a Contributor Agreement
'''
import Axon
from Axon.Handle import Handle
from Axon.background import background
from Axon.Ipc import shutdownMicroprocess, producerFinished
from Kamaelia.Chassis.ConnectedServer import ServerCore
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Chassis.Graphline import Graphline
from Kamaelia.Internet.TCPClient import TCPClient
from jsonrpc import JsonRpc20, RPCFault, METHOD_NOT_FOUND, INTERNAL_ERROR, ERROR_MESSAGE, REQUEST, RESPONSE, ERROR, json_split
from traceback import format_exc
from collections import defaultdict
import types, inspect, Queue
# FIXME: add protection from Denial of Service
# decorators to mark function args as either
# callback requests or callback notifications
def cb_request(arg_name, response_func, convert_args = False):
def cb_request_dec(func):
if not hasattr(func, '_callbacks_'):
func._callbacks_ = {}
if response_func:
func._callbacks_[arg_name] = ResponseCallback(response_func, convert_args)
else:
func._callbacks_[arg_name] = None
return func
return cb_request_dec
def cb_notification(arg_name):
return cb_request(arg_name, None)
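# Illustrative use (names are hypothetical): mark an rpc method's 'progress' argument as a
# callback request whose responses are handled by on_progress:
#
#     @cb_request('progress', on_progress)
#     def long_running_task(progress): ...
#
# cb_notification('progress') marks the same argument as a notification with no response handler.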
class ResponseCallback(object):
def __init__(self, callback_func, convert_args = False):
'''if convert_args then convert a list, tuple or dict to args in standard jsonrpc way'''
self.callback_func = callback_func
self.convert_args = convert_args
class RequestOrNotification(object):
'If response_callback is None, then this is a notification'
def __init__(self, method, params = None, response_callback = None):
if response_callback: assert isinstance(response_callback, ResponseCallback)
self.method = method
self.params = params
self.response_callback = response_callback
class JsonRpcProtocol(object):
'Protocol Factory for JsonRpc over TCP'
def __init__(self, task_runner, id_prefix = 'server', debug = 0):
self.task_runner = task_runner
self.id_prefix = id_prefix
self.debug = debug
self.dispatch_table = {}
self.callback_table = defaultdict(dict) # try key on actual function
self.requests_on_connect = []
self.requests_on_connect_wait = None # id of request to wait for before sending next
self.requests_sent = {}
self._request_id_num = 1
self.connections = []
def get_request_id(self, request):
req_num = self._request_id_num
if self.id_prefix:
request_id = '%s-%s' % (self.id_prefix, req_num)
else:
request_id = req_num
assert isinstance(request, RequestOrNotification)
self.requests_sent[request_id] = request.response_callback
if request.response_callback:
self.add_callbacks(request.response_callback)
self._request_id_num += 1
return request_id
def add_callbacks(self, function):
if function in self.callback_table:
# already in callback table, so just return
return
if hasattr(function, '_callbacks_'): # 'response_callback'):
for arg_name, response_callback in function._callbacks_.items():
name = function.__name__
self.callback_table[function][arg_name] = response_callback
print 'Added callback for method %s, argument %s' % (name, arg_name)
try:
# args by position - offset needed for instance methods etc
offset = 1 if (hasattr(function, 'im_self') and function.im_self) else 0
arg_num = inspect.getargspec(function)[0].index(arg_name) - offset
self.callback_table[function][arg_num] = response_callback
print 'Added callback for method %s, arg_num %s' % (name, arg_num)
except ValueError:
print 'WARNING: unable to determine argument position for callback on method %s, argument %s.\n' \
'Automatic callback conversion will not occur if called by position.' % (name, arg_name)
def add_function(self, function, name = None):
if name is None:
name = function.__name__
if name in self.dispatch_table:
raise ValueError('rpc method %s already exists!' % name)
self.dispatch_table[name] = function
print 'Added rpc method %s' % name
self.add_callbacks(function)
def add_instance(self, instance, prefix = None):
'''Add all callable attributes of an instance not starting with '_'.
If prefix is none, then the rpc name is just <method_name>,
otherwise it is '<prefix>.<method_name>
'''
for name in dir(instance):
if name[0] != '_':
func = getattr(instance, name, None)
if type(func) == types.MethodType:
if prefix:
rpcname = '%s.%s' % (prefix, func.__name__)
else:
rpcname = func.__name__
self.add_function(func, name = rpcname)
def add_request_on_connect(self, req_or_notification, wait = True):
self.requests_on_connect.append( (req_or_notification, wait) )
def __call__(self, **kwargs):
if self.debug >= 1:
print 'Creating new Protocol Factory: ', str(kwargs)
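        # Each connection gets its own pipeline: SPLITTER reassembles JSON documents from the byte
        # stream, DESERIALIZER parses them, DISPATCHER routes requests/responses to the shared
        # TASKRUNNER, and the two serializers turn results and outgoing requests back into JSON.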
connection = Graphline( SPLITTER = JsonSplitter(debug = self.debug, factory = self, **kwargs),
DESERIALIZER = Deserializer(debug = self.debug, factory = self, **kwargs),
DISPATCHER = Dispatcher(debug = self.debug, factory = self, **kwargs),
RESPONSESERIALIZER = ResponseSerializer(debug = self.debug, factory = self, **kwargs),
REQUESTSERIALIZER = RequestSerializer(debug = self.debug, factory = self, **kwargs),
FINALIZER = Finalizer(debug = self.debug, factory = self, **kwargs),
TASKRUNNER = self.task_runner,
linkages = { ('self', 'inbox') : ('SPLITTER', 'inbox'),
('self', 'request') : ('REQUESTSERIALIZER', 'request'),
('SPLITTER', 'outbox') : ('DESERIALIZER', 'inbox'),
('DESERIALIZER', 'outbox'): ('DISPATCHER', 'inbox'),
('DESERIALIZER', 'error'): ('RESPONSESERIALIZER', 'inbox'),
('DISPATCHER', 'outbox') : ('TASKRUNNER', 'inbox'),
('DISPATCHER', 'result_out') : ('RESPONSESERIALIZER', 'inbox'),
('DISPATCHER', 'request_out') : ('REQUESTSERIALIZER', 'request'),
('RESPONSESERIALIZER', 'outbox') : ('self', 'outbox'),
('REQUESTSERIALIZER', 'outbox'): ('self', 'outbox'),
('self', 'control') : ('SPLITTER', 'control'),
('SPLITTER', 'signal') : ('DESERIALIZER', 'control'),
('DESERIALIZER', 'signal'): ('DISPATCHER', 'control'),
('DISPATCHER', 'signal') : ('RESPONSESERIALIZER', 'control'),
('RESPONSESERIALIZER', 'signal') : ('REQUESTSERIALIZER', 'control'),
('REQUESTSERIALIZER', 'signal') : ('FINALIZER', 'control'),
('FINALIZER', 'signal') : ('self', 'signal'),
('DISPATCHER', 'wake_requester') : ('REQUESTSERIALIZER', 'control'),
} )
self.connections.append(connection)
return connection
class JsonSplitter(Axon.Component.component):
Inboxes = { 'inbox': 'accepts arbitrary (sequential) pieces of json stings',
'control': 'incoming shutdown requests' }
Outboxes = { 'outbox': 'a single complete json string',
'signal': 'outgoing shutdown requests' }
def __init__(self, **kwargs):
super(JsonSplitter, self).__init__(**kwargs)
self.partial_data = ''
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
data = self.recv('inbox')
if self.debug >= 4: print 'Got data: <<%s>>' % data
Json_strings, self.partial_data = json_split(self.partial_data + data)
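                # json_split returns every complete JSON document received so far; any trailing
                # fragment is kept in self.partial_data until more bytes arrive.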
yield 1
# send to dispatch
for message in Json_strings:
if self.debug >= 3: print 'Sent to deserializer: %s' % message
self.send(message, 'outbox')
yield 1
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class Deserializer(Axon.Component.component):
Inboxes = {'inbox': 'complete json strings',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'the deserialized request/notification or result',
'error': 'the exception if there was an error deserializing',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(Deserializer, self).__init__(**kwargs)
self.serializer = JsonRpc20() # FIXME: make this a paramater
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
data = self.recv('inbox')
if self.debug >=1: print '--> %s' % data
try:
request = self.serializer.loads_request_response(data)
self.send(request, 'outbox')
except RPCFault, error:
self.send( (error, None), 'error')
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class CallbackProxy(object):
def __init__(self, method_name, response_callback):
self.method_name = method_name
self.response_callback = response_callback
self.params = None
self.component = None
self.outbox_name = None
def set_outbox(self, component, outbox_name):
self.component = component
self.outbox_name = outbox_name
def __call__(self, params = None):
if not self.component or not self.outbox_name:
raise ValueError('component or outbox_name not set')
req = RequestOrNotification(self.method_name, params, self.response_callback)
self.component.send(req, self.outbox_name)
class Dispatcher(Axon.Component.component):
Inboxes = {'inbox': 'rpc request/notification or response objects',
'result_in': 'the function/method result or RequestOrNotification',
'control': 'shutdown messages',
}
Outboxes = {'outbox': '(return_component, method, args, id) tuple for the worker. NOTE: return_component == (self, <boxname>)',
'result_out': 'the result of the request (relayed from result_in)',
'request_out': 'requests from callback functions',
'signal': 'shutdown messages',
'wake_requester': 'wake up RequestSerializer',
}
def __init__(self, **kwargs):
super(Dispatcher, self).__init__(**kwargs)
if self.debug >= 3: print 'Created %s' % repr(self)
def _do_dispatch(self, dispatch_func, args, id, notification, convert_args = True):
'Assumes args is always a list, tuple or dict'
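        # Arguments registered in the factory's callback table are swapped for CallbackProxy objects,
        # so the dispatched function can send requests/notifications back over this connection.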
kwargs = {}
if convert_args:
if isinstance(args, dict):
# args by name
args, kwargs = [], args
# find any callback args and replace with callback proxy
for arg_name in set(self.factory.callback_table[dispatch_func].keys()).intersection(set(kwargs.keys())):
kwargs[arg_name] = CallbackProxy(kwargs[arg_name], self.factory.callback_table[dispatch_func][arg_name])
else:
arg_nums = range(len(args))
for arg_num in set(self.factory.callback_table[dispatch_func].keys()).intersection(set(arg_nums)):
args[arg_num] = CallbackProxy(args[arg_num], self.factory.callback_table[dispatch_func][arg_num])
else:
args = [args]
return_box = (self, 'result_in')
dispatch_info = (dispatch_func, args, kwargs)
return_info = (id, notification)
if self.debug >= 3: print 'Sending: %r\n%r\n%r' % (return_box, dispatch_info, return_info)
self.send( (return_box, dispatch_info, return_info), 'outbox')
def _process_request(self, request):
if self.debug >= 3: print 'Got dispatch request: %s' % repr(request)
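        # A notification arrives as a 2-tuple (method, args) and gets no response; a request is a
        # 3-tuple (method, args, id) and must be answered, even if the method is unknown.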
notification = False
if len(request) == 2:
notification = True
method, args = request
id = None
else:
method, args, id = request
if not notification and method not in self.factory.dispatch_table:
response = ( RPCFault(METHOD_NOT_FOUND, ERROR_MESSAGE[METHOD_NOT_FOUND]), id)
self.send(response, 'result_out')
else:
dispatch_func = self.factory.dispatch_table[method]
self._do_dispatch(dispatch_func, args, id, notification)
def _process_response(self, response):
print '=== Response: %s ===' % repr(response)
result, id = response
response_callback = None
if id == self.factory.requests_on_connect_wait:
self.factory.requests_on_connect_wait = None # clear waiting on this request
if len(self.factory.requests_on_connect):
self.send(Axon.Ipc.notify(self, id), 'wake_requester') # wake requester so it can send pending requests
# look up response callback
try:
response_callback = self.factory.requests_sent.pop(id)
assert isinstance(response_callback, ResponseCallback)
except KeyError:
print 'ERROR: Invalid response id %s' % id
        if result is None:
            return
        if not response_callback:
            print 'ERROR: Got result for a notification or request with no callback defined'
            return
        if response_callback.convert_args and type(result) not in (types.ListType, types.TupleType, types.DictionaryType):
            print "ERROR: Can't convert response result to procedure arguments - must be List, Tuple or Dict"
            return
        self._do_dispatch(response_callback.callback_func, result, id, True, convert_args = response_callback.convert_args) # not really a notification - but we don't return a response to a response
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
data = self.recv('inbox')
if data[0] == REQUEST:
request = data[1]
self._process_request(request)
elif data[0] == RESPONSE:
# got a response to a request we sent
response = data[1]
self._process_response(response)
elif data[0] == ERROR:
# FIXME: handle error responses
print '!!! GOT ERROR RESPONSE: %s' % repr(data[1])
else:
# FIXME
print 'INTERNAL ERROR: Unexpected message type'
if self.dataReady('result_in'):
data = self.recv('result_in')
result, (id, notification) = data
if isinstance(result, RequestOrNotification):
if self.debug >= 3: print 'Got RequestOrNotification: %s' % result
self.send(result, 'request_out')
else:
if self.debug >= 2: print 'Got result for id %s:\n %s' % (id, repr(result))
if not notification:
self.send((result, id), 'result_out')
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class ResponseSerializer(Axon.Component.component):
Inboxes = {'inbox': '(result, id) tuple',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'the json-rpc response',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(ResponseSerializer, self).__init__(**kwargs)
self.serializer = JsonRpc20() # FIXME: make this a paramater
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
result, id = self.recv('inbox')
if self.debug >= 3: print 'Got result. Id: %r, Value: %r' % (id, result)
if isinstance(result, RPCFault):
response = self.serializer.dumps_error( result, id)
elif isinstance(result, Exception):
# procedure exception - FIXME: log to logger!
print format_exc()
response = self.serializer.dumps_error( RPCFault(INTERNAL_ERROR, ERROR_MESSAGE[INTERNAL_ERROR]), id )
else:
try:
response = self.serializer.dumps_response(result, id)
except RPCFault, e:
response = self.serializer.dumps_error( e, id)
                    except Exception:
                        # serialization error - log to logger!
                        print format_exc()
                        response = self.serializer.dumps_error( RPCFault(INTERNAL_ERROR, ERROR_MESSAGE[INTERNAL_ERROR]), id )
response += '\r\n' # make things easier to read if testing with telnet or netcat
if self.debug >= 1:
print '<-- %s' % response
self.send(response, 'outbox')
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class RequestSerializer(Axon.Component.component):
Inboxes = {'inbox': 'not used',
'request' : 'incoming RequestOrNotification objects',
'control': 'wakeup & shutdown messages',
}
Outboxes = {'outbox': 'the json-rpc request / notification',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(RequestSerializer, self).__init__(**kwargs)
self.serializer = JsonRpc20() # FIXME: make this a paramater
if self.debug >= 3: print 'Created %s' % repr(self)
def _send_req_or_notification(self, req, wait = False):
assert isinstance(req, RequestOrNotification)
if req.response_callback:
id = self.factory.get_request_id(req) # this adds the id to self.requests_sent
if wait:
self.factory.requests_on_connect_wait = id
output = self.serializer.dumps_request(req.method, req.params, id) if req.params \
else self.serializer.dumps_request(req.method, id = id)
else:
output = self.serializer.dumps_notification(req.method, req.params) if req.params \
else self.serializer.dumps_notification(req.method)
output += '\r\n' # make things easier to read if testing with telnet or netcat
if self.debug >= 1: print '<-- %s' % output
self.send(output, 'outbox')
def main(self):
while not self.shutdown():
if len(self.factory.requests_on_connect) and not self.factory.requests_on_connect_wait:
request, wait = self.factory.requests_on_connect.pop(0)
self._send_req_or_notification(request, wait)
if self.dataReady('request'):
req = self.recv('request')
self._send_req_or_notification(req)
if not self.anyReady() and (len(self.factory.requests_on_connect) == 0 or self.factory.requests_on_connect_wait) :
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class Finalizer(Axon.Component.component):
Inboxes = {'inbox': 'not used',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'not used',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(Finalizer, self).__init__(**kwargs)
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
# FIXME: Log any outstanding request reponses missing
print 'Connection is being closed'
for req_id in self.factory.requests_sent:
print 'WARNING: No response seen to request %s' % req_id
self.send(msg, 'signal')
return True
return False
# -------------------------------------------
def ThreadedTaskRunner(num_workers = 5, debug = 0):
worker_list = []
for dummy in range(num_workers):
worker = ThreadedWorker(debug = debug)
worker.activate()
worker_list.append(worker)
manager = TaskManager(worker_list, debug = debug)
return manager
class ThreadedWorker(Axon.ThreadedComponent.threadedcomponent):
Inboxes = {'inbox': '(function, args, kwargs) tuple',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'the result or exception or callback request',
'signal': 'shutdown messages',
}
def __init__(self, **kwargs):
super(ThreadedWorker, self).__init__(**kwargs)
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
func, args, kwargs = self.recv('inbox')
for arg in args:
if isinstance(arg, CallbackProxy):
arg.set_outbox(self, 'outbox')
for arg_name in kwargs:
if isinstance(kwargs[arg_name], CallbackProxy):
kwargs[arg_name].set_outbox(self, 'outbox')
if self.debug >= 3: print 'Worker %s got data: %r, %r, %r' % (id(self), func, args, kwargs)
try:
result = func(*args, **kwargs)
except Exception, error:
result = error
if self.debug >= 3: print 'Worker %s got result: %r' % (id(self), result)
self.send(result, 'outbox')
if not self.anyReady():
self.pause()
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
self.send(msg, 'signal')
return True
return False
class TaskManager(Axon.AdaptiveCommsComponent.AdaptiveCommsComponent):
Inboxes = {'inbox': '(return_box, dispatch_info, return_info) tuple',
'control': 'shutdown messages',
}
Outboxes = {'outbox': 'not used',
'signal': 'shutdown messages',
}
'''
return_box = (<sending_component>, <return_box_name>)
dispatch_info = (self.factory.dispatch_table[method], args, kwargs)
return_info = (id, notification)
'''
def __init__(self, workers, debug = 0):
super(TaskManager, self).__init__()
self.debug = debug
self.workers = workers # a list of worker components
self.task_data = [ None for x in range(len(workers)) ] # an available worker has None here
self.work_queue = []
self.worker_box_names = []
self.links = []
# make connections to the workers
for worker_num in range(len(self.workers)):
outbox_name = self.addOutbox('to_worker_%s' % worker_num)
inbox_name = self.addInbox('from_worker_%s' % worker_num)
signal_name = self.addOutbox('signal_worker_%s' % worker_num)
boxnames = {'to': outbox_name, 'from': inbox_name, 'signal': signal_name}
self.worker_box_names.append(boxnames)
outlink = self.link((self, outbox_name), (self.workers[worker_num], 'inbox'))
control_link = self.link((self, signal_name), (self.workers[worker_num], 'control'))
inlink = self.link((self.workers[worker_num], 'outbox'), (self, inbox_name))
self.links.append((outlink, control_link, inlink))
if self.debug >= 3: print 'Created %s' % repr(self)
def main(self):
while not self.shutdown():
if self.dataReady('inbox'):
data = self.recv('inbox')
if self.debug >= 3: print 'Task Manager got data: %s' % repr(data)
self.work_queue.append(data)
if len(self.work_queue) != 0 and None in self.task_data:
return_box, dispatch_info, return_info = self.work_queue.pop(0)
result_box_name = self.addOutbox('%s-%s-%s' % (id(return_box), id(dispatch_info), id(return_info)))
self.link((self, result_box_name), return_box)
worker_num = self.task_data.index(None) # pick the first free worker
self.task_data[worker_num] = (result_box_name, return_box, return_info)
if self.debug >= 3:
print 'Sending task data to worker %s (box %s)' % (worker_num, self.worker_box_names[worker_num]['to'])
print 'Dispatch:', dispatch_info
self.send(dispatch_info, self.worker_box_names[worker_num]['to'])
if self.anyReady():
for worker_num in range(len(self.workers)):
boxname = self.worker_box_names[worker_num]['from']
if self.dataReady(boxname):
data = self.recv(boxname)
if self.debug >= 3: print 'TaskManager got data %r on boxname %s' % (data, boxname)
result_box_name, return_box, return_info = self.task_data[worker_num]
self.send( (data, return_info), result_box_name) # post the result
if not isinstance(data, RequestOrNotification):
if self.debug >= 3: print '** Doing unlink ** on %s' % result_box_name
self.unlink( (self, result_box_name), return_box)
self.deleteOutbox(result_box_name)
self.task_data[worker_num] = None # mark that worker as done
yield 1
if not self.anyReady():
self.pause()
yield 1
if self.debug >= 3:
print 'End of main for %s' % self.__class__.__name__
def shutdown(self):
if self.dataReady('control'):
msg = self.recv('control')
if isinstance(msg, shutdownMicroprocess) or isinstance(msg, producerFinished):
if self.debug >= 3: print '%s got shutdown msg: %r' % (self.__class__.__name__, msg)
for boxnames in self.worker_box_names:
self.send(msg, boxnames['signal'])
self.send(msg, 'signal')
return True
return False
class JsonRPCBase(object):
'Base class for JsonRPC clients and servers'
def __init__(self, workers, debug):
self.workers = workers
self.debug = debug
taskrunner = ThreadedTaskRunner(num_workers = self.workers, debug = self.debug)
self.jsonprotocol = JsonRpcProtocol(taskrunner, debug = self.debug)
def add_function(self, func):
self.jsonprotocol.add_function(func)
def add_instance(self, instance):
self.jsonprotocol.add_instance(instance)
def add_request_on_connect(self, req_or_notification, wait = True):
self.jsonprotocol.add_request_on_connect(req_or_notification, wait)
class JsonRpcTCPServer(JsonRPCBase):
def __init__(self, portnumber, workers = 5, debug = 1):
JsonRPCBase.__init__(self, workers = workers, debug = debug)
self.portnumber = portnumber
self.server = None
def start(self):
if self.debug: print 'Starting JSON-RPC server on port %s' % self.portnumber
self.server = ServerCore( protocol = self.jsonprotocol, port = self.portnumber )
self.server.run()
#FIXME: some way to stop!
class JsonRpcTCPClient(JsonRPCBase):
def __init__(self, host, portnumber, delay = 0, workers = 5, debug = 1):
JsonRPCBase.__init__(self, workers = workers, debug = debug)
self.host = host
self.portnumber = portnumber
self.delay = delay
self.client = Graphline(
TCPCLIENT = TCPClient(self.host, self.portnumber, self.delay),
PROTOCOL = self.jsonprotocol(),
linkages = { ('TCPCLIENT', 'outbox') : ('PROTOCOL', 'inbox'),
('PROTOCOL', 'outbox') : ('TCPCLIENT', 'inbox'),
('TCPCLIENT', 'signal') : ('PROTOCOL', 'control'),
('PROTOCOL', 'signal') : ('TCPCLIENT', 'control'),
} )
self.handle = Handle(self.client)
def start(self):
if self.debug: print 'Starting TCP Client - connecting to %s on port %s' % (self.host, self.portnumber)
##self.client.run()
try:
background().start()
except:
pass # assume already running
self.client.activate()
class Proxy(object):
def __init__(self, host, portnumber, delay = 0, threaded = True, workers = 5, debug = 1):
self.host = host
self.portnumber = portnumber
self.delay = delay
self.threaded = threaded
self.workers = workers
self.debug = debug
        self.client = JsonRpcTCPClient(host = host, portnumber = portnumber, delay = delay, workers = workers,
                                       debug = debug)
self.request = RequestProxy(self.client, True)
self.notification = RequestProxy(self.client, False)
class RequestProxy(object):
def __init__(self, client, request = True):
self.client = client
self.request = request
def _remote_call(self, name, params):
client = self.client
|
py | b4164fd1cb1c16ad818fbbd3e34d2c1c83879164 | """Django models utilities."""
# Django
from django.db import models
class CRideModel(models.Model):
"""Comparte Ride base model.
CRideModel acts as an abstract base class from which every
other model in the project will inherit. This class provides
every table with the following attributes:
+ created (DateTime): Store the datetime the object was created.
+ modified (DateTime): Store the last datetime the object was modified.
"""
created = models.DateTimeField(
'created at',
auto_now_add=True,
help_text='Date time on which the object was created.'
)
modified = models.DateTimeField(
'modified at',
auto_now=True,
help_text='Date time on which the object was last modified.'
)
class Meta:
"""Meta option."""
        # Abstract: this is not a table in the database
abstract = True
get_latest_by = 'created'
ordering = ['-created', '-modified']
# class Student(CRideModel):
# name = models.CharField()
# class Meta(CRideModel.Meta):
# db_table = ""
|
py | b41650eff2a63d1f334d76cdeb9ee7d0aaf5ff54 | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ProjectArgs', 'Project']
@pulumi.input_type
class ProjectArgs:
def __init__(__self__, *,
group_name: pulumi.Input[str],
service_name: pulumi.Input[str],
source_platform: pulumi.Input[Union[str, 'ProjectSourcePlatform']],
target_platform: pulumi.Input[Union[str, 'ProjectTargetPlatform']],
databases_info: Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseInfoArgs']]]] = None,
location: Optional[pulumi.Input[str]] = None,
project_name: Optional[pulumi.Input[str]] = None,
source_connection_info: Optional[pulumi.Input['SqlConnectionInfoArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
target_connection_info: Optional[pulumi.Input['SqlConnectionInfoArgs']] = None):
"""
The set of arguments for constructing a Project resource.
:param pulumi.Input[str] group_name: Name of the resource group
:param pulumi.Input[str] service_name: Name of the service
:param pulumi.Input[Union[str, 'ProjectSourcePlatform']] source_platform: Source platform for the project
:param pulumi.Input[Union[str, 'ProjectTargetPlatform']] target_platform: Target platform for the project
:param pulumi.Input[Sequence[pulumi.Input['DatabaseInfoArgs']]] databases_info: List of DatabaseInfo
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] project_name: Name of the project
:param pulumi.Input['SqlConnectionInfoArgs'] source_connection_info: Information for connecting to source
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input['SqlConnectionInfoArgs'] target_connection_info: Information for connecting to target
"""
pulumi.set(__self__, "group_name", group_name)
pulumi.set(__self__, "service_name", service_name)
pulumi.set(__self__, "source_platform", source_platform)
pulumi.set(__self__, "target_platform", target_platform)
if databases_info is not None:
pulumi.set(__self__, "databases_info", databases_info)
if location is not None:
pulumi.set(__self__, "location", location)
if project_name is not None:
pulumi.set(__self__, "project_name", project_name)
if source_connection_info is not None:
pulumi.set(__self__, "source_connection_info", source_connection_info)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if target_connection_info is not None:
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="groupName")
def group_name(self) -> pulumi.Input[str]:
"""
Name of the resource group
"""
return pulumi.get(self, "group_name")
@group_name.setter
def group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "group_name", value)
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> pulumi.Input[str]:
"""
Name of the service
"""
return pulumi.get(self, "service_name")
@service_name.setter
def service_name(self, value: pulumi.Input[str]):
pulumi.set(self, "service_name", value)
@property
@pulumi.getter(name="sourcePlatform")
def source_platform(self) -> pulumi.Input[Union[str, 'ProjectSourcePlatform']]:
"""
Source platform for the project
"""
return pulumi.get(self, "source_platform")
@source_platform.setter
def source_platform(self, value: pulumi.Input[Union[str, 'ProjectSourcePlatform']]):
pulumi.set(self, "source_platform", value)
@property
@pulumi.getter(name="targetPlatform")
def target_platform(self) -> pulumi.Input[Union[str, 'ProjectTargetPlatform']]:
"""
Target platform for the project
"""
return pulumi.get(self, "target_platform")
@target_platform.setter
def target_platform(self, value: pulumi.Input[Union[str, 'ProjectTargetPlatform']]):
pulumi.set(self, "target_platform", value)
@property
@pulumi.getter(name="databasesInfo")
def databases_info(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseInfoArgs']]]]:
"""
List of DatabaseInfo
"""
return pulumi.get(self, "databases_info")
@databases_info.setter
def databases_info(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseInfoArgs']]]]):
pulumi.set(self, "databases_info", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="projectName")
def project_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the project
"""
return pulumi.get(self, "project_name")
@project_name.setter
def project_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_name", value)
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> Optional[pulumi.Input['SqlConnectionInfoArgs']]:
"""
Information for connecting to source
"""
return pulumi.get(self, "source_connection_info")
@source_connection_info.setter
def source_connection_info(self, value: Optional[pulumi.Input['SqlConnectionInfoArgs']]):
pulumi.set(self, "source_connection_info", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> Optional[pulumi.Input['SqlConnectionInfoArgs']]:
"""
Information for connecting to target
"""
return pulumi.get(self, "target_connection_info")
@target_connection_info.setter
def target_connection_info(self, value: Optional[pulumi.Input['SqlConnectionInfoArgs']]):
pulumi.set(self, "target_connection_info", value)
class Project(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
databases_info: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseInfoArgs']]]]] = None,
group_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
project_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
source_connection_info: Optional[pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']]] = None,
source_platform: Optional[pulumi.Input[Union[str, 'ProjectSourcePlatform']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
target_connection_info: Optional[pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']]] = None,
target_platform: Optional[pulumi.Input[Union[str, 'ProjectTargetPlatform']]] = None,
__props__=None):
"""
A project resource
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseInfoArgs']]]] databases_info: List of DatabaseInfo
:param pulumi.Input[str] group_name: Name of the resource group
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] project_name: Name of the project
:param pulumi.Input[str] service_name: Name of the service
:param pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']] source_connection_info: Information for connecting to source
:param pulumi.Input[Union[str, 'ProjectSourcePlatform']] source_platform: Source platform for the project
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']] target_connection_info: Information for connecting to target
:param pulumi.Input[Union[str, 'ProjectTargetPlatform']] target_platform: Target platform for the project
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ProjectArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A project resource
:param str resource_name: The name of the resource.
:param ProjectArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ProjectArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
databases_info: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseInfoArgs']]]]] = None,
group_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
project_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
source_connection_info: Optional[pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']]] = None,
source_platform: Optional[pulumi.Input[Union[str, 'ProjectSourcePlatform']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
target_connection_info: Optional[pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']]] = None,
target_platform: Optional[pulumi.Input[Union[str, 'ProjectTargetPlatform']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ProjectArgs.__new__(ProjectArgs)
__props__.__dict__["databases_info"] = databases_info
if group_name is None and not opts.urn:
raise TypeError("Missing required property 'group_name'")
__props__.__dict__["group_name"] = group_name
__props__.__dict__["location"] = location
__props__.__dict__["project_name"] = project_name
if service_name is None and not opts.urn:
raise TypeError("Missing required property 'service_name'")
__props__.__dict__["service_name"] = service_name
__props__.__dict__["source_connection_info"] = source_connection_info
if source_platform is None and not opts.urn:
raise TypeError("Missing required property 'source_platform'")
__props__.__dict__["source_platform"] = source_platform
__props__.__dict__["tags"] = tags
__props__.__dict__["target_connection_info"] = target_connection_info
if target_platform is None and not opts.urn:
raise TypeError("Missing required property 'target_platform'")
__props__.__dict__["target_platform"] = target_platform
__props__.__dict__["creation_time"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:datamigration/v20180315preview:Project"), pulumi.Alias(type_="azure-native:datamigration:Project"), pulumi.Alias(type_="azure-nextgen:datamigration:Project"), pulumi.Alias(type_="azure-native:datamigration/v20171115preview:Project"), pulumi.Alias(type_="azure-nextgen:datamigration/v20171115preview:Project"), pulumi.Alias(type_="azure-native:datamigration/v20180331preview:Project"), pulumi.Alias(type_="azure-nextgen:datamigration/v20180331preview:Project"), pulumi.Alias(type_="azure-native:datamigration/v20180419:Project"), pulumi.Alias(type_="azure-nextgen:datamigration/v20180419:Project"), pulumi.Alias(type_="azure-native:datamigration/v20180715preview:Project"), pulumi.Alias(type_="azure-nextgen:datamigration/v20180715preview:Project")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Project, __self__).__init__(
'azure-native:datamigration/v20180315preview:Project',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Project':
"""
Get an existing Project resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ProjectArgs.__new__(ProjectArgs)
__props__.__dict__["creation_time"] = None
__props__.__dict__["databases_info"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["source_connection_info"] = None
__props__.__dict__["source_platform"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["target_connection_info"] = None
__props__.__dict__["target_platform"] = None
__props__.__dict__["type"] = None
return Project(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="creationTime")
def creation_time(self) -> pulumi.Output[str]:
"""
UTC Date and time when project was created
"""
return pulumi.get(self, "creation_time")
@property
@pulumi.getter(name="databasesInfo")
def databases_info(self) -> pulumi.Output[Optional[Sequence['outputs.DatabaseInfoResponse']]]:
"""
List of DatabaseInfo
"""
return pulumi.get(self, "databases_info")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The project's provisioning state
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> pulumi.Output[Optional['outputs.SqlConnectionInfoResponse']]:
"""
Information for connecting to source
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="sourcePlatform")
def source_platform(self) -> pulumi.Output[str]:
"""
Source platform for the project
"""
return pulumi.get(self, "source_platform")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> pulumi.Output[Optional['outputs.SqlConnectionInfoResponse']]:
"""
Information for connecting to target
"""
return pulumi.get(self, "target_connection_info")
@property
@pulumi.getter(name="targetPlatform")
def target_platform(self) -> pulumi.Output[str]:
"""
Target platform for the project
"""
return pulumi.get(self, "target_platform")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
|
py | b416510fce78b27e86f210ea9fa30ee2acaf3544 |
import threading
import sys
import Client
import PyQt_Server_Worker
import time
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
# Create the main Qt app, passing command line arguments
class ClientWindow(QMainWindow):
def __init__(self):
super(ClientWindow,self).__init__()
self.setWindowTitle('Client')
self.resize(400, 600)
self.initServer()
self.initUI()
def initServer(self):
self.threadpool = QThreadPool()
self.client = Client.Client()
print("Multithreading with maximum %d threads" % self.threadpool.maxThreadCount())
def initUI(self):
        flags = Qt.WindowFlags(Qt.FramelessWindowHint)  # WA_TranslucentBackground is a widget attribute, applied below via setAttribute
self.setWindowFlags(flags)
self.setAttribute(Qt.WA_TranslucentBackground, True)
#self.setWindowFlag(Qt.WindowMinimizeButtonHint, True)
#self.setWindowFlag(Qt.WindowMaximizeButtonHint, True)
self.setWindowOpacity(0.6)
self.blur_effect = QGraphicsBlurEffect()
self.label_background = QLabel("transparent ",self)
self.label_background.setGraphicsEffect(self.blur_effect)
self.label_background.setText("Bite")
self.label_background.setGeometry(0,0,400,600)
self.label_background.move(0,0)
pixmap = QPixmap("Lucian.jpg").scaled(self.size())
self.label_background.setPixmap(pixmap)
self.quit = QPushButton(self)
self.quit.setText("X")
self.quit.clicked.connect(self.close)
self.quit.setGeometry(360,20,20,20)
self.onoffbutton = QPushButton(self)
self.onoffbutton.setText("Start")
self.onoffbutton.clicked.connect(self.startstop)
self.onoffbutton.setGeometry(50,500,80,30)
self.label_serverstatus = QLabel(self)
self.label_serverstatus.setStyleSheet("background-color: red")
#self.label_serverstatus.setGraphicsEffect(self.blur_effect)
self.label_serverstatus.setGeometry(0,0,15,15)
self.label_serverstatus.move(150,510)
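    # A frameless window has no title bar to drag, so track mouse presses/moves
    # and move the window by the cursor delta instead.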
def mousePressEvent(self, event):
self.oldPos = event.globalPos()
def mouseMoveEvent(self, event):
delta = QPoint (event.globalPos() - self.oldPos)
#print(delta)
self.move(self.x() + delta.x(), self.y() + delta.y())
self.oldPos = event.globalPos()
def startstop(self):
if self.onoffbutton.text() == "Start":
self.onoffbutton.setText("Stop")
self.worker = PyQt_Server_Worker.Worker(self.client.start)
self.threadpool.start(self.worker)
self.label_serverstatus.setStyleSheet("background-color: green")
else:
self.onoffbutton.setText("Start")
self.client.stop()
self.threadpool.waitForDone(100)
self.worker.quit()
self.label_serverstatus.setStyleSheet("background-color: red")
def hello(self,mot):
time.sleep(10)
print(mot)
return None
def window():
app = QApplication(sys.argv)
win = ClientWindow()
win.show()
sys.exit(app.exec_())
#thread_gui = threading.Thread(target=window)
#thread_gui.start()
window()
# Run the app, passing its exit code back through `sys.exit()`
# The app will exit when the close button is pressed on the main window.
|
py | b416520bd0c17d4c94fdf69c0f58ab3ef1ad3c81 | import RPi.GPIO as GPIO
import tplink_smartplug_py3 as plug
GPIO.setmode(GPIO.BCM)
GPIO.setup(20,GPIO.IN)
GPIO.setup(21,GPIO.OUT)
while True:
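    # When pin 20 reads HIGH, drive pin 21 HIGH and send an 'on' command to the TP-Link plug;
    # otherwise drive pin 21 LOW. Note the plug command is re-sent on every pass while the input stays HIGH.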
if GPIO.input(20)==GPIO.HIGH:
GPIO.output(21,GPIO.HIGH)
plug.control('172.41.195.22', 'on')
# plug.control('172.41.195.22', 'off')
else:
GPIO.output(21,GPIO.LOW)
|
py | b41652cd5799a335e51730ea1ef102bac8265552 | #!/usr/bin/env python
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/imageworks/OpenShadingLanguage
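# 'command' and 'testshade' are expected to be provided by the OSL testsuite's runtest harness,
# which executes this script; 'outputs' lists the files the harness compares against references.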
command += testshade("-g 512 512 -od uint8 -o Cout out.tif test")
outputs = [ "out.txt", "out.tif" ]
|
py | b41653268b898b7e9b822f29329e154b9ebb9c0f | inches = float(input())
def convert_to_cm(n):
cm = n * 2.54
return cm
print(convert_to_cm(inches))
|
py | b41653e08c054943dd7c36d4fb5d1832d0e4b667 | # Generated by Django 2.2 on 2020-05-25 17:21
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0011_update_proxy_permissions'),
]
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('email', models.EmailField(max_length=255, unique=True)),
('name', models.CharField(max_length=255)),
('is_active', models.BooleanField(default=True)),
('is_staff', models.BooleanField(default=False)),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'abstract': False,
},
),
]
|
py | b41654831d8fdef17a0891623f743a2b2d912f4d | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
for app in orm.SocialApp.objects.all():
app.client_id = app.key
app.key = ''
app.save()
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 12, 22, 12, 51, 18, 10544)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 12, 22, 12, 51, 18, 10426)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'socialaccount.socialaccount': {
'Meta': {'unique_together': "(('provider', 'uid'),)", 'object_name': 'SocialAccount'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'extra_data': ('allauth.socialaccount.fields.JSONField', [], {'default': "'{}'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'socialaccount.socialapp': {
'Meta': {'object_name': 'SocialApp'},
'client_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'secret': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False', 'blank': 'True'})
},
'socialaccount.socialtoken': {
'Meta': {'unique_together': "(('app', 'account'),)", 'object_name': 'SocialToken'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialAccount']"}),
'app': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['socialaccount.SocialApp']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'token_secret': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['socialaccount']
|
py | b41654dc6b30bb3e58f1e0bbc97907697afb4553 | """A fast, drop-in replacement for pygments ``get_*()`` and ``guess_*()`` functions.
The following pygments API functions are currently supplied here::
from pygments_cache import get_lexer_for_filename, guess_lexer_for_filename
from pygments_cache import get_formatter_for_filename, get_formatter_by_name
from pygments_cache import get_style_by_name, get_all_styles
from pygments_cache import get_filter_by_name
The cache itself is stored at the location given by the ``$PYGMENTS_CACHE_FILE``
environment variable, or by default at ``~/.local/share/pygments-cache/cache.py``.
The cache file is created on first use, if it does not already exist.
"""
import os
import importlib
# Global storage variables
__version__ = "0.1.1"
CACHE = None
CUSTOM_STYLES = {}
DEBUG = False
def _print_duplicate_message(duplicates):
import sys
for filename, vals in sorted(duplicates.items()):
msg = "for {0} ambiquity between:\n ".format(filename)
vals = [m + ":" + c for m, c in vals]
msg += "\n ".join(sorted(vals))
print(msg, file=sys.stderr)
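# Build the lexer half of the cache: a mapping from file extensions (and literal filenames)
# to (module, classname) pairs, so a lexer can be imported lazily without loading all of pygments.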
def _discover_lexers():
import inspect
from pygments.lexers import get_all_lexers, find_lexer_class
# maps file extension (and names) to (module, classname) tuples
default_exts = {
# C / C++
".h": ("pygments.lexers.c_cpp", "CLexer"),
".hh": ("pygments.lexers.c_cpp", "CppLexer"),
".cp": ("pygments.lexers.c_cpp", "CppLexer"),
# python
".py": ("pygments.lexers.python", "Python3Lexer"),
".pyw": ("pygments.lexers.python", "Python3Lexer"),
".sc": ("pygments.lexers.python", "Python3Lexer"),
".tac": ("pygments.lexers.python", "Python3Lexer"),
"SConstruct": ("pygments.lexers.python", "Python3Lexer"),
"SConscript": ("pygments.lexers.python", "Python3Lexer"),
".sage": ("pygments.lexers.python", "Python3Lexer"),
".pytb": ("pygments.lexers.python", "Python3TracebackLexer"),
# perl
".t": ("pygments.lexers.perl", "Perl6Lexer"),
".pl": ("pygments.lexers.perl", "Perl6Lexer"),
".pm": ("pygments.lexers.perl", "Perl6Lexer"),
# asm
".s": ("pygments.lexers.asm", "GasLexer"),
".S": ("pygments.lexers.asm", "GasLexer"),
".asm": ("pygments.lexers.asm", "NasmLexer"),
".ASM": ("pygments.lexers.asm", "NasmLexer"),
# Antlr
".g": ("pygments.lexers.parsers", "AntlrCppLexer"),
".G": ("pygments.lexers.parsers", "AntlrCppLexer"),
# XML
".xml": ("pygments.lexers.html", "XmlLexer"),
".xsl": ("pygments.lexers.html", "XsltLexer"),
".xslt": ("pygments.lexers.html", "XsltLexer"),
# ASP
".axd": ("pygments.lexers.dotnet", "CSharpAspxLexer"),
".asax": ("pygments.lexers.dotnet", "CSharpAspxLexer"),
".ascx": ("pygments.lexers.dotnet", "CSharpAspxLexer"),
".ashx": ("pygments.lexers.dotnet", "CSharpAspxLexer"),
".asmx": ("pygments.lexers.dotnet", "CSharpAspxLexer"),
".aspx": ("pygments.lexers.dotnet", "CSharpAspxLexer"),
# misc
".b": ("pygments.lexers.esoteric", "BrainfuckLexer"),
".j": ("pygments.lexers.jvm", "JasminLexer"),
".m": ("pygments.lexers.matlab", "MatlabLexer"),
".n": ("pygments.lexers.dotnet", "NemerleLexer"),
".p": ("pygments.lexers.pawn", "PawnLexer"),
".v": ("pygments.lexers.theorem", "CoqLexer"),
".as": ("pygments.lexers.actionscript", "ActionScript3Lexer"),
".fs": ("pygments.lexers.forth", "ForthLexer"),
".hy": ("pygments.lexers.lisp", "HyLexer"),
".ts": ("pygments.lexers.javascript", "TypeScriptLexer"),
".rl": ("pygments.lexers.parsers", "RagelCppLexer"),
".bas": ("pygments.lexers.basic", "QBasicLexer"),
".bug": ("pygments.lexers.modeling", "BugsLexer"),
".ecl": ("pygments.lexers.ecl", "ECLLexer"),
".inc": ("pygments.lexers.php", "PhpLexer"),
".inf": ("pygments.lexers.configs", "IniLexer"),
".pro": ("pygments.lexers.prolog", "PrologLexer"),
".sql": ("pygments.lexers.sql", "SqlLexer"),
".txt": ("pygments.lexers.special", "TextLexer"),
".html": ("pygments.lexers.html", "HtmlLexer"),
}
exts = {}
lexers = {"exts": exts}
if DEBUG:
from collections import defaultdict
duplicates = defaultdict(set)
for longname, _, filenames, _ in get_all_lexers():
cls = find_lexer_class(longname)
mod = inspect.getmodule(cls)
val = (mod.__name__, cls.__name__)
for filename in filenames:
if filename.startswith("*."):
filename = filename[1:]
if "*" in filename:
continue
if (
DEBUG
and filename in exts
and exts[filename] != val
and filename not in default_exts
):
duplicates[filename].add(val)
duplicates[filename].add(exts[filename])
exts[filename] = val
    # remove some ambiguity
exts.update(default_exts)
# print duplicate message
if DEBUG:
_print_duplicate_message(duplicates)
return lexers
def _discover_formatters():
import inspect
from pygments.formatters import get_all_formatters
# maps file extension (and names) to (module, classname) tuples
default_exts = {}
exts = {}
# maps formatter 'name' (not the class name) and alias to (module, classname) tuples
default_names = {}
names = {}
formatters = {"exts": exts, "names": names}
if DEBUG:
from collections import defaultdict
duplicates = defaultdict(set)
for cls in get_all_formatters():
mod = inspect.getmodule(cls)
val = (mod.__name__, cls.__name__)
        # add extensions
for filename in cls.filenames:
if filename.startswith("*."):
filename = filename[1:]
if "*" in filename:
continue
if (
DEBUG
and filename in exts
and exts[filename] != val
and filename not in default_exts
):
duplicates[filename].add(val)
duplicates[filename].add(exts[filename])
exts[filename] = val
# add names and aliases
names[cls.name] = val
for alias in cls.aliases:
if (
DEBUG
and alias in names
and names[alias] != val
and alias not in default_names
):
duplicates[alias].add(val)
duplicates[alias].add(names[alias])
names[alias] = val
    # remove some ambiguity
exts.update(default_exts)
names.update(default_names)
    # print duplicate message
if DEBUG:
_print_duplicate_message(duplicates)
return formatters
def _discover_styles():
import inspect
from pygments.styles import get_all_styles, get_style_by_name
# maps style 'name' (not the class name) and aliases to (module, classname) tuples
default_names = {}
names = {}
styles = {"names": names}
if DEBUG:
from collections import defaultdict
duplicates = defaultdict(set)
for name in get_all_styles():
cls = get_style_by_name(name)
mod = inspect.getmodule(cls)
val = (mod.__name__, cls.__name__)
if DEBUG and name in names and names[name] != val and name not in default_names:
duplicates[name].add(val)
duplicates[name].add(names[name])
names[name] = val
    # remove some ambiguity
names.update(default_names)
    # print duplicate message
if DEBUG:
_print_duplicate_message(duplicates)
return styles
def _discover_filters():
import inspect
from pygments.filters import get_all_filters, get_filter_by_name
# maps filter 'name' (not the class name) to (module, classname) tuples
default_names = {}
names = {}
filters = {"names": names}
if DEBUG:
from collections import defaultdict
duplicates = defaultdict(set)
for name in get_all_filters():
filter = get_filter_by_name(name)
cls = type(filter)
mod = inspect.getmodule(cls)
val = (mod.__name__, cls.__name__)
if DEBUG and name in names and names[name] != val and name not in default_names:
duplicates[name].add(val)
duplicates[name].add(names[name])
names[name] = val
    # remove some ambiguity
names.update(default_names)
    # print duplicate message
if DEBUG:
_print_duplicate_message(duplicates)
return filters
def build_cache():
"""Does the hard work of building a cache from nothing."""
cache = {}
cache["lexers"] = _discover_lexers()
cache["formatters"] = _discover_formatters()
cache["styles"] = _discover_styles()
cache["filters"] = _discover_filters()
return cache
def cache_filename():
"""Gets the name of the cache file to use."""
# Configuration variables read from the environment
if "PYGMENTS_CACHE_FILE" in os.environ:
return os.environ["PYGMENTS_CACHE_FILE"]
else:
return os.path.join(
os.environ.get(
"XDG_DATA_HOME",
os.path.join(os.path.expanduser("~"), ".local", "share"),
),
"pygments-cache",
"cache.py",
)
def add_custom_style(name, style):
"""Register custom style to be able to retrieve it by ``get_style_by_name``.
Parameters
----------
name : str
Style name.
style : pygments.Style
Custom style to add.
"""
CUSTOM_STYLES[name] = style
def load(filename):
"""Loads the cache from a filename."""
global CACHE
with open(filename) as f:
s = f.read()
ctx = globals()
CACHE = eval(s, ctx, ctx)
return CACHE
def write_cache(filename):
"""Writes the current cache to the file"""
from pprint import pformat
d = os.path.dirname(filename)
os.makedirs(d, exist_ok=True)
s = pformat(CACHE)
with open(filename, "w") as f:
f.write(s)
def load_or_build():
"""Loads the cache from disk. If the cache does not exist,
this will build and write it out.
"""
global CACHE
fname = cache_filename()
if os.path.exists(fname):
load(fname)
else:
import sys
if DEBUG:
print("pygments cache not found, building...", file=sys.stderr)
CACHE = build_cache()
if DEBUG:
print("...writing cache to " + fname, file=sys.stderr)
write_cache(fname)
#
# pygments interface
#
def get_lexer_for_filename(filename, text="", **options):
"""Gets a lexer from a filename (usually via the filename extension).
This mimics the behavior of ``pygments.lexers.get_lexer_for_filename()``
and ``pygments.lexers.guess_lexer_for_filename()``.
"""
if CACHE is None:
load_or_build()
exts = CACHE["lexers"]["exts"]
fname = os.path.basename(filename)
key = fname if fname in exts else os.path.splitext(fname)[1]
if key in exts:
modname, clsname = exts[key]
mod = importlib.import_module(modname)
cls = getattr(mod, clsname)
lexer = cls(**options)
else:
# couldn't find lexer in cache, fallback to the hard way
import inspect
from pygments.lexers import guess_lexer_for_filename
lexer = guess_lexer_for_filename(filename, text, **options)
# add this filename to the cache for future use
cls = type(lexer)
mod = inspect.getmodule(cls)
exts[fname] = (mod.__name__, cls.__name__)
write_cache(cache_filename())
return lexer
guess_lexer_for_filename = get_lexer_for_filename
def get_formatter_for_filename(fn, **options):
"""Gets a formatter instance from a filename (usually via the filename
extension). This mimics the behavior of
``pygments.formatters.get_formatter_for_filename()``.
"""
if CACHE is None:
load_or_build()
exts = CACHE["formatters"]["exts"]
fname = os.path.basename(fn)
key = fname if fname in exts else os.path.splitext(fname)[1]
if key in exts:
modname, clsname = exts[key]
mod = importlib.import_module(modname)
cls = getattr(mod, clsname)
formatter = cls(**options)
else:
# couldn't find formatter in cache, fallback to the hard way
import inspect
from pygments.formatters import get_formatter_for_filename
formatter = get_formatter_for_filename(fn, **options)
# add this filename to the cache for future use
cls = type(formatter)
mod = inspect.getmodule(cls)
exts[fname] = (mod.__name__, cls.__name__)
write_cache(cache_filename())
return formatter
def get_formatter_by_name(alias, **options):
"""Gets a formatter instance from its name or alias.
This mimics the behavior of ``pygments.formatters.get_formatter_by_name()``.
"""
if CACHE is None:
load_or_build()
names = CACHE["formatters"]["names"]
if alias in names:
modname, clsname = names[alias]
mod = importlib.import_module(modname)
cls = getattr(mod, clsname)
formatter = cls(**options)
else:
# couldn't find formatter in cache, fallback to the hard way
import inspect
from pygments.formatters import get_formatter_by_name
formatter = get_formatter_by_name(alias, **options)
        # add this alias to the cache for future use
cls = type(formatter)
mod = inspect.getmodule(cls)
names[alias] = (mod.__name__, cls.__name__)
write_cache(cache_filename())
return formatter
def get_style_by_name(name):
"""Gets a style class from its name or alias.
This mimics the behavior of ``pygments.styles.get_style_by_name()``.
"""
if CACHE is None:
load_or_build()
names = CACHE["styles"]["names"]
if name in names:
modname, clsname = names[name]
mod = importlib.import_module(modname)
style = getattr(mod, clsname)
elif name in CUSTOM_STYLES:
style = CUSTOM_STYLES[name]
else:
# couldn't find style in cache, fallback to the hard way
import inspect
from pygments.styles import get_style_by_name
style = get_style_by_name(name)
# add this style to the cache for future use
mod = inspect.getmodule(style)
names[name] = (mod.__name__, style.__name__)
write_cache(cache_filename())
return style
def get_all_styles():
"""Iterable through all known style names.
This mimics the behavior of ``pygments.styles.get_all_styles``.
"""
if CACHE is None:
load_or_build()
yield from CACHE["styles"]["names"]
yield from CUSTOM_STYLES
def get_filter_by_name(filtername, **options):
"""Gets a filter instance from its name. This mimics the behavior of
    ``pygments.filters.get_filter_by_name()``.
"""
if CACHE is None:
load_or_build()
names = CACHE["filters"]["names"]
if filtername in names:
modname, clsname = names[filtername]
mod = importlib.import_module(modname)
cls = getattr(mod, clsname)
filter = cls(**options)
else:
        # couldn't find filter in cache, fallback to the hard way
import inspect
from pygments.filters import get_filter_by_name
filter = get_filter_by_name(filtername, **options)
# add this filter to the cache for future use
cls = type(filter)
mod = inspect.getmodule(cls)
names[filtername] = (mod.__name__, cls.__name__)
write_cache(cache_filename())
return filter
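

# Usage sketch for the cached lookups defined above. The file name
# "example.py" and the formatter/style names are arbitrary choices for
# illustration; the first call builds and writes the on-disk cache if it
# does not exist yet.
if __name__ == "__main__":
    lexer = get_lexer_for_filename("example.py")
    formatter = get_formatter_by_name("terminal")
    style = get_style_by_name("default")
    print(type(lexer).__name__, type(formatter).__name__, style.__name__)
    print(sorted(get_all_styles())[:5])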
|
py | b41656c70551fe6ce8f1101766f6711d64c4ab75 | def error():
    raise Exception("Hello")
error() |
py | b41658a5e9832890da5ef0160448f19b20c5efd6 | from __future__ import absolute_import, division, unicode_literals
from future.builtins import int, open, str
from hashlib import md5
import os
import re
try:
from urllib.parse import quote, unquote
except ImportError:
from urllib import quote, unquote
from django.apps import apps
from django.contrib import admin
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.sites.models import Site
from django.core.exceptions import ObjectDoesNotExist
from django.core.files import File
from django.core.files.storage import default_storage
from django.core.urlresolvers import reverse, resolve, NoReverseMatch
from django.db.models import Model
from django.template import Node, Template, TemplateSyntaxError
from django.template.base import (TOKEN_BLOCK, TOKEN_COMMENT,
TOKEN_TEXT, TOKEN_VAR, TextNode)
from django.template.defaultfilters import escape
from django.template.loader import get_template
from django.utils import translation
from django.utils.html import strip_tags
from django.utils.text import capfirst
from django.utils.safestring import SafeText, mark_safe
from mezzanine.conf import settings
from mezzanine.core.fields import RichTextField
from mezzanine.core.forms import get_edit_form
from mezzanine.utils.cache import nevercache_token, cache_installed
from mezzanine.utils.html import decode_entities
from mezzanine.utils.importing import import_dotted_path
from mezzanine.utils.sites import current_site_id, has_site_permission
from mezzanine.utils.urls import admin_url, home_slug
from mezzanine.utils.views import is_editable
from mezzanine import template
register = template.Library()
if "compressor" in settings.INSTALLED_APPS:
@register.tag
def compress(parser, token):
"""
Shadows django-compressor's compress tag so it can be
loaded from ``mezzanine_tags``, allowing us to provide
a dummy version when django-compressor isn't installed.
"""
from compressor.templatetags.compress import compress
return compress(parser, token)
else:
@register.to_end_tag
def compress(parsed, context, token):
"""
Dummy tag for fallback when django-compressor isn't installed.
"""
return parsed
def initialize_nevercache():
if cache_installed():
@register.tag
def nevercache(parser, token):
"""
Tag for two phased rendering. Converts enclosed template
code and content into text, which gets rendered separately
in ``mezzanine.core.middleware.UpdateCacheMiddleware``.
This is to bypass caching for the enclosed code and content.
"""
text = []
end_tag = "endnevercache"
tag_mapping = {
TOKEN_TEXT: ("", ""),
TOKEN_VAR: ("{{", "}}"),
TOKEN_BLOCK: ("{%", "%}"),
TOKEN_COMMENT: ("{#", "#}"),
}
delimiter = nevercache_token()
while parser.tokens:
token = parser.next_token()
token_type = token.token_type
if token_type == TOKEN_BLOCK and token.contents == end_tag:
return TextNode(delimiter + "".join(text) + delimiter)
start, end = tag_mapping[token_type]
text.append("%s%s%s" % (start, token.contents, end))
parser.unclosed_block_tag(end_tag)
else:
@register.to_end_tag
def nevercache(parsed, context, token):
"""
Dummy fallback ``nevercache`` for when caching is not
configured.
"""
return parsed
initialize_nevercache()
class_re = re.compile(r'(?<=class=["\'])(.*?)(?=["\'])')
@register.filter
def add_class(value, css_class):
"""
    Add a CSS class while rendering a form field. Used to inject form-control
    into form input fields for full Bootstrap 3.0 functionality.
"""
    string = str(value)
match = class_re.search(string)
if match:
m = re.search(r'^%s$|^%s\s|\s%s\s|\s%s$' % (css_class, css_class,
css_class, css_class),
match.group(1))
        print(match.group(1))
if not m:
return mark_safe(class_re.sub(match.group(1) + " " + css_class,
string))
else:
return mark_safe(string.replace('>', ' class="%s">' % css_class, 1))
return value
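

# Template usage sketch for the filter above ("form.field" stands in for any
# bound form field available in the template context):
#
#     {{ form.field|add_class:"form-control" }}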
@register.simple_tag(takes_context=True)
def fields_for(context, form, template="includes/form_fields.html"):
"""
Renders fields for a form with an optional template choice.
"""
context["form_for_fields"] = form
return get_template(template).render(context.flatten())
@register.inclusion_tag("includes/form_errors.html")
def errors_for(form):
"""
Renders an alert if the form has any errors.
"""
return {"form": form}
@register.filter
def sort_by(items, attr):
"""
General sort filter - sorts by either attribute or key.
"""
def key_func(item):
try:
return getattr(item, attr)
except AttributeError:
try:
return item[attr]
except TypeError:
getattr(item, attr) # Reraise AttributeError
return sorted(items, key=key_func)
@register.filter
def is_installed(app_name):
"""
Returns ``True`` if the given app name is in the
``INSTALLED_APPS`` setting.
"""
from warnings import warn
warn("The is_installed filter is deprecated. Please use the tag "
"{% ifinstalled appname %}{% endifinstalled %}")
return app_name in settings.INSTALLED_APPS
@register.tag
def ifinstalled(parser, token):
"""
Old-style ``if`` tag that renders contents if the given app is
installed. The main use case is:
{% ifinstalled app_name %}
{% include "app_name/template.html" %}
{% endifinstalled %}
so we need to manually pull out all tokens if the app isn't
installed, since if we used a normal ``if`` tag with a False arg,
the include tag will still try and find the template to include.
"""
try:
tag, app = token.split_contents()
except ValueError:
raise TemplateSyntaxError("ifinstalled should be in the form: "
"{% ifinstalled app_name %}"
"{% endifinstalled %}")
end_tag = "end" + tag
unmatched_end_tag = 1
if app.strip("\"'") not in settings.INSTALLED_APPS:
while unmatched_end_tag:
token = parser.tokens.pop(0)
if token.token_type == TOKEN_BLOCK:
block_name = token.contents.split()[0]
if block_name == tag:
unmatched_end_tag += 1
if block_name == end_tag:
unmatched_end_tag -= 1
parser.tokens.insert(0, token)
nodelist = parser.parse((end_tag,))
parser.delete_first_token()
class IfInstalledNode(Node):
def render(self, context):
return nodelist.render(context)
return IfInstalledNode()
@register.render_tag
def set_short_url_for(context, token):
"""
Sets the ``short_url`` attribute of the given model for share
links in the template.
"""
obj = context[token.split_contents()[1]]
obj.set_short_url()
return ""
@register.simple_tag
def gravatar_url(email, size=32):
"""
Return the full URL for a Gravatar given an email hash.
"""
bits = (md5(email.lower().encode("utf-8")).hexdigest(), size)
return "//www.gravatar.com/avatar/%s?s=%s&d=identicon&r=PG" % bits
@register.to_end_tag
def metablock(parsed):
"""
Remove HTML tags, entities and superfluous characters from
meta blocks.
"""
parsed = " ".join(parsed.replace("\n", "").split()).replace(" ,", ",")
return escape(strip_tags(decode_entities(parsed)))
@register.inclusion_tag("includes/pagination.html", takes_context=True)
def pagination_for(context, current_page, page_var="page", exclude_vars=""):
"""
Include the pagination template and data for persisting querystring
in pagination links. Can also contain a comma separated string of
var names in the current querystring to exclude from the pagination
links, via the ``exclude_vars`` arg.
"""
querystring = context["request"].GET.copy()
exclude_vars = [v for v in exclude_vars.split(",") if v] + [page_var]
for exclude_var in exclude_vars:
if exclude_var in querystring:
del querystring[exclude_var]
querystring = querystring.urlencode()
return {
"current_page": current_page,
"querystring": querystring,
"page_var": page_var,
}
@register.inclusion_tag("includes/search_form.html", takes_context=True)
def search_form(context, search_model_names=None):
"""
Includes the search form with a list of models to use as choices
for filtering the search by. Models should be a string with models
in the format ``app_label.model_name`` separated by spaces. The
string ``all`` can also be used, in which case the models defined
by the ``SEARCH_MODEL_CHOICES`` setting will be used.
"""
template_vars = {
"request": context["request"],
}
if not search_model_names or not settings.SEARCH_MODEL_CHOICES:
search_model_names = []
elif search_model_names == "all":
search_model_names = list(settings.SEARCH_MODEL_CHOICES)
else:
search_model_names = search_model_names.split(" ")
search_model_choices = []
for model_name in search_model_names:
try:
model = apps.get_model(*model_name.split(".", 1))
except LookupError:
pass
else:
verbose_name = model._meta.verbose_name_plural.capitalize()
search_model_choices.append((verbose_name, model_name))
template_vars["search_model_choices"] = sorted(search_model_choices)
return template_vars
@register.simple_tag
def thumbnail(image_url, width, height, upscale=True, quality=95, left=.5,
top=.5, padding=False, padding_color="#fff"):
"""
Given the URL to an image, resizes the image using the given width
and height on the first time it is requested, and returns the URL
to the new resized image. If width or height are zero then original
ratio is maintained. When ``upscale`` is False, images smaller than
the given size will not be grown to fill that size. The given width
and height thus act as maximum dimensions.
"""
if not image_url:
return ""
try:
from PIL import Image, ImageFile, ImageOps
except ImportError:
return ""
image_url = unquote(str(image_url)).split("?")[0]
if image_url.startswith(settings.MEDIA_URL):
image_url = image_url.replace(settings.MEDIA_URL, "", 1)
image_dir, image_name = os.path.split(image_url)
image_prefix, image_ext = os.path.splitext(image_name)
filetype = {".png": "PNG", ".gif": "GIF"}.get(image_ext, "JPEG")
thumb_name = "%s-%sx%s" % (image_prefix, width, height)
if not upscale:
thumb_name += "-no-upscale"
if left != .5 or top != .5:
left = min(1, max(0, left))
top = min(1, max(0, top))
thumb_name = "%s-%sx%s" % (thumb_name, left, top)
thumb_name += "-padded-%s" % padding_color if padding else ""
thumb_name = "%s%s" % (thumb_name, image_ext)
# `image_name` is used here for the directory path, as each image
# requires its own sub-directory using its own name - this is so
# we can consistently delete all thumbnails for an individual
# image, which is something we do in filebrowser when a new image
# is written, allowing us to purge any previously generated
# thumbnails that may match a new image name.
thumb_dir = os.path.join(settings.MEDIA_ROOT, image_dir,
settings.THUMBNAILS_DIR_NAME, image_name)
if not os.path.exists(thumb_dir):
try:
os.makedirs(thumb_dir)
except OSError:
pass
thumb_path = os.path.join(thumb_dir, thumb_name)
thumb_url = "%s/%s/%s" % (settings.THUMBNAILS_DIR_NAME,
quote(image_name.encode("utf-8")),
quote(thumb_name.encode("utf-8")))
image_url_path = os.path.dirname(image_url)
if image_url_path:
thumb_url = "%s/%s" % (image_url_path, thumb_url)
try:
thumb_exists = os.path.exists(thumb_path)
except UnicodeEncodeError:
        # The image was saved to a filesystem with utf-8 support, but the
        # locale has since changed and the filesystem no longer supports
        # utf-8.
from mezzanine.core.exceptions import FileSystemEncodingChanged
raise FileSystemEncodingChanged()
if thumb_exists:
# Thumbnail exists, don't generate it.
return thumb_url
elif not default_storage.exists(image_url):
# Requested image does not exist, just return its URL.
return image_url
f = default_storage.open(image_url)
try:
image = Image.open(f)
except:
# Invalid image format.
return image_url
image_info = image.info
# Transpose to align the image to its orientation if necessary.
# If the image is transposed, delete the exif information as
# not all browsers support the CSS image-orientation:
# - http://caniuse.com/#feat=css-image-orientation
try:
orientation = image._getexif().get(0x0112)
except:
orientation = None
if orientation:
methods = {
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)}.get(orientation, ())
if methods:
image_info.pop('exif', None)
for method in methods:
image = image.transpose(method)
to_width = int(width)
to_height = int(height)
from_width = image.size[0]
from_height = image.size[1]
if not upscale:
to_width = min(to_width, from_width)
to_height = min(to_height, from_height)
# Set dimensions.
if to_width == 0:
to_width = from_width * to_height // from_height
elif to_height == 0:
to_height = from_height * to_width // from_width
if image.mode not in ("P", "L", "RGBA") \
and filetype not in ("JPG", "JPEG"):
try:
image = image.convert("RGBA")
except:
return image_url
# Required for progressive jpgs.
ImageFile.MAXBLOCK = 2 * (max(image.size) ** 2)
# Padding.
if padding and to_width and to_height:
from_ratio = float(from_width) / from_height
to_ratio = float(to_width) / to_height
pad_size = None
if to_ratio < from_ratio:
pad_height = int(to_height * (float(from_width) / to_width))
pad_size = (from_width, pad_height)
pad_top = (pad_height - from_height) // 2
pad_left = 0
elif to_ratio > from_ratio:
pad_width = int(to_width * (float(from_height) / to_height))
pad_size = (pad_width, from_height)
pad_top = 0
pad_left = (pad_width - from_width) // 2
if pad_size is not None:
pad_container = Image.new("RGBA", pad_size, padding_color)
pad_container.paste(image, (pad_left, pad_top))
image = pad_container
# Create the thumbnail.
to_size = (to_width, to_height)
to_pos = (left, top)
try:
image = ImageOps.fit(image, to_size, Image.ANTIALIAS, 0, to_pos)
image = image.save(thumb_path, filetype, quality=quality, **image_info)
# Push a remote copy of the thumbnail if MEDIA_URL is
# absolute.
if "://" in settings.MEDIA_URL:
with open(thumb_path, "rb") as f:
default_storage.save(unquote(thumb_url), File(f))
except Exception:
# If an error occurred, a corrupted image may have been saved,
# so remove it, otherwise the check for it existing will just
# return the corrupted image next time it's requested.
try:
os.remove(thumb_path)
except Exception:
pass
return image_url
return thumb_url
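

# Template usage sketch for the thumbnail tag above (``page.featured_image``
# is only an example of an image field available in the template context):
#
#     <img src="{{ MEDIA_URL }}{% thumbnail page.featured_image 200 100 %}">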
@register.inclusion_tag("includes/editable_loader.html", takes_context=True)
def editable_loader(context):
"""
Set up the required JS/CSS for the in-line editing toolbar and controls.
"""
user = context["request"].user
template_vars = {
"has_site_permission": has_site_permission(user),
"request": context["request"],
}
if (settings.INLINE_EDITING_ENABLED and
template_vars["has_site_permission"]):
t = get_template("includes/editable_toolbar.html")
template_vars["REDIRECT_FIELD_NAME"] = REDIRECT_FIELD_NAME
template_vars["editable_obj"] = context.get("editable_obj",
context.get("page", None))
template_vars["accounts_logout_url"] = context.get(
"accounts_logout_url", None)
template_vars["toolbar"] = t.render(template_vars)
template_vars["richtext_media"] = RichTextField().formfield(
).widget.media
return template_vars
@register.filter
def richtext_filters(content):
"""
Takes a value edited via the WYSIWYG editor, and passes it through
each of the functions specified by the RICHTEXT_FILTERS setting.
"""
for filter_name in settings.RICHTEXT_FILTERS:
filter_func = import_dotted_path(filter_name)
content = filter_func(content)
if not isinstance(content, SafeText):
# raise TypeError(
# filter_name + " must mark it's return value as safe. See "
# "https://docs.djangoproject.com/en/stable/topics/security/"
# "#cross-site-scripting-xss-protection")
import warnings
warnings.warn(
filter_name + " needs to ensure that any untrusted inputs are "
"properly escaped and mark the html it returns as safe. In a "
"future release this will cause an exception. See "
"https://docs.djangoproject.com/en/stable/topics/security/"
"cross-site-scripting-xss-protection",
FutureWarning)
content = mark_safe(content)
return content
@register.to_end_tag
def editable(parsed, context, token):
"""
Add the required HTML to the parsed content for in-line editing,
such as the icon and edit form if the object is deemed to be
editable - either it has an ``editable`` method which returns
``True``, or the logged in user has change permissions for the
model.
"""
def parse_field(field):
field = field.split(".")
obj = context.get(field.pop(0), None)
attr = field.pop()
while field:
obj = getattr(obj, field.pop(0))
if callable(obj):
# Allows {% editable page.get_content_model.content %}
obj = obj()
return obj, attr
fields = [parse_field(f) for f in token.split_contents()[1:]]
if fields:
fields = [f for f in fields if len(f) == 2 and f[0] is fields[0][0]]
if not parsed.strip():
try:
parsed = "".join([str(getattr(*field)) for field in fields])
except AttributeError:
pass
if settings.INLINE_EDITING_ENABLED and fields and "request" in context:
obj = fields[0][0]
if isinstance(obj, Model) and is_editable(obj, context["request"]):
field_names = ",".join([f[1] for f in fields])
context["editable_form"] = get_edit_form(obj, field_names)
context["original"] = parsed
t = get_template("includes/editable_form.html")
return t.render(context.flatten())
return parsed
@register.simple_tag
def try_url(url_name):
"""
Mimics Django's ``url`` template tag but fails silently. Used for
url names in admin templates as these won't resolve when admin
tests are running.
"""
from warnings import warn
warn("try_url is deprecated, use the url tag with the 'as' arg instead.")
try:
url = reverse(url_name)
except NoReverseMatch:
return ""
return url
def admin_app_list(request):
"""
Adopted from ``django.contrib.admin.sites.AdminSite.index``.
Returns a list of lists of models grouped and ordered according to
``mezzanine.conf.ADMIN_MENU_ORDER``. Called from the
``admin_dropdown_menu`` template tag as well as the ``app_list``
dashboard widget.
"""
app_dict = {}
# Model or view --> (group index, group title, item index, item title).
menu_order = {}
for (group_index, group) in enumerate(settings.ADMIN_MENU_ORDER):
group_title, items = group
for (item_index, item) in enumerate(items):
if isinstance(item, (tuple, list)):
item_title, item = item
else:
item_title = None
menu_order[item] = (group_index, group_title,
item_index, item_title)
# Add all registered models, using group and title from menu order.
for (model, model_admin) in admin.site._registry.items():
opts = model._meta
in_menu = not hasattr(model_admin, "in_menu") or model_admin.in_menu()
if hasattr(model_admin, "in_menu"):
import warnings
warnings.warn(
'ModelAdmin.in_menu() has been replaced with '
'ModelAdmin.has_module_permission(request). See '
'https://docs.djangoproject.com/en/stable/ref/contrib/admin/'
'#django.contrib.admin.ModelAdmin.has_module_permission.',
DeprecationWarning)
in_menu = in_menu and model_admin.has_module_permission(request)
if in_menu and request.user.has_module_perms(opts.app_label):
admin_url_name = ""
if model_admin.has_change_permission(request):
admin_url_name = "changelist"
change_url = admin_url(model, admin_url_name)
else:
change_url = None
if model_admin.has_add_permission(request):
admin_url_name = "add"
add_url = admin_url(model, admin_url_name)
else:
add_url = None
if admin_url_name:
model_label = "%s.%s" % (opts.app_label, opts.object_name)
try:
app_index, app_title, model_index, model_title = \
menu_order[model_label]
except KeyError:
app_index = None
try:
app_title = opts.app_config.verbose_name.title()
except AttributeError:
# Third party admin classes doing weird things.
# See GH #1628
app_title = ""
model_index = None
model_title = None
else:
del menu_order[model_label]
if not model_title:
model_title = capfirst(model._meta.verbose_name_plural)
if app_title not in app_dict:
app_dict[app_title] = {
"index": app_index,
"name": app_title,
"models": [],
}
app_dict[app_title]["models"].append({
"index": model_index,
"perms": model_admin.get_model_perms(request),
"name": model_title,
"object_name": opts.object_name,
"admin_url": change_url,
"add_url": add_url
})
# Menu may also contain view or url pattern names given as (title, name).
for (item_url, item) in menu_order.items():
app_index, app_title, item_index, item_title = item
try:
item_url = reverse(item_url)
except NoReverseMatch:
continue
if app_title not in app_dict:
app_dict[app_title] = {
"index": app_index,
"name": app_title,
"models": [],
}
app_dict[app_title]["models"].append({
"index": item_index,
"perms": {"custom": True},
"name": item_title,
"admin_url": item_url,
})
app_list = list(app_dict.values())
sort = lambda x: (x["index"] if x["index"] is not None else 999, x["name"])
for app in app_list:
app["models"].sort(key=sort)
app_list.sort(key=sort)
return app_list
@register.inclusion_tag("admin/includes/dropdown_menu.html",
takes_context=True)
def admin_dropdown_menu(context):
"""
Renders the app list for the admin dropdown menu navigation.
"""
user = context["request"].user
if user.is_staff:
context["dropdown_menu_app_list"] = admin_app_list(context["request"])
if user.is_superuser:
sites = Site.objects.all()
else:
try:
sites = user.sitepermissions.sites.all()
except ObjectDoesNotExist:
sites = Site.objects.none()
context["dropdown_menu_sites"] = list(sites)
context["dropdown_menu_selected_site_id"] = current_site_id()
return context.flatten()
@register.inclusion_tag("admin/includes/app_list.html", takes_context=True)
def app_list(context):
"""
Renders the app list for the admin dashboard widget.
"""
context["dashboard_app_list"] = admin_app_list(context["request"])
return context.flatten()
@register.inclusion_tag("admin/includes/recent_actions.html",
takes_context=True)
def recent_actions(context):
"""
Renders the recent actions list for the admin dashboard widget.
"""
return context.flatten()
@register.render_tag
def dashboard_column(context, token):
"""
Takes an index for retrieving the sequence of template tags from
``mezzanine.conf.DASHBOARD_TAGS`` to render into the admin
dashboard.
"""
column_index = int(token.split_contents()[1])
output = []
for tag in settings.DASHBOARD_TAGS[column_index]:
t = Template("{%% load %s %%}{%% %s %%}" % tuple(tag.split(".")))
output.append(t.render(context))
return "".join(output)
@register.simple_tag(takes_context=True)
def translate_url(context, language):
"""
Translates the current URL for the given language code, eg:
{% translate_url "de" %}
"""
try:
request = context["request"]
except KeyError:
return ""
view = resolve(request.path)
current_language = translation.get_language()
translation.activate(language)
if not view.namespace and view.url_name == "home":
url = home_slug()
else:
try:
url = reverse(view.func, args=view.args, kwargs=view.kwargs)
except NoReverseMatch:
try:
url_name = (view.url_name if not view.namespace
else '%s:%s' % (view.namespace, view.url_name))
url = reverse(url_name, args=view.args, kwargs=view.kwargs)
except NoReverseMatch:
url_name = "admin:" + view.url_name
url = reverse(url_name, args=view.args, kwargs=view.kwargs)
translation.activate(current_language)
qs = context['request'].META.get("QUERY_STRING", "")
if qs:
url += "?" + qs
return url
|
py | b41658d5c02a3acc5a6c4032a9062b9470e5bbdb | import csv
import sys
import re
###########
# This is a 3.X python file.
# Takes the responses downloaded from the UBC survey tool and
# "flips" some of the data and makes it into a readable format
# Rama Flarsheim. Nov 15, 2015
# if the name of this script is surveyCleaner.py
# use like: python surveyCleaner.py "responses (4).csv" temp.tsv
#
# creates a tab seperated CSV file
###########
inputFileName = sys.argv[1] #first argument
outputFileName = sys.argv[2]
csvfile=open(inputFileName,encoding="utf_16", errors="surrogateescape")
reader = csv.DictReader(csvfile, dialect="excel-tab")
#create fieldnames
#hardcoded. TODO: make dynamic off of a list of context questions encountered while parsing the input file
fieldnames=reader.fieldnames #read existing fieldnames
#context variables headings
fieldnames.append("Name of project")
fieldnames.append("Name of PI or project lead(s)") #append new
fieldnames.append("Please enter your name")
fieldnames.append("Type of project")
fieldnames.append("Project stage")
fieldnames.append("Year awarded")
fieldnames.append("Faculty_School")
fieldnames.append("Department")
fieldnames.append("Short description of project")
fieldnames.append("Primary course format")
fieldnames.append("Course_Level")
fieldnames.append("Course type")
fieldnames.append("Enrolment cap")
fieldnames.append("Course location")
#for Sankey diagram
fieldnames.append("source")
fieldnames.append("target")
fieldnames.append("source long name")
fieldnames.append("target long name")
fieldnames.append("value")
#for the heat maps
fieldnames.append("matrix") #will be either innovationXimpact or impactXapproach
#context variables
Name_of_Project = ""
Name_of_PI_or_project_lead = ""
EnterersName = ""
Type_of_project = ""
Project_Stage = ""
Year_Awarded = ""
Faculty_School = ""
Department = ""
Short_description_of_project = ""
Course_Format = ""
Course_Level = ""
Course_Type = ""
Enrolment_Cap = ""
Course_Location = ""
#for Sankey diagram
source = ""
target = ""
source_long_name = ""
target_long_name = ""
value = 1
#for the heat maps
matrix = ""
internalIDnum = -1 #used in differentiating the different surveys. When it changes, reset the context variables
#for testing purposes
Name_of_Project = 'testTEST 1'
#collection of regular expressions
innovationListRE = re.compile('\[Elements_of_Innovation_list\]') # used for filtering out the innovation list
nameOfProjectRE = re.compile("Name of project")
Name_of_PI_or_project_leadRE = re.compile("Name of PI or project lead")
EnterersNameRE = re.compile("Please enter your name")
Type_of_projectRE = re.compile("Type of project")
Project_StageRE = re.compile("Project stage")
Year_AwardedRE = re.compile("Year awarded")
Faculty_SchoolRE = re.compile("Faculty/School")
DepartmentRE = re.compile("Department")
Short_description_of_projectRE = re.compile("Short description of project")
Course_FormatRE = re.compile("Primary course format")
Course_LevelRE = re.compile("Course level")
Course_TypeRE = re.compile("Course type")
Enrolment_CapRE = re.compile("Enrolment cap")
Course_LocationRE = re.compile("Course location")
wordFromQuestionRE = re.compile("\|\s([^\t]*)") #everything after the | and before the tab.
matrixRE = re.compile("\[[a-z_]*") #matches [ and then lower case letters and underscore ex)[innova_impact or [immp_eval
#notExamplesRE = re.compile("\([^\)]*\)") #matches stuff in parenthesis including the parenthesis
notExamplesRE = re.compile("(^.*)\(") #want everything from the start of the line till a (
#notExamplesRE = re.compile(".*")
ofile = open(outputFileName, 'w',encoding="utf_8",newline='',errors="surrogateescape")
writer=csv.DictWriter(ofile,fieldnames=fieldnames,dialect="excel-tab",restval="") #option to quote stuff here but doesn't seem to work when using d3.tsv.parse (on a local file) instead of just d3.tsv and linking the file in
writer.writeheader() #write header for the output file
for row in reader:
print('here', row, '\n')
if row["Internal ID"] != internalIDnum: #indicates bnew survey data
internalIDnum=row["Internal ID"]
Name_of_Project = ""
Name_of_PI_or_project_lead = ""
EnterersName = ""
Type_of_project = ""
Project_Stage = ""
Year_Awarded = ""
Faculty_School = ""
Department = ""
Short_description_of_project = ""
Course_Format = ""
Course_Level = ""
Course_Type = ""
Enrolment_Cap = ""
Course_Location = ""
if innovationListRE.match(row["Question"])!=None:#if it's != None then it's found :)
continue #don't do anything when you see these. essentially delete these rows
if nameOfProjectRE.match(row["Question"])!=None:
Name_of_Project = row["Comment"]
continue
if Name_of_PI_or_project_leadRE.match(row["Question"])!=None:
Name_of_PI_or_project_lead = row["Comment"]
continue
if EnterersNameRE.match(row["Question"])!=None:
EnterersName = row["Comment"]
continue
if Type_of_projectRE.match(row["Question"])!=None:
Type_of_project = row["Response"]
continue
if Project_StageRE.match(row["Question"])!=None:
Project_Stage = row["Response"]
continue
if Year_AwardedRE.match(row["Question"])!=None:
Year_Awarded = row["Comment"]
continue
if Faculty_SchoolRE.match(row["Question"])!=None:
Faculty_School = row["Response"]
continue
if DepartmentRE.match(row["Question"])!=None:
Department = row["Comment"]
continue
if Short_description_of_projectRE.match(row["Question"])!=None:
Short_description_of_project = row["Comment"]
continue
if Course_FormatRE.match(row["Question"])!=None:
if Course_Format == "": Course_Format = row["Response"]
else: Course_Format = Course_Format + ", " + row["Response"]
continue
if Course_LevelRE.match(row["Question"])!=None:
if Course_Level == "": Course_Level = row["Response"]
else: Course_Level = Course_Level + ", " + row["Response"]
continue
if Course_TypeRE.match(row["Question"])!=None:
if Course_Type == "": Course_Type = row["Response"]
else: Course_Type = Course_Type + ", " + row["Response"]
continue
if Enrolment_CapRE.match(row["Question"])!=None:
Enrolment_Cap = row["Response"]
continue
if Course_LocationRE.match(row["Question"])!=None:
if Course_Location == "": Course_Location = row["Response"]
else: Course_Location = Course_Location + ", " + row["Response"]
continue
#get some data about the nature of the question
print(row["Question"])
m2 = matrixRE.match(row["Question"])
try:
if "If you selected" in row["Question"]:
continue
elif m2.group() == "[innova_impact":
matrix = "innovationXimpact"
elif m2.group() == "[immp_eval":
matrix = "impactXapproach"
else: matrix = ""
except AttributeError:
print(row)
#print (matrix)
# parse out the part of the question that contains the source part of the link pairing.
#ex: [INN_IMP.5] What is the impact of… [ELEMENT OF INNOVATION] on… [INTENDED AREA OF IMPACT] ? (choose all that apply) | In-class content delivery (e.g., demos)
# should just be "In-class content delivery (e.g., demos)" in the source
m = wordFromQuestionRE.search(row["Question"]) # search() searches within the string, match() only matches if the string starts with the pattern.
if m: #m will be false if there isn't a match
#print (m.group(1))
source_long_name = m.group(1)
source = source_long_name
if notExamplesRE.search(source_long_name) !=None:
            source = notExamplesRE.search(source_long_name).group(1).rstrip() #rstrip to remove trailing whitespace
#I want to check if there is a match. then if there is, set the source to the first group of that
#do this for the source and the target. THANKS Rama :)
#print(notExamplesRE.search(source_long_name).group(1))
#print("source short name: " + source+ "###")
#source = notExamplesRE.search(source_long_name).group(1)
#print("source long name: " + source_long_name)
target_long_name = row["Response"]
target = target_long_name.rstrip()
if notExamplesRE.search(target_long_name) !=None:
            target = notExamplesRE.search(target_long_name).group(1).rstrip() #rstrip to remove trailing whitespace
#print("target long name: " + target_long_name)
#print("target short name: " + target+ "###")
if (source == "Other" and matrix == "impactXapproach"):
source = "Other Area of Impact"
source_long_name = "Other Area of Impact"
if (target == "Other" and matrix == "innovationXimpact" ):
target = "Other Area of Impact"
target_long_name = "Other Area of Impact"
else:
source = ""
target = ""
#print (m)
row.update({"Name of project":Name_of_Project,"Name of PI or project lead(s)":Name_of_PI_or_project_lead,"Please enter your name":EnterersName,"Type of project":Type_of_project,"Project stage":Project_Stage,"Year awarded":Year_Awarded,"Faculty_School":Faculty_School,"Department":Department,"Short description of project":Short_description_of_project,"Primary course format":Course_Format,"Course_Level":Course_Level,"Course type":Course_Type,"Enrolment cap":Enrolment_Cap,"Course location":Course_Location,"source":source,"source long name":source_long_name,"target":target, "target long name":target_long_name, "value":value, "matrix":matrix})
writer.writerow(row)
csvfile.close()
ofile.close()
|
py | b41659ac3f02b06bbb0178cb348fd1e4ce371a18 | import textwrap
import pytest
from conans.test.assets.cmake import gen_cmakelists
from conans.test.assets.pkg_cmake import pkg_cmake
from conans.test.assets.sources import gen_function_h, gen_function_cpp
from conans.test.utils.tools import TestClient
@pytest.fixture(scope="module")
def client_weird_lib_name():
c = TestClient()
conanfile = textwrap.dedent("""
import os, platform
from conans import ConanFile
from conan.tools.cmake import CMake, cmake_layout
class Pkg(ConanFile):
exports_sources = "CMakeLists.txt", "src/*"
settings = "os", "compiler", "arch", "build_type"
generators = "CMakeToolchain", "CMakeDeps"
def layout(self):
cmake_layout(self)
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
self.copy("*.h", dst="include", src="src")
self.copy("*.lib", dst="lib", keep_path=False)
self.copy("*.a", dst="lib", keep_path=False)
ext = "a" if platform.system() != "Windows" else "lib"
prefix = "lib" if platform.system() != "Windows" else ""
os.chdir(os.path.join(self.package_folder, "lib"))
os.rename("{}hello_0.1.{}".format(prefix, ext),
"{}[email protected].{}".format(prefix, ext))
def package_info(self):
self.cpp_info.libs = ["[email protected]"]
""")
hdr = gen_function_h(name="hello")
src = gen_function_cpp(name="hello")
cmake = gen_cmakelists(libname="hello_0.1", libsources=["src/hello.cpp"])
c.save({"src/hello.h": hdr,
"src/hello.cpp": src,
"CMakeLists.txt": cmake,
"conanfile.py": conanfile})
c.run("create . hello/0.1@")
return c
@pytest.mark.tool_cmake
def test_cmakedeps(client_weird_lib_name):
c = client_weird_lib_name
c.save(pkg_cmake("chat", "0.1", requires=["hello/0.1"]), clean_first=True)
c.run("create . chat/0.1@")
assert "chat/0.1: Created package" in c.out
# TODO: Remove in Conan 2.0
@pytest.mark.tool_cmake
def test_cmake_find_package(client_weird_lib_name):
c = client_weird_lib_name
files = pkg_cmake("chat", "0.1", requires=["hello/0.1"])
conanfile = textwrap.dedent("""
import os, platform
from conans import ConanFile, CMake
class Pkg(ConanFile):
exports_sources = "CMakeLists.txt", "src/*", "include/*"
settings = "os", "compiler", "arch", "build_type"
generators = "cmake_find_package"
requires = "hello/0.1"
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
""")
files["conanfile.py"] = conanfile
c.save(files, clean_first=True)
c.run("create . chat/0.1@")
assert "chat/0.1: Created package" in c.out
# TODO: Remove in Conan 2.0
@pytest.mark.tool_cmake
def test_cmake_find_package_multi(client_weird_lib_name):
c = client_weird_lib_name
files = pkg_cmake("chat", "0.1", requires=["hello/0.1"])
conanfile = textwrap.dedent("""
import os, platform
from conans import ConanFile, CMake
class Pkg(ConanFile):
exports_sources = "CMakeLists.txt", "src/*", "include/*"
settings = "os", "compiler", "arch", "build_type"
generators = "cmake_find_package_multi"
requires = "hello/0.1"
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
""")
files["conanfile.py"] = conanfile
c.save(files, clean_first=True)
c.run("create . chat/0.1@")
assert "chat/0.1: Created package" in c.out
|
py | b4165afebdf106505cc8a83a509c9c85b231c4d4 | # Generated by Django 2.1.5 on 2019-02-17 10:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('shop', '0002_auto_20190210_0117'),
]
operations = [
migrations.CreateModel(
name='Contact',
fields=[
('msg_id', models.AutoField(primary_key=True, serialize=False)),
('name', models.CharField(max_length=50)),
('email', models.CharField(default='', max_length=70)),
('phone', models.CharField(default='', max_length=70)),
('desc', models.CharField(default='', max_length=500)),
],
),
]
|
py | b4165ca9c9e714bd671c8647e6d587d1d5084974 | """
EDID helper
"""
from subprocess import CalledProcessError, check_output
from typing import ByteString, List
__all__ = ["EdidHelper"]
class EdidHelper:
"""Class for working with EDID data"""
@staticmethod
def hex2bytes(hex_data: str) -> ByteString:
"""Convert hex EDID string to bytes
Args:
hex_data (str): hex edid string
Returns:
ByteString: edid byte string
"""
# delete edid 1.3 additional block
if len(hex_data) > 256:
hex_data = hex_data[:256]
numbers = []
for i in range(0, len(hex_data), 2):
pair = hex_data[i : i + 2]
numbers.append(int(pair, 16))
return bytes(numbers)
@classmethod
def get_edids(cls) -> List[ByteString]:
"""Get edids from xrandr
Raises:
`RuntimeError`: if error with retrieving xrandr util data
Returns:
List[ByteString]: list with edids
"""
try:
output = check_output(["xrandr", "--verbose"])
except (CalledProcessError, FileNotFoundError) as err:
raise RuntimeError(
"Error retrieving xrandr util data: {}".format(err)
) from None
edids = []
lines = output.splitlines()
for i, line in enumerate(lines):
line = line.decode().strip()
if line.startswith("EDID:"):
selection = lines[i + 1 : i + 9]
selection = list(s.decode().strip() for s in selection)
selection = "".join(selection)
bytes_section = cls.hex2bytes(selection)
edids.append(bytes_section)
return edids
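

# Usage sketch (assumes a running X session so that ``xrandr --verbose``
# succeeds; each EDID block is typically 128 bytes):
if __name__ == "__main__":
    for edid in EdidHelper.get_edids():
        print(len(edid), edid[:8].hex())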
|
py | b4165d37a8b383ded1cd5fabd53afe74f8d073a3 | from flask import Flask, g, request
import deepdream_test
app = Flask(__name__)
#@app.before_first_request
#def load_model():
# g.net = deepdream_test.make_net('../caffe/models/bvlc_googlenet/')
@app.route('/', methods=['GET', 'POST'])
def make_dream():
net = deepdream_test.make_net('../caffe/models/bvlc_googlenet/')
if request.method == 'POST' and len(request.files.keys()) > 0:
key = request.files.keys()[0]
filename = 'tmp/%s' % key
with open(filename, 'wb') as file:
file.write(request.files[key].read())
num_iterations = request.args.get('iters', 1)
inverse_gradient = request.args.get('inverse_gradient', 0)
if int(inverse_gradient):
inverse_gradient = True
else:
inverse_gradient = False
return deepdream_test.layerDream(net, filename, num_iterations, inverse_gradient)
#else if request.method == 'GET' and request.params['image_url']:
# TODO: add the ability to dreamify a url image
return 'No image found.'
if __name__ == '__main__':
app.run(host="0.0.0.0", port=5000, debug=True)
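
# Example request against the endpoint above (a sketch; the multipart field
# name "image" is arbitrary since the handler reads whichever single file key
# is posted, plus the optional ?iters= and ?inverse_gradient= arguments):
#
#     import requests
#     with open("input.jpg", "rb") as fh:
#         resp = requests.post(
#             "http://localhost:5000/?iters=3&inverse_gradient=1",
#             files={"image": fh},
#         )
#     print(resp.text)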
|
py | b4165f24cafd0e9a3fc6069fd512046c20093533 | class Map:
def __init__(self, embeddings, file_paths):
self.list_em = [list(i) for i in embeddings]
self.file_paths = file_paths
def find_index(self, cluster):
cluster_indices = [self.list_em.index(list(embed)) for embed in cluster]
return cluster_indices
def find_file(self, row):
files = [self.file_paths[ind] for ind in row]
return files
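

# Usage sketch with made-up data: map a cluster of embeddings back to the
# files they came from via their positions in the original embedding list.
if __name__ == "__main__":
    embeddings = [(0.1, 0.2), (0.3, 0.4), (0.5, 0.6)]
    file_paths = ["a.jpg", "b.jpg", "c.jpg"]
    mapper = Map(embeddings, file_paths)
    indices = mapper.find_index([(0.3, 0.4), (0.5, 0.6)])
    print(mapper.find_file(indices))  # -> ['b.jpg', 'c.jpg']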
|
py | b4165f6493c8742a2f854967f88622c02fdb3841 | # model settings
norm_cfg = dict(type='BN', requires_grad=True)
model = dict(
type='CascadeRCNN',
pretrained='open-mmlab://resnest50',
backbone=dict(
type='DetectoRS_ResNeSt',
stem_channels=64,
depth=50,
radix=2,
use_blur_pool=False,
reduction_factor=4,
avg_down_stride=True,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=norm_cfg,
dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False),
stage_with_dcn=(False, True, True, True),
norm_eval=True,
plugins=[
dict(
cfg=dict(type='ContextBlock', ratio=1. / 4),
stages=(False, False, True, True),
position='after_conv3')
],
output_img=True,
style='pytorch'),
neck=dict(
type='RFP',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
rfp_steps=2,
aspp_out_channels=64,
aspp_dilations=(1, 3, 6, 1),
rfp_backbone=dict(
rfp_inplanes=256,
type='DetectoRS_ResNeSt',
stem_channels=64,
depth=50,
radix=2,
use_blur_pool=False,
reduction_factor=4,
avg_down_stride=True,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=norm_cfg,
dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False),
stage_with_dcn=(False, True, True, True),
norm_eval=True,
plugins=[
dict(
cfg=dict(type='ContextBlock', ratio=1. / 4),
stages=(False, False, True, True),
position='after_conv3')
],
pretrained='open-mmlab://resnest50',
style='pytorch'),
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_generator=dict(
type='AnchorGenerator',
scales=[6],
ratios=[0.1, 0.5, 1.0, 2.0, 10.0],
strides=[4, 8, 16, 32, 64]),
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0]),
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
roi_head=dict(
type='CascadeRoIHead',
num_stages=3,
stage_loss_weights=[1, 0.5, 0.25],
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
conv_out_channels=256,
norm_cfg=norm_cfg,
num_classes=9,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
conv_out_channels=256,
norm_cfg=norm_cfg,
num_classes=9,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
loss_weight=1.0)),
dict(
type='Shared2FCBBoxHead',
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
conv_out_channels=256,
norm_cfg=norm_cfg,
num_classes=9,
bbox_coder=dict(
type='DeltaXYWHBBoxCoder',
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067]),
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
]))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.2,
min_pos_iou=0.2,
gpu_assign_thr=10,
match_low_quality=True,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.3,
neg_iou_thr=0.3,
min_pos_iou=0.3,
match_low_quality=False,
gpu_assign_thr=10,
ignore_iof_thr=-1),
sampler=dict(
type='OHEMSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.4,
neg_iou_thr=0.4,
min_pos_iou=0.4,
match_low_quality=False,
gpu_assign_thr=10,
ignore_iof_thr=-1),
sampler=dict(
type='OHEMSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
match_low_quality=False,
gpu_assign_thr=10,
ignore_iof_thr=-1),
sampler=dict(
type='OHEMSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)
])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.01,
nms=dict(type='nms', iou_threshold=0.1),
max_per_img=100))
# dataset setting
dataset_type = 'TileDataset'
data_root = 'data/data_guangdong/tile_round2/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
albu_train_transforms = [
dict(type='HorizontalFlip', p=0.5),
dict(type='VerticalFlip', p=0.5),
dict(
type='ShiftScaleRotate',
shift_limit=0.0625,
scale_limit=0.0,
rotate_limit=0,
interpolation=1,
p=0.5),
dict(
type='RandomBrightnessContrast',
brightness_limit=[0.1, 0.3],
contrast_limit=[0.1, 0.3],
p=0.2),
dict(
type='OneOf',
transforms=[
dict(
type='RGBShift',
r_shift_limit=10,
g_shift_limit=10,
b_shift_limit=10,
p=1.0),
dict(
type='HueSaturationValue',
hue_shift_limit=20,
sat_shift_limit=30,
val_shift_limit=20,
p=1.0)
],
p=0.1),
dict(type='JpegCompression', quality_lower=85, quality_upper=95, p=0.2),
dict(type='ChannelShuffle', p=0.1),
dict(type='RandomContrast', limit=0.2, always_apply=False, p=0.2),
dict(
type='OneOf',
transforms=[
dict(type='Blur', blur_limit=3, p=1.0),
dict(type='MedianBlur', blur_limit=3, p=1.0)
],
p=0.1),
]
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(
type='Resize',
img_scale=[(1300, 1300), (1500, 1500), (1700, 1700)],
multiscale_mode='value',
keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='RandomFlip', direction='diagonal', flip_ratio=0.5),
dict(type='MixUp', p=0.5, lambd=0.5),
dict(
type='Albu',
transforms=albu_train_transforms,
bbox_params=dict(
type='BboxParams',
format='pascal_voc',
label_fields=['gt_labels'],
min_visibility=0.0,
filter_lost_elements=True),
keymap={
'img': 'image',
'gt_bboxes': 'bboxes'
},
update_pad_shape=False,
skip_img_without_anno=True),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=[(1300, 1300), (1500, 1500), (1700, 1700)],
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
samples_per_gpu=1,
workers_per_gpu=3,
train=dict(
type=dataset_type,
ann_file=data_root + 'infos/train.pkl',
img_prefix=data_root + 'train_imgs/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'infos/val.pkl',
img_prefix=data_root + 'train_imgs/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'infos/val.pkl',
img_prefix=data_root + 'train_imgs/',
samples_per_gpu=1,
pipeline=test_pipeline)
# test=dict(
# type='TileTestDatasetV3',
# ann_file=
# '/ssd/huangyifei/data_guangdong/tile_round1_testA_20201231/croped_slide_test_win1650',
# # ann_file='/home/huangyifei/test',
# pipeline=test_pipeline,
# use_tile_test_dataset=True,
# samples_per_gpu=10)
)
evaluation = dict(interval=1, metric='mAP')
# optimizer
# optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer = dict(type='Adam', lr=7e-5)
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[20, 27])
total_epochs = 30
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=10,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
log_level = 'INFO'
load_from = None
resume_from = 'work_dirs/round2/cascade_s50_rfp_mstrain_aug_gc_se/epoch_23.pth'
work_dir = 'work_dirs/round2/cascade_s50_rfp_mstrain_aug_gc_se'
workflow = [('train', 1)]
# fp16 settings
fp16 = dict(loss_scale=512.)
cudnn_benchmark = True
dist_params = dict(backend='nccl')
optimizer_config = dict(grad_clip=None) |
py | b4165f9b413f09c6e291a1034d53ad10ed5ca3ee | import numpy as np
from .data_types import InferOptions, spark_to_value_type
try:
import pyspark.sql.functions as funcs
except ImportError:
pass
def infer_schema_from_df_spark(
df,
features,
entities,
timestamp_key: str = None,
entity_columns=None,
options: InferOptions = InferOptions.Null,
):
timestamp_fields = []
current_entities = list(entities.keys())
entity_columns = entity_columns or []
def upsert_entity(name, value_type):
if name in current_entities:
entities[name].value_type = value_type
else:
entities[name] = {"name": name, "value_type": value_type}
for column, s in df.dtypes:
value_type = spark_to_value_type(s)
is_entity = column in entity_columns or column in current_entities
if is_entity:
upsert_entity(column, value_type)
elif options & InferOptions.Features and column != timestamp_key:
if column in features.keys():
features[column].value_type = value_type
else:
features[column] = {"name": column, "value_type": value_type}
if value_type == "timestamp" and not is_entity:
timestamp_fields.append(column)
return timestamp_key
def get_df_preview_spark(df, preview_lines=20):
"""capture preview data from spark df"""
df = df.limit(preview_lines)
values = [df.select(funcs.collect_list(val)).first()[0] for val in df.columns]
preview = [df.columns]
for row in list(zip(*values)):
preview.append(list(row))
return preview
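# Illustrative note (not part of the original module): for a hypothetical frame
# with columns ["id", "score"] and two rows, get_df_preview_spark returns the
# header row followed by the data rows, e.g. [["id", "score"], [1, 2.5], [2, 3.5]].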
def _create_hist_data(df, column, minim, maxim, bins=10):
def create_all_conditions(current_col, column, left_edges, count=1):
"""
Recursive function that exploits the
ability to call the Spark SQL Column method
.when() in a recursive way.
"""
left_edges = left_edges[:]
if len(left_edges) == 0:
return current_col
if len(left_edges) == 1:
next_col = current_col.when(
funcs.col(column) >= float(left_edges[0]), count
)
left_edges.pop(0)
return create_all_conditions(next_col, column, left_edges[:], count + 1)
next_col = current_col.when(
(float(left_edges[0]) <= funcs.col(column))
& (funcs.col(column) < float(left_edges[1])),
count,
)
left_edges.pop(0)
return create_all_conditions(next_col, column, left_edges[:], count + 1)
num_range = maxim - minim
bin_width = num_range / float(bins)
left_edges = [minim]
for _bin in range(bins):
left_edges = left_edges + [left_edges[-1] + bin_width]
left_edges.pop()
expression_col = funcs.when(
(float(left_edges[0]) <= funcs.col(column))
& (funcs.col(column) < float(left_edges[1])),
0,
)
left_edges_copy = left_edges[:]
left_edges_copy.pop(0)
bin_data = (
df.select(funcs.col(column))
.na.drop()
.select(
funcs.col(column),
create_all_conditions(expression_col, column, left_edges_copy).alias(
"bin_id"
),
)
.groupBy("bin_id")
.count()
).toPandas()
bin_data.index = bin_data["bin_id"]
new_index = list(range(bins))
bin_data = bin_data.reindex(new_index)
bin_data["bin_id"] = bin_data.index
bin_data = bin_data.fillna(0)
bin_data["left_edge"] = left_edges
bin_data["width"] = bin_width
bin_data = [
bin_data["count"].tolist(),
[round(x, 2) for x in bin_data["left_edge"].tolist()],
]
return bin_data
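# Illustrative note (hypothetical numbers, not part of the original module):
# with bins=3 over a column holding the integers 0..9, _create_hist_data
# returns a pair like [[3, 3, 4], [0.0, 3.0, 6.0]] -- the per-bin counts first,
# then the rounded left bin edges; the last bin is open-ended (values >= 6.0).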
def get_dtype(df, colname):
return [dtype for name, dtype in df.dtypes if name == colname][0]
def get_df_stats_spark(df, options, num_bins=20, sample_size=None):
if InferOptions.get_common_options(options, InferOptions.Index):
df = df.select("*").withColumn("id", funcs.monotonically_increasing_id())
# todo: sample spark DF if sample_size is not None and DF is bigger than sample_size
summary_df = df.summary().toPandas()
summary_df.set_index(["summary"], drop=True, inplace=True)
results_dict = {}
for col, values in summary_df.items():
stats_dict = {}
for stat, val in values.dropna().items():
if stat != "50%":
if isinstance(val, (float, np.floating, np.float64)):
stats_dict[stat] = float(val)
elif isinstance(val, (int, np.integer, np.int64)):
# boolean values are considered subclass of int
if isinstance(val, bool):
stats_dict[stat] = bool(val)
else:
stats_dict[stat] = int(val)
else:
stats_dict[stat] = str(val)
results_dict[col] = stats_dict
if InferOptions.get_common_options(
options, InferOptions.Histogram
) and get_dtype(df, col) in ["double", "int"]:
try:
results_dict[col]["hist"] = _create_hist_data(
df,
col,
float(results_dict[col]["min"]),
float(results_dict[col]["max"]),
bins=num_bins,
)
except Exception:
pass
return results_dict
class SparkDataInfer:
infer_schema = infer_schema_from_df_spark
get_preview = get_df_preview_spark
get_stats = get_df_stats_spark
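# Usage sketch (illustrative only; the SparkSession and sample data below are
# assumptions, not part of this module):
#
#   from pyspark.sql import SparkSession
#   spark = SparkSession.builder.getOrCreate()
#   df = spark.createDataFrame([(1, 2.5), (2, 3.5)], ["id", "score"])
#   preview = get_df_preview_spark(df, preview_lines=5)
#   stats = get_df_stats_spark(df, InferOptions.Histogram, num_bins=10)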
|
py | b416606be0153015093d57f7aa6c2ffbdff5098b | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
from netaddr import *
import json
import cfgm_common.exceptions
class AddrMgmtError(Exception):
pass
# end class AddrMgmtError
class AddrMgmtSubnetUndefined(AddrMgmtError):
def __init__(self, vn_fq_name):
self.vn_fq_name = vn_fq_name
# end __init__
def __str__(self):
return "Virtual-Network(%s) has no defined subnet(s)" %\
(self.vn_fq_name)
# end __str__
# end AddrMgmtSubnetUndefined
class AddrMgmtSubnetExhausted(AddrMgmtError):
def __init__(self, vn_fq_name, subnet_val):
self.vn_fq_name = vn_fq_name
self.subnet_val = subnet_val
# end __init__
def __str__(self):
return "Virtual-Network(%s) has exhausted subnet(%s)" %\
(self.vn_fq_name, self.subnet_val)
# end __str__
# end AddrMgmtSubnetExhausted
# Class to manage a single subnet
# maintain free list of IP addresses, exclude list and CIDR info
class Subnet(object):
"""Create a subnet with prefix and len
Gateway (if provided) is made unavailable for assignment.
Inuse mask represent addresses already assigned and in use during previous
incarnations of api server. These are also taken out of free pool to
prevent duplicate assignment.
"""
_db_conn = None
@classmethod
def set_db_conn(cls, db_conn):
cls._db_conn = db_conn
# end set_db_conn
def __init__(self, name, prefix, prefix_len, gw=None):
self._version = 0
"""
print 'Name = %s, prefix = %s, len = %s, gw = %s, db_conn = %s' \
% (name, prefix, prefix_len, gw, 'Yes' if db_conn else 'No')
"""
network = IPNetwork('%s/%s' % (prefix, prefix_len))
# Exclude host, broadcast and gateway addresses
exclude = [IPAddress(network.first), IPAddress(
network.last), network.broadcast]
if gw:
gw_ip = IPAddress(gw)
exclude.append(gw_ip)
else:
# reserve a gateway ip in subnet
gw_ip = IPAddress(network.last - 1)
exclude.append(gw_ip)
self._db_conn.subnet_create_allocator(name, network.first, network.last)
# reserve excluded addresses
for addr in exclude:
self._db_conn.subnet_alloc_req(name, int(addr))
self._name = name
self._network = network
self._exclude = exclude
self.gw_ip = gw_ip
# end __init__
@classmethod
def delete_cls(cls, subnet_name):
# deletes the index allocator
cls._db_conn.subnet_delete_allocator(subnet_name)
# end delete_cls
def get_name(self):
return self._name
#end get_name
def get_exclude(self):
return self._exclude
# end get_exclude
def ip_alloc(self, ipaddr=None):
req = None
if ipaddr:
ip = IPAddress(ipaddr)
req = int(ip)
addr = self._db_conn.subnet_alloc_req(self._name, req)
if addr:
return str(IPAddress(addr))
return None
# end ip_alloc
# free IP unless it is invalid, excluded or already freed
@classmethod
def ip_free_cls(cls, subnet_fq_name, ip_network, exclude_addrs, ip_addr):
if ((ip_addr in ip_network) and (ip_addr not in exclude_addrs)):
if cls._db_conn:
cls._db_conn.subnet_free_req(subnet_fq_name, int(ip_addr))
return True
return False
# end ip_free_cls
def ip_free(self, ip_addr):
Subnet.ip_free_cls(self._name, self._network, self._exclude, ip_addr)
# end ip_free
# check if IP address belongs to us
@classmethod
def ip_belongs_to(cls, ipnet, ipaddr):
return IPAddress(ipaddr) in ipnet
# end ip_belongs_to
def ip_belongs(self, ipaddr):
return self.ip_belongs_to(self._network, ipaddr)
# end ip_belongs
def set_version(self, version):
self._version = version
# end set_version
def get_version(self):
return self._version
# end get_version
# end class Subnet
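# Illustrative usage sketch (not part of the original module; db_conn stands in
# for an already configured API-server database connection):
#
#   Subnet.set_db_conn(db_conn)
#   sn = Subnet('default-domain:net1:10.0.0.0/24', '10.0.0.0', 24)
#   addr = sn.ip_alloc()              # first free host address, as a string
#   sn.ip_belongs(addr)               # True
#   sn.ip_free(IPAddress(addr))       # return the address to the free pool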
# Address management for virtual network
class AddrMgmt(object):
def __init__(self, server_mgr):
#self.vninfo = {}
self.version = 0
self._server_mgr = server_mgr
self._db_conn = None
# dict of VN where each key has dict of subnets
self._subnet_objs = {}
# end __init__
def _get_db_conn(self):
if not self._db_conn:
self._db_conn = self._server_mgr.get_db_connection()
Subnet.set_db_conn(self._db_conn)
return self._db_conn
# end _get_db_conn
def _get_subnet_dicts(self, vn_fq_name, vn_dict=None):
db_conn = self._get_db_conn()
vn_uuid = db_conn.fq_name_to_uuid('virtual-network', vn_fq_name)
# Read in the VN details if not passed in
if not vn_dict:
(ok, result) = self._db_conn.dbe_read(
obj_type='virtual-network',
obj_ids={'uuid': vn_uuid},
obj_fields=['network_ipam_refs'])
if not ok:
                raise cfgm_common.exceptions.VncError(result)
vn_dict = result
vn_fq_name_str = ':'.join(vn_fq_name)
ipam_refs = vn_dict.get('network_ipam_refs', [])
# gather all subnets, return dict keyed by name
subnet_dicts = {}
for ipam_ref in ipam_refs:
vnsn_data = ipam_ref['attr']
ipam_subnets = vnsn_data['ipam_subnets']
for ipam_subnet in ipam_subnets:
subnet_dict = ipam_subnet['subnet']
subnet_dict['gw'] = ipam_subnet['default_gateway']
subnet_name = subnet_dict['ip_prefix'] + '/' + str(
subnet_dict['ip_prefix_len'])
subnet_dicts[subnet_name] = subnet_dict
return subnet_dicts
# end _get_subnet_dicts
def _create_subnet_objs(self, vn_fq_name_str, vn_dict):
self._subnet_objs[vn_fq_name_str] = {}
# create subnet for each new subnet
refs = vn_dict.get('network_ipam_refs', None)
if refs:
for ref in refs:
ipam_fq_name_str = ':'.join(ref['to'])
vnsn_data = ref['attr']
ipam_subnets = vnsn_data['ipam_subnets']
for ipam_subnet in ipam_subnets:
subnet = ipam_subnet['subnet']
subnet_name = subnet['ip_prefix'] + '/' + str(
subnet['ip_prefix_len'])
gateway_ip = ipam_subnet.get('default_gateway', None)
subnet_obj = Subnet(
'%s:%s' % (vn_fq_name_str, subnet_name),
subnet['ip_prefix'], str(subnet['ip_prefix_len']),
gw=gateway_ip)
self._subnet_objs[vn_fq_name_str][subnet_name] = \
subnet_obj
ipam_subnet['default_gateway'] = str(subnet_obj.gw_ip)
# end _create_subnet_objs
def net_create_req(self, obj_dict):
self._get_db_conn()
vn_fq_name_str = ':'.join(obj_dict['fq_name'])
self._create_subnet_objs(vn_fq_name_str, obj_dict)
# end net_create_req
def net_create_notify(self, obj_ids, obj_dict):
db_conn = self._get_db_conn()
try:
(ok, result) = db_conn.dbe_read(
'virtual-network',
obj_ids={'uuid': obj_ids['uuid']},
obj_fields=['fq_name', 'network_ipam_refs'])
except cfgm_common.exceptions.NoIdError:
return
if not ok:
print "Error: %s in net_create_notify" %(result)
return
vn_dict = result
vn_fq_name_str = ':'.join(vn_dict['fq_name'])
self._create_subnet_objs(vn_fq_name_str, vn_dict)
# end net_create_notify
def net_update_req(self, vn_fq_name, db_vn_dict, req_vn_dict, obj_uuid=None):
# ideally 3 way sync/audit needed here. DB to what we is in subnet_objs
# DB to what is in request. To simplify blow away subnet_objs and do
# sync only from DB to request.
vn_fq_name_str = ':'.join(vn_fq_name)
try:
del self._subnet_objs[vn_fq_name_str]
except KeyError:
pass
db_subnet_dicts = self._get_subnet_dicts(vn_fq_name, db_vn_dict)
req_subnet_dicts = self._get_subnet_dicts(vn_fq_name, req_vn_dict)
db_subnet_names = set([sname for sname in db_subnet_dicts])
req_subnet_names = set([sname for sname in req_subnet_dicts])
del_subnet_names = db_subnet_names - req_subnet_names
add_subnet_names = req_subnet_names - db_subnet_names
for subnet_name in del_subnet_names:
Subnet.delete_cls('%s:%s' % (vn_fq_name_str, subnet_name))
self._create_subnet_objs(vn_fq_name_str, req_vn_dict)
# end net_update_req
def net_update_notify(self, obj_ids):
db_conn = self._get_db_conn()
try:
(ok, result) = db_conn.dbe_read(
obj_type='virtual-network',
obj_ids={'uuid': obj_ids['uuid']},
obj_fields=['fq_name', 'network_ipam_refs'])
except cfgm_common.exceptions.NoIdError:
return
if not ok:
print "Error: %s in net_update_notify" %(result)
return
vn_dict = result
vn_fq_name_str = ':'.join(vn_dict['fq_name'])
try:
del self._subnet_objs[vn_fq_name_str]
except KeyError:
pass
self._create_subnet_objs(vn_fq_name_str, vn_dict)
# end net_update_notify
# purge all subnets associated with a virtual network
def net_delete_req(self, obj_dict):
vn_fq_name = obj_dict['fq_name']
vn_fq_name_str = ':'.join(vn_fq_name)
subnet_dicts = self._get_subnet_dicts(vn_fq_name)
for subnet_name in subnet_dicts:
Subnet.delete_cls('%s:%s' % (vn_fq_name_str, subnet_name))
try:
vn_fq_name_str = ':'.join(vn_fq_name)
del self._subnet_objs[vn_fq_name_str]
except KeyError:
pass
# end net_delete_req
def net_delete_notify(self, obj_ids, obj_dict):
try:
vn_fq_name_str = ':'.join(obj_dict['fq_name'])
del self._subnet_objs[vn_fq_name_str]
except KeyError:
pass
# end net_delete_notify
def _vn_to_subnets(self, obj_dict):
# given a VN return its subnets in list of net/prefixlen strings
ipam_refs = obj_dict.get('network_ipam_refs', None)
if ipam_refs != None:
subnet_list = []
for ref in ipam_refs:
vnsn_data = ref['attr']
ipam_subnets = vnsn_data['ipam_subnets']
for ipam_subnet in ipam_subnets:
subnet = ipam_subnet['subnet']
subnet_name = subnet['ip_prefix'] + '/' + str(
subnet['ip_prefix_len'])
subnet_list.append(subnet_name)
else:
subnet_list = None
return subnet_list
# end _vn_to_subnets
# check subnets associated with a virtual network, return error if
# any two subnets have overlap ip addresses
def net_check_subnet_overlap(self, db_vn_dict, req_vn_dict):
# get all subnets existing + requested and check any non-exact overlaps
requested_subnets = self._vn_to_subnets(req_vn_dict)
if not requested_subnets:
return True, ""
existing_subnets = self._vn_to_subnets(db_vn_dict)
if not existing_subnets:
existing_subnets = []
# literal/string sets
# eg. existing [1.1.1.0/24],
# requested [1.1.1.0/24, 2.2.2.0/24] OR
# requested [1.1.1.0/16, 2.2.2.0/24]
existing_set = set([sn for sn in existing_subnets])
requested_set = set([sn for sn in requested_subnets])
new_set = requested_set - existing_set
# IPSet to find any overlapping subnets
overlap_set = IPSet(existing_set) & IPSet(new_set)
if overlap_set:
err_msg = "Overlapping addresses between requested and existing: "
return False, err_msg + str(overlap_set)
return True, ""
# end net_check_subnet_overlap
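    # Worked example (illustrative): with existing ['1.1.1.0/24'] and requested
    # ['1.1.1.0/24', '2.2.2.0/24'], new_set is {'2.2.2.0/24'} and its IPSet
    # intersection with the existing set is empty, so the update is accepted;
    # requesting '1.1.1.0/16' instead would overlap the existing /24 and fail.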
# check subnets associated with a virtual network, return error if
# any subnet is being deleted and has backref to instance-ip/floating-ip
def net_check_subnet_delete(self, db_vn_dict, req_vn_dict):
db_conn = self._get_db_conn()
# if all instance-ip/floating-ip are part of requested list
# things are ok.
# eg. existing [1.1.1.0/24, 2.2.2.0/24],
# requested [1.1.1.0/24] OR
# requested [1.1.1.0/28, 2.2.2.0/24]
requested_subnets = self._vn_to_subnets(req_vn_dict)
if requested_subnets == None:
# subnets not modified in request
return True, ""
# if all subnets are being removed, check for any iip backrefs
# or floating pools still present in DB version of VN
if len(requested_subnets) == 0:
if db_vn_dict.get('instance_ip_back_refs'):
return False, "Cannot Delete IP Block, Instance IP(s) in use"
if db_vn_dict.get('floating_ip_pools'):
return False, "Cannot Delete IP Block, Floating Pool(s) in use"
instip_refs = db_vn_dict.get('instance_ip_back_refs', [])
for ref in instip_refs:
try:
(ok, result) = db_conn.dbe_read(
'instance-ip', {'uuid': ref['uuid']})
except cfgm_common.exceptions.NoIdError:
continue
if not ok:
continue
inst_ip = result.get('instance_ip_address', None)
if not all_matching_cidrs(inst_ip, requested_subnets):
return False,\
"Cannot Delete IP Block, IP(%s) is in use"\
% (inst_ip)
fip_pool_refs = db_vn_dict.get('floating_ip_pools', [])
for ref in fip_pool_refs:
try:
(ok, result) = db_conn.dbe_read(
'floating-ip-pool', {'uuid': ref['uuid']})
except cfgm_common.exceptions.NoIdError:
continue
if not ok:
continue
floating_ips = result.get('floating_ips', [])
for floating_ip in floating_ips:
# get floating_ip_address and this should be in
# new subnet_list
try:
(read_ok, read_result) = db_conn.dbe_read(
'floating-ip', {'uuid': floating_ip['uuid']})
except cfgm_common.exceptions.NoIdError:
continue
                if not read_ok:
continue
fip_addr = read_result.get('floating_ip_address', None)
if not all_matching_cidrs(fip_addr, requested_subnets):
return False,\
"Cannot Delete IP Block, Floating IP(%s) is in use"\
% (fip_addr)
return True, ""
# end net_check_subnet_delete
# allocate an IP address for given virtual network
# we use the first available subnet unless provided
def ip_alloc_req(self, vn_fq_name, sub=None, asked_ip_addr=None):
vn_fq_name_str = ':'.join(vn_fq_name)
subnet_dicts = self._get_subnet_dicts(vn_fq_name)
if not subnet_dicts:
raise AddrMgmtSubnetUndefined(vn_fq_name_str)
for subnet_name in subnet_dicts:
if sub and sub != subnet_name:
continue
# create subnet_obj internally if it was created by some other
# api-server before
try:
subnet_obj = self._subnet_objs[vn_fq_name_str][subnet_name]
except KeyError:
if vn_fq_name_str not in self._subnet_objs:
self._subnet_objs[vn_fq_name_str] = {}
subnet_dict = subnet_dicts[subnet_name]
subnet_obj = Subnet('%s:%s' % (vn_fq_name_str,
subnet_name),
subnet_dict['ip_prefix'],
subnet_dict['ip_prefix_len'],
gw=subnet_dict['gw'])
self._subnet_objs[vn_fq_name_str][subnet_name] = subnet_obj
if asked_ip_addr and not subnet_obj.ip_belongs(asked_ip_addr):
continue
ip_addr = subnet_obj.ip_alloc(ipaddr=asked_ip_addr)
if ip_addr is not None or sub:
return ip_addr
raise AddrMgmtSubnetExhausted(vn_fq_name, 'all')
# end ip_alloc_req
def ip_alloc_notify(self, ip_addr, vn_fq_name):
vn_fq_name_str = ':'.join(vn_fq_name)
subnet_dicts = self._get_subnet_dicts(vn_fq_name)
for subnet_name in subnet_dicts:
# create subnet_obj internally if it was created by some other
# api-server before
try:
subnet_obj = self._subnet_objs[vn_fq_name_str][subnet_name]
except KeyError:
if vn_fq_name_str not in self._subnet_objs:
self._subnet_objs[vn_fq_name_str] = {}
subnet_dict = subnet_dicts[subnet_name]
subnet_obj = Subnet('%s:%s' % (vn_fq_name_str,
subnet_name),
subnet_dict['ip_prefix'],
subnet_dict['ip_prefix_len'],
gw=subnet_dict['gw'])
self._subnet_objs[vn_fq_name_str][subnet_name] = subnet_obj
if not subnet_obj.ip_belongs(ip_addr):
continue
ip_addr = subnet_obj.ip_alloc(ipaddr=ip_addr)
break
# end ip_alloc_notify
def ip_free_req(self, ip_addr, vn_fq_name, sub=None):
vn_fq_name_str = ':'.join(vn_fq_name)
subnet_dicts = self._get_subnet_dicts(vn_fq_name)
for subnet_name in subnet_dicts:
if sub and sub != subnet_name:
continue
# if we have subnet_obj free it via instance method,
# updating inuse bitmask, else free it via class method
# and there is no inuse bitmask to worry about
try:
subnet_obj = self._subnet_objs[vn_fq_name_str][subnet_name]
except KeyError:
if vn_fq_name_str not in self._subnet_objs:
self._subnet_objs[vn_fq_name_str] = {}
subnet_dict = subnet_dicts[subnet_name]
subnet_obj = Subnet('%s:%s' % (vn_fq_name_str,
subnet_name),
subnet_dict['ip_prefix'],
subnet_dict['ip_prefix_len'],
gw=subnet_dict['gw'])
self._subnet_objs[vn_fq_name_str][subnet_name] = subnet_obj
if Subnet.ip_belongs_to(IPNetwork(subnet_name),
IPAddress(ip_addr)):
subnet_obj.ip_free(IPAddress(ip_addr))
break
# end ip_free_req
def ip_free_notify(self, ip_addr, vn_fq_name):
vn_fq_name_str = ':'.join(vn_fq_name)
subnet_dicts = self._get_subnet_dicts(vn_fq_name)
for subnet_name in subnet_dicts:
try:
subnet_obj = self._subnet_objs[vn_fq_name_str][subnet_name]
except KeyError:
if vn_fq_name_str not in self._subnet_objs:
self._subnet_objs[vn_fq_name_str] = {}
subnet_dict = subnet_dicts[subnet_name]
subnet_obj = Subnet('%s:%s' % (vn_fq_name_str,
subnet_name),
subnet_dict['ip_prefix'],
subnet_dict['ip_prefix_len'],
gw=subnet_dict['gw'])
self._subnet_objs[vn_fq_name_str][subnet_name] = subnet_obj
if Subnet.ip_belongs_to(IPNetwork(subnet_name),
IPAddress(ip_addr)):
subnet_obj.ip_free(IPAddress(ip_addr))
break
# end ip_free_notify
    # Count the IP addresses allocated on the given virtual network that fall
    # within the given subnet (or list of subnets)
def ip_count(self, obj_dict, subnet=None):
db_conn = self._get_db_conn()
addr_num = 0
if not subnet:
return addr_num
instip_refs = obj_dict.get('instance_ip_back_refs', None)
if instip_refs:
for ref in instip_refs:
uuid = ref['uuid']
try:
(ok, result) = db_conn.dbe_read(
'instance-ip', {'uuid': uuid})
except cfgm_common.exceptions.NoIdError:
continue
if not ok:
continue
inst_ip = result.get('instance_ip_address', None)
if IPAddress(inst_ip) in IPNetwork(subnet):
addr_num += 1
return addr_num
# end ip_count
def mac_alloc(self, obj_dict):
uid = obj_dict['uuid']
return '02:%s:%s:%s:%s:%s' % (uid[0:2], uid[2:4], uid[4:6],
uid[6:8], uid[9:11])
# end mac_alloc
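    # For illustration (hypothetical uuid): '1a2b3c4d-5e6f-...' yields the MAC
    # '02:1a:2b:3c:4d:5e' -- the locally administered 02 prefix followed by the
    # first ten hex digits of the uuid, skipping the hyphen at index 8.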
# end class AddrMgmt
|
py | b41660b911e334755a10d6c18110562f7e9eb7d0 | """
WSGI config for liveweather project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'liveweather.settings')
application = get_wsgi_application()
|
py | b41660d7e2b149d11909a1574f177e3a780747f1 | # Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for mail_editor.py."""
import datetime
import re
import django.utils.translation
from google.appengine.api import mail
from google.appengine.api import users
from nose.tools import assert_raises
import cache
import export_test
import mail_editor
from feedlib.errors import *
from feedlib.xml_utils import Struct
from mail_editor import NO_CHANGE
from mail_editor_errors import AmbiguousUpdateNotice, BadValueNotice
from medium_test_case import MediumTestCase
from model import Account, Attribute, MailUpdateText, Message, MinimalSubject
from model import Subdomain, Subject, SubjectType
from setup import setup_mail_update_texts, setup_subdomains, setup_subject_types
from utils import db
SAMPLE_EMAIL_WORKING = '''UPDATE title_foo (example.org/123)
Available beds: 18
Total beds:222
Email [email protected]
Commune foo@bar!
Can pick_up patients yes'''
SAMPLE_EMAIL_AUTHENTICATION = '''nickname nickname_foo
affiliation affiliation_foo
UPDATE title_foo (example.org/123)
Available beds 18
Total beds 222
Email [email protected]
Commune foo@bar!
Can pick up patients yes'''
SAMPLE_EMAIL_AUTHENTICATION2 = '''nickname: nickname_foo
affiliation: affiliation_foo
update title_foo (example.org/123)
Available beds 18'''
SAMPLE_EMAIL_PARTIAL_AUTHENTICATION = 'nickname nickname_foo'
SAMPLE_EMAIL_BROKEN = '''UPDATE title_foo (example.org/123)
Available beds d
Total beds 222
Email [email protected]
Commune foo@bar!
Can pick up patients yes'''
SAMPLE_EMAIL_QUOTED = '''>> UPDATE title_foo (example.org/123)
>> Available beds 18
>> Total beds 222
>> Email [email protected]
>> Commune foo@bar!
>> Can pick up patients yes'''
SAMPLE_EMAIL_STOP = '''UPDATE title_foo (example.org/123)
Available beds 18
Total beds 222
Email [email protected]
Commune foo@bar!
--- --- --- ---
Can pick up patients yes'''
SAMPLE_EMAIL_MIXED = '''UPDATE title_foo (example.org/123)
Available beds 18
Total beds 222
Email [email protected]
Commune foo@bar!
Can pick up patients yes
>> UPDATE title_foo (example.org/123)
>> Available beds d
>> Total beds 222
>> Email [email protected]
>> Commune foo@bar!
>> Can pick up patients yes'''
SAMPLE_EMAIL_MULTIPLE = '''UPDATE title_foo (example.org/123)
Available beds 18
Total beds 222
Email [email protected]
Commune foo@bar!
Can pick up patients yes
UPDATE title_bar
Available beds 20'''
SAMPLE_EMAIL_AMBIGUOUS = '''UPDATE title_foobar
Total beds 77'''
SAMPLE_EMAIL_AMBIGUOUS_WITH_KEYS = '''update title_foobar (example.org/789)
Total beds 77
update title_foobar (example.org/012)
total beds 76'''
SAMPLE_EMAIL_AMBIGUOUS_UPDATE_WORKING = '''update title_foo
commune: code 1
commune code: 1'''
SAMPLE_EMAIL_AMBIGUOUS_UPDATE_BROKEN = '''update title_foo
commune code 1'''
SAMPLE_EMAIL_ENUMS = '''update title_foo
services: -x-ray, +general surgery
operational status:operational'''
SAMPLE_EMAIL_ENUMS2 = '''update title_foo
services: -general surgery, ct scan'''
SAMPLE_EMAIL_ENUMS3 = '''update title_foo
services: general surgery, +x-ray'''
SAMPLE_EMAIL_ENUMS4 = '''update title_foo
services: -general surgery, -x-ray, -ct scan'''
SAMPLE_EMAIL_ENUMS_OVERWRITE = '''update title_foo
services: xray'''
SAMPLE_EMAIL_ENUMS_WITH_ERRORS = '''update title_foo
services: x'''
SAMPLE_EMAIL_WITH_ABBREVIATION = '''update title_foo
tb: 9999'''
SAMPLE_EMAIL_WITH_ABBREVIATION2 = '''update title_foo
tb: 9999
ab: 10000
serv: ct, gen surgery, xray
op status: operational'''
SAMPLE_EMAIL_WITH_AMBIGUOUS_MAP = '''update title_foo
total beds 8888'''
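# The samples above exercise the mail-update grammar these tests assume: an
# "update <title> (<optional source key>)" line opens a stanza, each following
# "<attribute name>[:] <value>" line updates one attribute, a "--- --- --- ---"
# line stops processing, ">>"-quoted text is honored only when the whole update
# is quoted, and "nickname"/"affiliation" lines supply profile info for new
# accounts.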
class MailEditorTest(MediumTestCase):
def setUp(self):
MediumTestCase.setUp(self)
django.utils.translation.activate('en')
cache.flush_all()
self.email = '[email protected]'
self.user = users.User(self.email)
self.account = Account(email=self.email, actions=['*:*'],
locale='en', nickname='nickname_foo',
affiliation='affiliation_foo')
self.subject = Subject(key_name='haiti:example.org/123',
type='hospital', author=self.user,
title__='title_foo', healthc_id__='123',
services__=['X_RAY'])
self.subject2 = Subject(key_name='haiti:example.org/456',
type='hospital', author=self.user,
title__='title_bar', healthc_id__='456')
self.subject3 = Subject(key_name='haiti:example.org/789',
type='hospital', author=self.user,
title__='title_foobar', healthc_id__='789')
self.subject4 = Subject(key_name='haiti:example.org/012',
type='hospital', author=self.user,
title__='title_foobar', healthc_id__='012')
self.ms = MinimalSubject.create(self.subject)
self.ms.set_attribute('title', 'title_foo')
self.ms2 = MinimalSubject.create(self.subject2)
self.ms2.set_attribute('title', 'title_bar')
self.ms3 = MinimalSubject.create(self.subject3)
self.ms3.set_attribute('title', 'title_foobar')
self.ms4 = MinimalSubject.create(self.subject4)
self.ms4.set_attribute('title', 'title_foobar')
attribute_names = export_test.STR_FIELDS + \
export_test.INT_FIELDS + \
export_test.BOOL_FIELDS.keys() + \
export_test.SELECT_FIELDS.keys() + \
['services']
self.subject_type = SubjectType(key_name='haiti:hospital',
attribute_names=attribute_names)
self.subdomain = Subdomain(key_name='haiti')
db.put([self.account, self.subject, self.subject2, self.subject3,
self.subject4, self.subject_type, self.subdomain,
self.ms, self.ms2, self.ms3, self.ms4])
for field in export_test.STR_FIELDS:
Attribute(key_name=field, type='str').put()
for field in export_test.INT_FIELDS:
Attribute(key_name=field, type='int').put()
for field in export_test.BOOL_FIELDS:
Attribute(key_name=field, type='bool').put()
for field in export_test.SELECT_FIELDS:
Attribute(key_name=field, type='choice',
values=['OPERATIONAL']).put()
Attribute(key_name='services', type='multi',
values=export_test.SERVICES).put()
Attribute(key_name='location', type='geopt').put()
Message(ns='attribute_value', en='X-Ray', name='X_RAY').put()
Message(ns='attribute_value', en='General Surgery',
name='GENERAL_SURGERY').put()
Message(ns='attribute_value', en='CT Scan', name='CT_SCAN').put()
Message(ns='attribute_value', en='Operational',
name='OPERATIONAL').put()
Message(ns='attribute_name', en='Available beds',
name='available_beds').put()
Message(ns='attribute_name', en='Email', name='email').put()
Message(ns='attribute_name', en='Commune', name='commune').put()
Message(ns='attribute_name', en='Services', name='services').put()
Message(ns='attribute_name', en='Total beds',
name='total_beds').put()
Message(ns='attribute_name', en='Can pick up patients',
name='can_pick_up_patients').put()
Message(ns='attribute_name', en='Commune code',
name='commune_code').put()
Message(ns='attribute_name', en='Operational status',
name='operational_status').put()
setup_mail_update_texts()
def tearDown(self):
db.delete([self.account, self.subject, self.subject2, self.subject3,
self.subject4, self.subject_type, self.subdomain,
self.ms, self.ms2, self.ms3, self.ms4])
for attribute in Attribute.all():
db.delete(attribute)
for attr_map in MailUpdateText.all():
db.delete(attr_map)
for message in Message.all():
db.delete(message)
def test_parse(self):
"""Confirm that the parse function properly translates string values
into datastore-friendly values."""
# test an int attribute
attribute = Attribute.get_by_key_name('available_beds')
update = '222'
assert mail_editor.parse(attribute, update) == (222, None)
# make sure it returns an error when an int is expected but not received
update = 'd'
value, notice = mail_editor.parse(attribute, update)
assert value is NO_CHANGE
assert isinstance(notice, BadValueNotice)
assert notice.update_text == update
# test a string attribute
attribute = Attribute.get_by_key_name('organization')
update = 'organization_foo'
assert (mail_editor.parse(attribute, update) ==
('organization_foo', None))
# test like attributes names
attribute = Attribute.get_by_key_name('commune_code')
update = '12345'
assert mail_editor.parse(attribute, update) == (12345, None)
# test a bool attribute
attribute = Attribute.get_by_key_name('reachable_by_road')
update = 'y'
assert mail_editor.parse(attribute, update) == (True, None)
update = 'yEs'
assert mail_editor.parse(attribute, update) == (True, None)
update = 'no'
assert mail_editor.parse(attribute, update) == (False, None)
update = '!'
value, notice = mail_editor.parse(attribute, update)
assert value is NO_CHANGE
assert isinstance(notice, BadValueNotice)
assert notice.update_text == update
# test a geopt attribute
attribute = Attribute.get_by_key_name('location')
update = '18.5, 18'
assert mail_editor.parse(attribute, update) == (
db.GeoPt(18.5, 18), None)
update = '0, 0'
assert mail_editor.parse(attribute, update) == (
db.GeoPt(0, 0), None)
update = '0,0'
assert mail_editor.parse(attribute, update) == (
db.GeoPt(0, 0), None)
update = '18.5'
value, notice = mail_editor.parse(attribute, update)
assert value is NO_CHANGE
assert isinstance(notice, BadValueNotice)
assert notice.update_text == update
update = '18.5, 18, 17.5'
value, notice = mail_editor.parse(attribute, update)
assert value is NO_CHANGE
assert isinstance(notice, BadValueNotice)
assert notice.update_text == update
update = 'a,b'
value, notice = mail_editor.parse(attribute, update)
assert value is NO_CHANGE
assert isinstance(notice, BadValueNotice)
assert notice.update_text == update
# test a choice attribute
attribute = Attribute.get_by_key_name('operational_status')
update = 'operational'
assert mail_editor.parse(attribute, update) == ('OPERATIONAL', None)
update = 'foo'
value, notice = mail_editor.parse(attribute, update)
assert value is NO_CHANGE
assert isinstance(notice, BadValueNotice)
assert notice.update_text == update
# test a multi attribute
attribute = Attribute.get_by_key_name('services')
update = 'general surgery, -x-ray'
assert mail_editor.parse(attribute, update) == (
(['X_RAY'], ['GENERAL_SURGERY']), [])
update += ', x'
value, notice = mail_editor.parse(attribute, update)
assert value == (['X_RAY'], ['GENERAL_SURGERY'])
assert isinstance(notice, BadValueNotice)
assert notice.update_text == 'x'
# test a value being set to null
update = '*none'
assert mail_editor.parse(attribute, update) == (None, None)
# test an unfound value
update = ''
assert mail_editor.parse(attribute, update) == (NO_CHANGE, None)
def test_mail_editor_have_profile_info(self):
"""Confirms that have_profile_info() identifies existing users."""
message = mail_editor.mail.EmailMessage(
sender=self.account.email,
to='[email protected]',
subject='Resource Finder Updates',
body=SAMPLE_EMAIL_WORKING)
mail_editor_ = mail_editor.MailEditor()
mail_editor_.request = Struct(headers={'Host': 'localhost:80'})
mail_editor_.init(message)
mail_editor_.email = '[email protected]'
assert mail_editor_.have_profile_info()
mail_editor_.account.nickname = ''
assert not mail_editor_.have_profile_info()
mail_editor_.account.affiliation = ''
assert not mail_editor_.have_profile_info()
mail_editor_.account.nickname = 'nickname_foo'
assert not mail_editor_.have_profile_info()
db.delete(self.account)
assert not mail_editor_.have_profile_info()
def test_mail_editor_check_and_store_profile_info(self):
"""Confirm that check_and_store_profile_info() identifies messages sent
with authentication information for the user."""
message = mail_editor.mail.EmailMessage(
sender=self.account.email,
to='[email protected]',
subject='Resource Finder Updates',
# check authentication without colons
body=SAMPLE_EMAIL_AUTHENTICATION)
mail_editor_ = mail_editor.MailEditor()
mail_editor_.request = Struct(headers={'Host': 'localhost:8080'})
mail_editor_.init(message)
mail_editor_.account = None
assert mail_editor_.check_and_store_profile_info(message)
# check authentication with colons
message.body = SAMPLE_EMAIL_AUTHENTICATION2
assert mail_editor_.check_and_store_profile_info(message)
message.body=SAMPLE_EMAIL_WORKING
mail_editor_.account = None
assert not mail_editor_.check_and_store_profile_info(message)
def test_mail_editor_receive(self):
"""Confirm that it receives and properly sends an email with the
information from the received update."""
def incr_msg_time(msg):
"""Used to increment the date of an InboundEmailMessage by 1
minute. Needed in order to change the same attribute twice
with the same message."""
minute_idx = msg.date.rfind(':') + 1
minute = msg.date[minute_idx:minute_idx + 2]
msg.date = msg.date.replace(minute, str((int(minute) + 1) % 60))
return msg
self.sent_messages = []
message = mail.InboundEmailMessage(
sender=self.account.email,
to='[email protected]',
subject='Resource Finder Updates',
body=SAMPLE_EMAIL_WORKING,
date='Wed, 04 Aug 2010 13:07:18 -0400')
request = Struct(url='test/path', path='/path',
headers={'Host': 'localhost:8080'},
domain='localhost:8080')
mail_editor_ = mail_editor.MailEditor()
mail_editor_.request = request
num_emails = 0
# check working update email
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
self.check_for_correct_update(body, self.sent_messages[num_emails])
num_emails += 1
assert len(self.sent_messages) == num_emails
# check broken email
message.body = SAMPLE_EMAIL_BROKEN
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' in self.sent_messages[num_emails].subject()
assert '"Available beds" requires a numerical value.' in body
assert body.count('--- --- --- ---') == 2
assert 'REFERENCE DOCUMENT' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check working quoted email
message.body = SAMPLE_EMAIL_QUOTED
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
self.check_for_correct_update(body, self.sent_messages[num_emails])
num_emails += 1
assert len(self.sent_messages) == num_emails
# check working mixed email. should ignore the error in the quoted area
message.body = SAMPLE_EMAIL_MIXED
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
self.check_for_correct_update(body, self.sent_messages[num_emails])
num_emails += 1
assert len(self.sent_messages) == num_emails
db.delete(self.account)
# check working but not authenticated email
message.body = SAMPLE_EMAIL_WORKING
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert mail_editor_.need_profile_info
assert 'nickname' in body
assert 'affiliation' in body
assert 'Pending updates' in body
assert not Account.all().get()
num_emails += 1
assert len(self.sent_messages) == num_emails
# send it an authentication email
message.body = SAMPLE_EMAIL_AUTHENTICATION
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert Account.all().get()
assert not mail_editor_.need_profile_info
self.check_for_correct_update(body, self.sent_messages[num_emails])
num_emails += 1
assert len(self.sent_messages) == num_emails
# do same with an already existing account sans nickname/affiliation
self.account.nickname = None
self.account.affiliation = None
db.put(self.account)
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert Account.all().get()
assert not mail_editor_.need_profile_info
self.check_for_correct_update(body, self.sent_messages[num_emails])
num_emails += 1
assert len(self.sent_messages) == num_emails
# check working email with stop delimeter
message.body = SAMPLE_EMAIL_STOP
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert not 'update title_foo' in body
assert 'title_foo' in body
assert 'Available beds' in body and '18' in body
assert 'Total beds' in body and '22' in body
assert 'Email' in body and '[email protected]' in body
assert 'Commune' in body and 'foo@bar!' in body
assert 'Can pick up patients' not in body and 'yes' not in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with multiple subjects
message.body = SAMPLE_EMAIL_MULTIPLE
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'title_foo' in body and 'title_bar' in body
assert 'update title_foo' not in body
assert 'update title_bar' not in body
assert 'Available beds' in body and '18' in body and '20' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with an ambiguous subject
message.body = SAMPLE_EMAIL_AMBIGUOUS
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' in self.sent_messages[num_emails].subject()
assert 'title_foobar' in body and 'ambiguous' in body
assert 'Try again with one of the following' in body
assert 'example.org/789' in body
assert 'example.org/012' in body
assert 'Total beds 77' in body
assert 'REFERENCE DOCUMENT' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
        # check email with multiple facilities sharing the same title (and unique keys)
message.body = SAMPLE_EMAIL_AMBIGUOUS_WITH_KEYS
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'title_foobar' in body and '789' in body and '012' in body
assert 'Total beds' in body and '77' in body and '76' in body
assert 'REFERENCE DOCUMENT' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with correct [though potentially ambiguous] update details
message.body = SAMPLE_EMAIL_AMBIGUOUS_UPDATE_WORKING
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'title_foo' in body
assert 'Commune' in body and 'code 1' in body
assert 'Commune code' in body and '1' in body
assert 'REFERENCE DOCUMENT' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with incorrect / ambiguous update details
message.body = SAMPLE_EMAIL_AMBIGUOUS_UPDATE_BROKEN
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' in self.sent_messages[num_emails].subject()
assert 'Attribute name is ambiguous' in body
assert 'Commune:' in body and 'Commune code:' in body
assert 'REFERENCE DOCUMENT' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with enums
message.body = SAMPLE_EMAIL_ENUMS
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'Services' in body and 'General Surgery' in body
assert 'X-Ray' not in body
assert 'Operational status' in body and 'Operational' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
message.body = SAMPLE_EMAIL_ENUMS2
message = incr_msg_time(message)
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'Services' in body and 'CT Scan' in body
assert 'General Surgery' not in body
num_emails += 1
assert len(self.sent_messages) == num_emails
message.body = SAMPLE_EMAIL_ENUMS3
message = incr_msg_time(message)
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'Services' in body and 'CT Scan' in body
assert 'General Surgery' in body and 'X-Ray' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
message.body = SAMPLE_EMAIL_ENUMS4
message = incr_msg_time(message)
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'Services' in body and 'CT Scan' not in body
assert 'General Surgery' not in body and 'X-Ray' not in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with an enum overwrite
message.body = SAMPLE_EMAIL_ENUMS_OVERWRITE
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'Services' in body and 'X-Ray' in body
assert 'General Surgery' not in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with enums and error
message.body = SAMPLE_EMAIL_ENUMS_WITH_ERRORS
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' in self.sent_messages[num_emails].subject()
assert 'Services' in body and 'x' in body
assert 'requires all values' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with an abbreviation for the attribute name
message.body = SAMPLE_EMAIL_WITH_ABBREVIATION
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'Total beds' in body and '9999' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with multiple abbreviations in the attribute
# names and values
message.body = SAMPLE_EMAIL_WITH_ABBREVIATION2
message = incr_msg_time(message)
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'Total beds' in body and '9999' in body
assert 'Available beds' in body and '10000' in body
assert 'Services' in body and 'CT Scan' in body
assert 'General Surgery' in body and 'X-Ray' in body
assert 'Operational status' in body and 'Operational' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
# check email with mixed attribute names
message.body = SAMPLE_EMAIL_WITH_AMBIGUOUS_MAP
mail_editor_.receive(message)
body = self.sent_messages[num_emails].textbody()
assert 'ERROR' not in self.sent_messages[num_emails].subject()
assert 'Total beds' in body and '8888' in body
num_emails += 1
assert len(self.sent_messages) == num_emails
def test_mail_editor_process_email(self):
"""Confirms that process_email() returns a properly formatted structure
of updates and errors, given the body of an email."""
mail_editor_ = mail_editor.MailEditor()
mail_editor_.request = Struct(headers={'Host': 'localhost:8080'})
mail_editor_.init(message=Struct(
sender='[email protected]',
to='[email protected]'))
# check working email body
data = mail_editor_.process_email(SAMPLE_EMAIL_WORKING)
updates = data.update_stanzas
assert updates[0][0].key().name() == 'haiti:example.org/123'
# updates[first_update][subject_data][attribute_#]
assert 'available_beds' in updates[0][1][0]
assert 'total_beds' in updates[0][1][1]
assert 'email' in updates[0][1][2]
assert 'commune' in updates[0][1][3]
assert 'can_pick_up_patients' in updates[0][1][4]
assert not data.notice_stanzas
# check broken email body
data = mail_editor_.process_email(SAMPLE_EMAIL_BROKEN)
updates = data.update_stanzas
errors = data.notice_stanzas
assert updates[0][0].key().name() == 'haiti:example.org/123'
assert 'Available beds: d' in errors[0][1][0]['original_line']
assert len(updates[0][1]) == 4
# check quoted email body
data = mail_editor_.process_email(SAMPLE_EMAIL_QUOTED)
updates = data.update_stanzas
assert updates[0][0].key().name() == 'haiti:example.org/123'
assert 'available_beds' in updates[0][1][0]
assert 'total_beds' in updates[0][1][1]
assert 'email' in updates[0][1][2]
assert 'commune' in updates[0][1][3]
assert 'can_pick_up_patients' in updates[0][1][4]
assert not data.notice_stanzas
# check mixed email body
data = mail_editor_.process_email(SAMPLE_EMAIL_MIXED)
updates = data.update_stanzas
assert updates[0][0].key().name() == 'haiti:example.org/123'
assert 'available_beds' in updates[0][1][0]
assert 'total_beds' in updates[0][1][1]
assert 'email' in updates[0][1][2]
assert 'commune' in updates[0][1][3]
assert 'can_pick_up_patients' in updates[0][1][4]
assert not data.notice_stanzas
# check stop delimeter'd body
data = mail_editor_.process_email(SAMPLE_EMAIL_STOP)
updates = data.update_stanzas
assert updates[0][0].key().name() == 'haiti:example.org/123'
assert 'available_beds' in updates[0][1][0]
assert 'total_beds' in updates[0][1][1]
assert 'email' in updates[0][1][2]
assert 'commune' in updates[0][1][3]
assert not data.notice_stanzas
def test_mail_editor_update_subjects(self):
"""Confirm that update_subjects() properly updates the datastore."""
mail_editor_ = mail_editor.MailEditor()
mail_editor_.account = self.account
mail_editor_.request = Struct(headers={'Host': 'localhost:8080'})
mail_editor_.init(message=Struct(
sender='[email protected]',
to='[email protected]'))
data = mail_editor_.process_email(SAMPLE_EMAIL_WORKING)
mail_editor_.update_subjects(
data.update_stanzas, datetime.datetime(2010, 8, 4))
subject = Subject.get('haiti', 'example.org/123')
assert subject.get_value('available_beds') == 18
assert subject.get_value('total_beds') == 222
assert subject.get_value('commune') == 'foo@bar!'
assert subject.get_value('email') == '[email protected]'
assert subject.get_value('can_pick_up_patients')
def test_mail_editor_send_email(self):
"""Confirms that the appropriate information is sent in an email back to
the user as a confirmation / response / request / whatever. Ignoring the
formatting of the email, as that is subject to change."""
self.sent_messages = []
message = mail.InboundEmailMessage(
sender=self.account.email,
to='[email protected]',
subject='Resource Finder Updates',
body=SAMPLE_EMAIL_WORKING,
date='Wed, 04 Aug 2010 13:07:18 -0400')
request = Struct(url='test/path', path='/path',
headers={'Host': 'localhost:8080'})
mail_editor_ = mail_editor.MailEditor()
mail_editor_.account = self.account
mail_editor_.need_profile_info = False
mail_editor_.request = request
mail_editor_.init(message=Struct(
sender='[email protected]',
to='[email protected]'))
data = mail_editor_.process_email(SAMPLE_EMAIL_WORKING)
mail_editor_.send_email(message, data)
# updates, no errors
assert len(self.sent_messages) == 1
body = self.sent_messages[0].textbody()
self.check_for_correct_update(body, self.sent_messages[0])
# updates, errors
message.body = SAMPLE_EMAIL_BROKEN
data = mail_editor_.process_email(SAMPLE_EMAIL_BROKEN)
mail_editor_.send_email(message, data)
assert len(self.sent_messages) == 2
body = self.sent_messages[1].textbody()
assert 'ERROR' in self.sent_messages[1].subject()
assert 'update' in body
assert 'REFERENCE DOCUMENT' in body
mail_editor_.account = None
mail_editor_.need_profile_info = True
db.delete(self.account)
# need authentication
message.body = SAMPLE_EMAIL_WORKING
data = mail_editor_.process_email(SAMPLE_EMAIL_WORKING)
mail_editor_.send_email(message, data)
assert len(self.sent_messages) == 3
body = self.sent_messages[2].textbody()
assert 'ERROR' not in self.sent_messages[2].subject()
assert 'nickname' in body
assert 'affiliation' in body
assert 'Pending updates' in body
def test_match_nickname_affiliation(self):
mail_editor_ = mail_editor.MailEditor()
mail_editor_.update_line_flags = re.UNICODE | re.MULTILINE | re.I
assert mail_editor_.match_nickname_affiliation(
SAMPLE_EMAIL_AUTHENTICATION) == ('nickname_foo', 'affiliation_foo')
assert mail_editor_.match_nickname_affiliation(
SAMPLE_EMAIL_PARTIAL_AUTHENTICATION) == ('nickname_foo', None)
def test_mail_editor_extract_subject_from_update_line(self):
mail_editor_ = mail_editor.MailEditor()
mail_editor_.update_line_flags = re.UNICODE | re.MULTILINE | re.I
message = mail_editor.mail.EmailMessage(
sender=self.account.email,
to='[email protected]',
subject='Resource Finder Updates',
body=SAMPLE_EMAIL_WORKING)
mail_editor_.request = Struct(headers={'Host': 'localhost:80'})
mail_editor_.init(message)
        match = re.match(mail_editor_.update_line_regexes['unquoted'],
                         SAMPLE_EMAIL_WORKING,
                         flags=mail_editor_.update_line_flags)
assert mail_editor_.extract_subject_from_update_line(match).get_value(
'title') == 'title_foo'
match = re.match(mail_editor_.update_line_regexes['quoted'],
SAMPLE_EMAIL_QUOTED,
flags=mail_editor_.update_line_flags)
assert mail_editor_.extract_subject_from_update_line(match).get_value(
'title') == 'title_foo'
match = re.match(mail_editor_.update_line_regexes['unquoted'],
SAMPLE_EMAIL_AMBIGUOUS,
flags=mail_editor_.update_line_flags)
subjects = mail_editor_.extract_subject_from_update_line(match)
assert len(subjects) == 2
assert (subjects[0].get_value('title') == subjects[1].get_value('title')
== 'title_foobar')
match = re.match(mail_editor_.update_line_regexes['unquoted'],
SAMPLE_EMAIL_AMBIGUOUS_UPDATE_WORKING,
flags=mail_editor_.update_line_flags)
assert mail_editor_.extract_subject_from_update_line(match).get_value(
'title') == 'title_foo'
def test_mail_editor_get_attribute_matches(self):
mail_editor_ = mail_editor.MailEditor()
mail_editor_.account = self.account
matches = mail_editor_.get_attribute_matches(
self.subject_type, 'commune code 1')
assert len(matches) == 2
assert matches[0][0] == 'commune_code'
assert matches[1][0] == 'commune'
match = mail_editor_.get_attribute_matches(
self.subject_type, 'commune: code 1')
assert match[0] == 'commune'
assert match[1] == 'code 1'
match = mail_editor_.get_attribute_matches(
self.subject_type, 'commune code: 1')
assert match[0] == 'commune_code'
assert match[1] == '1'
match = mail_editor_.get_attribute_matches(
self.subject_type, 'commune code:1')
assert match[0] == 'commune_code'
assert match[1] == '1'
def test_match_email(self):
assert mail_editor.match_email('[email protected]') == '[email protected]'
assert (mail_editor.match_email(u't\[email protected]') ==
u't\[email protected]')
assert (mail_editor.match_email(' [email protected] ') ==
'[email protected]')
assert (mail_editor.match_email('"First Last" ' +
'<[email protected]>') == '[email protected]')
assert (mail_editor.match_email('12_3%[email protected]') ==
'12_3%[email protected]')
assert (mail_editor.match_email('<[email protected]>') ==
'[email protected]')
assert not mail_editor.match_email('test@')
assert not mail_editor.match_email('.com')
assert not mail_editor.match_email('test@examplecom')
assert not mail_editor.match_email('test')
assert not mail_editor.match_email('')
def test_update_line_regexes(self):
mail_editor_ = mail_editor.MailEditor()
mail_editor_.request = Struct(headers={'Host': 'localhost:8080'})
mail_editor_.init(message=Struct(
sender='[email protected]',
to='[email protected]'))
line_title_key = 'update Title Foo (example.org/123)'
line_key = 'update (example.org/123)'
line_title = 'update Title Foo'
line_extra_chars_key_title = 'update Title_foo (ICU) ' + \
'(example.org/123)'
line_extra_chars_title = 'update Title-foo (ICU)'
line_extra_chars_title_snafu = 'update Title_foo (I:CU)'
line_unicode_title_key = u'upDAte Titl\u00e9Foo (example.org/123)'
line_unicode_title = u'update Titl\u00e9Foo'
line_unicode_key = u'update (\u00e9xample.org/123)'
def match(regex, line):
return re.match(regex, line, flags=mail_editor_.update_line_flags)
def check_regex_without_key(regex, prefix=''):
m = match(regex, prefix + line_title_key).groupdict()
assert m['subject'].strip() == 'Title Foo (example.org/123)'
m = match(regex, prefix + line_key).groupdict()
assert m['subject'].strip() == '(example.org/123)'
m = match(regex, prefix + line_title).groupdict()
assert m['subject'].strip() == 'Title Foo'
m = match(regex, prefix + line_extra_chars_key_title).groupdict()
assert m['subject'].strip() == 'Title_foo (ICU) (example.org/123)'
m = match(regex, prefix + line_extra_chars_title).groupdict()
assert m['subject'].strip() == 'Title-foo (ICU)'
m = match(regex, prefix + line_extra_chars_title_snafu).groupdict()
assert m['subject'].strip() == 'Title_foo (I:CU)'
m = match(regex, prefix + line_unicode_title_key).groupdict()
assert m['subject'].strip() == u'Titl\u00e9Foo (example.org/123)'
m = match(regex, prefix + line_unicode_title).groupdict()
assert m['subject'].strip() == u'Titl\u00e9Foo'
m = match(regex, prefix + line_unicode_key).groupdict()
assert m['subject'].strip() == u'(\u00e9xample.org/123)'
# test unquoted base without key
regex = mail_editor_.update_line_regexes['unquoted']
check_regex_without_key(regex)
# test quoted base without key
regex = mail_editor_.update_line_regexes['quoted']
check_regex_without_key(regex, '>> ')
def test_parse_utc_offset(self):
assert not mail_editor.parse_utc_offset('')
assert not mail_editor.parse_utc_offset('test')
assert not mail_editor.parse_utc_offset('0350')
assert not mail_editor.parse_utc_offset('-9999')
assert not mail_editor.parse_utc_offset('+3124')
assert mail_editor.parse_utc_offset('-0134') == -datetime.timedelta(
hours=1, minutes=34)
assert mail_editor.parse_utc_offset('+0134') == datetime.timedelta(
0, 5640)
def test_mail_update_text(self):
"""Checks to make sure that no input strings from the MailUpdateText
table are used twice within the same subject type."""
setup_subdomains()
setup_subject_types()
for subdomain in cache.SUBDOMAINS:
for type in cache.SUBJECT_TYPES[subdomain]:
map = {}
for attribute in cache.SUBJECT_TYPES[
subdomain][type].attribute_names:
for value in cache.MAIL_UPDATE_TEXTS['attribute_name'][
attribute].en:
if value in map:
assert False
map[value] = 1
for value in cache.ATTRIBUTES[attribute].values:
for text in cache.MAIL_UPDATE_TEXTS['attribute_value'][
value].en:
if text in map:
assert False
map[text] = 1
def check_for_correct_update(self, body, message):
assert body.count('--- --- --- ---') == 2
assert 'title_foo (example.org/123)\n' in body
assert 'Email' in body and '[email protected]' in body
assert 'Commune' in body and 'foo@bar!' in body
assert 'Available beds' in body and '18' in body
assert 'Total beds' in body and '222' in body
assert 'Can pick up patients' in body and 'Yes' in body
assert '/help/email' in body
assert 'update title_foo' not in body
assert 'ERROR' not in message.subject()
assert self.email == message.to_list()[0]
assert ('[email protected]' ==
message.sender())
|
py | b4166267c10fae5c02334a44d2fce93cf1994b9c | # Copyright 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sklearn.exceptions import NotFittedError
import pytest
from mpi4py import MPI
def test_distributed_srm(): # noqa: C901
import brainiak.funcalign.srm
s = brainiak.funcalign.srm.SRM()
assert s, "Invalid SRM instance!"
import numpy as np
np.random.seed(0)
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
nrank = comm.Get_size()
voxels = 100
samples = 500
subjects = 2
features = 3
s = brainiak.funcalign.srm.SRM(n_iter=5, features=features, comm=comm)
assert s, "Invalid SRM instance!"
# Create a Shared response S with K = 3
theta = np.linspace(-4 * np.pi, 4 * np.pi, samples)
z = np.linspace(-2, 2, samples)
r = z**2 + 1
x = r * np.sin(theta)
y = r * np.cos(theta)
S = np.vstack((x, y, z))
# DSRM: broadcast S
S = comm.bcast(S)
X = []
W = []
# DSRM: only append on rank 0
Q, R = np.linalg.qr(np.random.random((voxels, features)))
tmp_noise = 0.1*np.random.random((voxels, samples))
if rank == 0:
W.append(Q)
X.append(Q.dot(S) + tmp_noise)
else:
W.append(None)
X.append(None)
# Check that transform does NOT run before fitting the model
with pytest.raises(NotFittedError):
s.transform(X)
if rank == 0:
print("Test: transforming before fitting the model")
# Check that it does NOT run with 1 subject
with pytest.raises(ValueError):
s.fit(X)
if rank == 0:
print("Test: running SRM with 1 subject")
# DSRM: cyclic distribution of subject data, otherwise None
for subject in range(1, subjects):
Q, R = np.linalg.qr(np.random.random((voxels, features)))
tmp_noise = 0.1*np.random.random((voxels, samples))
if subject % nrank == rank:
W.append(Q)
X.append(Q.dot(S) + tmp_noise)
else:
W.append(None)
X.append(None)
# Check that runs with 2 subject
s.fit(X)
from pathlib import Path
sr_v0_4 = np.load(Path(__file__).parent / "sr_v0_4.npz")['sr']
assert(np.allclose(sr_v0_4, s.s_))
assert len(s.w_) == subjects, (
"Invalid computation of SRM! (wrong # subjects in W)")
for subject in range(subjects):
if s.w_[subject] is not None:
assert s.w_[subject].shape[0] == voxels, (
"Invalid computation of SRM! (wrong # voxels in W)")
assert s.w_[subject].shape[1] == features, (
"Invalid computation of SRM! (wrong # features in W)")
ortho = np.linalg.norm(s.w_[subject].T.dot(s.w_[subject])
- np.eye(s.w_[subject].shape[1]),
'fro')
assert ortho < 1e-7, "A Wi mapping is not orthonormal in SRM."
difference = np.linalg.norm(X[subject] - s.w_[subject].dot(s.s_),
'fro')
datanorm = np.linalg.norm(X[subject], 'fro')
assert difference/datanorm < 1.0, (
"Model seems incorrectly computed.")
assert s.s_.shape[0] == features, (
"Invalid computation of SRM! (wrong # features in S)")
assert s.s_.shape[1] == samples, (
"Invalid computation of SRM! (wrong # samples in S)")
# Check that it does run to compute the shared response after the model
# computation
new_s = s.transform(X)
assert len(new_s) == subjects, (
"Invalid computation of SRM! (wrong # subjects after transform)")
for subject in range(subjects):
if new_s[subject] is not None:
assert new_s[subject].shape[0] == features, (
"Invalid computation of SRM! (wrong # features after "
"transform)")
assert new_s[subject].shape[1] == samples, (
"Invalid computation of SRM! (wrong # samples after "
"transform)")
# Check that it does NOT run with non-matching number of subjects
with pytest.raises(ValueError):
s.transform([X[1]])
if rank == 0:
print("Test: transforming with non-matching number of subjects")
# Check that it does not run without enough samples (TRs).
with pytest.raises(ValueError):
s.set_params(features=(samples+1))
s.fit(X)
if rank == 0:
print("Test: not enough samples")
# Check that it does not run with different number of samples (TRs)
if rank == 0:
S2 = S[:, :-2]
X.append(Q.dot(S2))
else:
X.append(None)
with pytest.raises(ValueError):
s.fit(X)
if rank == 0:
print("Test: different number of samples per subject")
test_distributed_srm()
|
py | b4166361d86978bd696bc69f164a860abfcd86e1 | # -*- coding: utf-8 -*-
import numpy as np
import pytest
from celerite import terms as cterms
from celerite2 import terms
test_terms = [
cterms.RealTerm(log_a=np.log(2.5), log_c=np.log(1.1123)),
cterms.RealTerm(log_a=np.log(12.345), log_c=np.log(1.5))
+ cterms.RealTerm(log_a=np.log(0.5), log_c=np.log(1.1234)),
cterms.ComplexTerm(
log_a=np.log(10.0), log_c=np.log(5.6), log_d=np.log(2.1)
),
cterms.ComplexTerm(
log_a=np.log(7.435),
log_b=np.log(0.5),
log_c=np.log(1.102),
log_d=np.log(1.05),
),
cterms.SHOTerm(
log_S0=np.log(1.1), log_Q=np.log(0.1), log_omega0=np.log(1.2)
),
cterms.SHOTerm(
log_S0=np.log(1.1), log_Q=np.log(2.5), log_omega0=np.log(1.2)
),
cterms.SHOTerm(
log_S0=np.log(1.1), log_Q=np.log(2.5), log_omega0=np.log(1.2)
)
+ cterms.RealTerm(log_a=np.log(1.345), log_c=np.log(2.4)),
cterms.SHOTerm(
log_S0=np.log(1.1), log_Q=np.log(2.5), log_omega0=np.log(1.2)
)
* cterms.RealTerm(log_a=np.log(1.345), log_c=np.log(2.4)),
cterms.Matern32Term(log_sigma=0.1, log_rho=0.4),
]
def _convert_kernel(celerite_kernel):
if isinstance(celerite_kernel, cterms.TermSum):
result = _convert_kernel(celerite_kernel.terms[0])
for k in celerite_kernel.terms[1:]:
result += _convert_kernel(k)
return result
elif isinstance(celerite_kernel, cterms.TermProduct):
return _convert_kernel(celerite_kernel.k1) * _convert_kernel(
celerite_kernel.k2
)
elif isinstance(celerite_kernel, cterms.RealTerm):
return terms.RealTerm(
a=np.exp(celerite_kernel.log_a), c=np.exp(celerite_kernel.log_c)
)
elif isinstance(celerite_kernel, cterms.ComplexTerm):
if not celerite_kernel.fit_b:
return terms.ComplexTerm(
a=np.exp(celerite_kernel.log_a),
b=0.0,
c=np.exp(celerite_kernel.log_c),
d=np.exp(celerite_kernel.log_d),
)
return terms.ComplexTerm(
a=np.exp(celerite_kernel.log_a),
b=np.exp(celerite_kernel.log_b),
c=np.exp(celerite_kernel.log_c),
d=np.exp(celerite_kernel.log_d),
)
elif isinstance(celerite_kernel, cterms.SHOTerm):
return terms.SHOTerm(
S0=np.exp(celerite_kernel.log_S0),
Q=np.exp(celerite_kernel.log_Q),
w0=np.exp(celerite_kernel.log_omega0),
)
elif isinstance(celerite_kernel, cterms.Matern32Term):
return terms.Matern32Term(
sigma=np.exp(celerite_kernel.log_sigma),
rho=np.exp(celerite_kernel.log_rho),
)
raise NotImplementedError()
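# Illustrative sketch (an assumption, not part of the original test file): converting a
# single celerite kernel with the helper above and inspecting its celerite2 coefficients.
#     k = cterms.SHOTerm(log_S0=0.0, log_Q=0.5, log_omega0=0.1)
#     print(_convert_kernel(k).get_coefficients())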
@pytest.mark.parametrize("oterm", test_terms)
def test_consistency(oterm):
# Check that the coefficients are all correct
term = _convert_kernel(oterm)
for v1, v2 in zip(oterm.get_all_coefficients(), term.get_coefficients()):
assert np.allclose(v1, v2)
for v1, v2 in zip(
terms.OriginalCeleriteTerm(oterm).get_coefficients(),
term.get_coefficients(),
):
assert np.allclose(v1, v2)
# Make sure that the covariance matrix is right
np.random.seed(40582)
x = np.sort(np.random.uniform(0, 10, 50))
diag = np.random.uniform(0.1, 0.3, len(x))
assert np.allclose(oterm.get_value(x), term.get_value(x))
tau = x[:, None] - x[None, :]
K = term.get_value(tau)
assert np.allclose(oterm.get_value(tau), K)
# And the power spectrum
omega = np.linspace(-10, 10, 500)
assert np.allclose(oterm.get_psd(omega), term.get_psd(omega))
# Add in the diagonal
K[np.diag_indices_from(K)] += diag
# Matrix vector multiply
y = np.sin(x)
value = term.dot(x, diag, y)
assert np.allclose(y, np.sin(x))
assert np.allclose(value, np.dot(K, y))
# Matrix-matrix multiply
y = np.vstack([x]).T
value = term.dot(x, diag, y)
assert np.allclose(value, np.dot(K, y))
|
py | b416640357f7369a3ac281d93e2332dcd4c532f6 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
# Create your models here.
class Perm(models.Model):
''' custom permission '''
module = models.CharField(max_length=100)
name = models.CharField(max_length=100)
code = models.CharField(max_length=100)
class Meta:
''' class meta info '''
db_table = 'cus_permission'
def __str__(self):
return self.name
class Role(models.Model):
''' user role '''
name = models.CharField(max_length=30, unique=True)
users = models.ManyToManyField(User)
perms = models.ManyToManyField(Perm, blank=True)
class Meta:
''' class meta info '''
db_table = 'user_role'
def __str__(self):
return self.name
class Profile(models.Model):
''' user profile '''
user = models.OneToOneField(User, related_name='profile', on_delete=models.CASCADE)
phone = models.CharField(max_length=30, blank=True)
wechat = models.CharField(max_length=30, blank=True)
comment = models.TextField(max_length=100, blank=True)
class Meta:
''' class meta info '''
db_table = 'user_profile'
def __str__(self):
return self.comment
@receiver(post_save, sender=User)
def create_or_update_user_profile(sender, instance, created, **kwargs):
'''create or update user profile '''
if created:
Profile.objects.create(user=instance)
instance.profile.save()
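# Illustrative sketch (an assumption, not part of the original module): because of the
# post_save receiver above, creating a User also creates and saves its Profile, so the
# related object is available immediately. The username and phone value are placeholders.
#     user = User.objects.create(username="jane")
#     user.profile.phone = "555-0100"
#     user.profile.save()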
class Service(models.Model):
name = models.CharField(max_length=100)
host = models.CharField(max_length=100)
port = models.IntegerField(blank=True)
path = models.CharField(max_length=200, blank=True)
user = models.CharField(max_length=100, blank=True)
password = models.CharField(max_length=100, blank=True)
comment = models.TextField(max_length=100, blank=True)
class Meta:
''' class meta info '''
db_table = 'admins_service'
def __str__(self):
return self.name
def __unicode__(self):
return self.name |
py | b4166503affc7ff11a1b9eb4e57417ec2f583841 | import functools
import glob
import json
import logging
import multiprocessing as mp
import numpy as np
import os
import re
from itertools import chain
import pycocotools.mask as mask_util
from PIL import Image
from detectron2.structures import BoxMode
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.utils.logger import setup_logger
from detectron2.utils.comm import get_world_size
from fvcore.common.file_io import PathManager
try:
import cv2 # noqa
except ImportError:
# OpenCV is an optional dependency at the moment
pass
class Grasp(object):
def __init__(self, points):
self.points = points
def __str__(self):
return str(self.points)
@property
def a_rad(self):
"""
:return: Angle of the grasp to the horizontal.
"""
dx = self.points[1, 1] - self.points[0, 1]
dy = self.points[1, 0] - self.points[0, 0]
return (np.arctan2(-dy, dx) + np.pi/2) % np.pi - np.pi/2
@property
def a(self):
return 180/np.pi * self.a_rad
@property
def c(self):
"""
:return: Rectangle center point
"""
        return self.points.mean(axis=0).astype(int)  # np.int was removed in newer NumPy
@property
def x(self):
return self.c[1]
@property
def y(self):
return self.c[0]
@property
def w(self):
"""
:return: Rectangle width (i.e. perpendicular to the axis of the grasp)
"""
dx = self.points[1, 1] - self.points[0, 1]
dy = self.points[1, 0] - self.points[0, 0]
return np.sqrt(dx ** 2 + dy ** 2)
@property
def h(self):
"""
:return: Rectangle height (i.e. along the axis of the grasp)
"""
dy = self.points[2, 1] - self.points[1, 1]
dx = self.points[2, 0] - self.points[1, 0]
return np.sqrt(dx ** 2 + dy ** 2)
@staticmethod
def load_grasps(f):
def text_to_num(l, offset=(0,0)):
x, y = l.split()
return [int(round(float(y))) - offset[0],
int(round(float(x))) - offset[1]]
while True:
# Load 4 lines at a time, corners of bounding box.
try:
p0 = f.readline()
if not p0:
break # EOF
p1, p2, p3 = f.readline(), f.readline(), f.readline()
gr = np.array([
text_to_num(p0),
text_to_num(p1),
text_to_num(p2),
text_to_num(p3)
])
yield Grasp(gr)
except ValueError:
# Some files contain weird values.
continue
@staticmethod
def load_grasps_plain(f):
for grasp in Grasp.load_grasps(f):
yield (grasp.x, grasp.y, grasp.w, grasp.h, grasp.a)
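# Illustrative usage sketch (an assumption, not part of the original file): reading one
# Cornell annotation file with the Grasp helpers above. The filename is a hypothetical
# placeholder.
#     with open("pcd0100cpos.txt") as f:
#         for xc, yc, w, h, a in Grasp.load_grasps_plain(f):
#             print(f"centre=({xc}, {yc}), size=({w:.1f}, {h:.1f}), angle={a:.1f} deg")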
def load_cornell_instances(image_dir, to_polygons=True):
"""
Args:
        image_dir (str): path to the raw Cornell grasping dataset, i.e. the directory containing the "pcd*r.png" images and "pcd*cpos.txt"/"pcd*cneg.txt" annotation files.
to_polygons (bool): whether to represent the segmentation as polygons
(COCO's format) instead of masks (cityscapes's format).
Returns:
list[dict]: a list of dicts in Detectron2 standard format. (See
`Using Custom Datasets </tutorials/datasets.html>`_ )
"""
files = []
for grasps_file in glob.glob(os.path.join(image_dir, "*cpos.txt")):
assert os.path.isfile(grasps_file), grasps_file
        cat_id = int(re.search(r"pcd(\d+)cpos\.txt", grasps_file).group(1))
image_file = grasps_file.replace("cpos.txt", "r.png")
#image_file = grasps_file.replace("cpos.txt", "d.tiff") #TODO: using depth
assert os.path.isfile(image_file), image_file
neg_grasps_file = grasps_file.replace("cpos.txt", "cneg.txt")
assert os.path.isfile(neg_grasps_file), neg_grasps_file
files.append((cat_id, image_file, grasps_file, neg_grasps_file))
assert len(files), "No images found in {}".format(image_dir)
logger = logging.getLogger(__name__)
logger.info("Preprocessing cornell annotations ...")
# This is still not fast: all workers will execute duplicate works and will
# take up to 10m on a 8GPU server.
pool = mp.Pool(processes=max(mp.cpu_count() // get_world_size() // 2, 4))
ret = pool.map(
functools.partial(cornell_files_to_dict, to_polygons=to_polygons),
files,
)
logger.info("Loaded {} images from {}".format(len(ret), image_dir))
# Map ids to contiguous ids
#dataset_id_to_contiguous_id = {l.id: idx for idx, l in enumerate(os.listdir(image_dir))}
#for dict_per_image in ret:
# for anno in dict_per_image["annotations"]:
# anno["category_id"] = dataset_id_to_contiguous_id[anno["category_id"]]
return ret
def cornell_files_to_dict(files, to_polygons):
"""
Parse cornell annotation files to a dict.
Args:
files (tuple): consists of (image_file, instance_id_file, label_id_file, json_file)
to_polygons (bool): whether to represent the segmentation as polygons
(COCO's format) instead of masks (cityscapes's format).
Returns:
A dict in Detectron2 Dataset format.
"""
cat_id, image_file, grasps_file, neg_grasps_file = files
annos = []
# See also the official annotation parsing scripts at
# https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/instances2dict.py # noqa
with PathManager.open(image_file, "rb") as f:
inst_image = np.asarray(Image.open(f), order="F")
#flattened_ids = np.unique(inst_image)
flattened_ids = [0, 255]
ret = {
"file_name": image_file,
"image_id": os.path.basename(image_file),
"height": inst_image.shape[0],
"width": inst_image.shape[1],
}
#for instance_id in flattened_ids:
#anno["iscrowd"] = False
#mask = np.asarray(inst_image == instance_id, dtype=np.uint8, order="F")
#anno["segmentation"] = mask
#inds = np.nonzero(mask)
#ymin, ymax = inds[0].min(), inds[0].max()
#xmin, xmax = inds[1].min(), inds[1].max()
#anno["bbox"] = (xmin, ymin, xmax, ymax)
#if xmax <= xmin or ymax <= ymin:
# continue
#anno["bbox_mode"] = BoxMode.XYXY_ABS
# treat each grasp as an instance
anno = {}
anno["category_id"] = 0 # cat_id # TODO: assertion error
anno["iscrowd"] = False #True # TODO: add together with seg mask
anno["bbox_mode"] = BoxMode.XYWHA_ABS
with open(grasps_file) as f:
for xc, yc, w, h, a in Grasp.load_grasps_plain(f):
# careful: potential mistake in cornell format description on website, jaw and opening interchanged!
#print(xc, yc, opening, jaw, a)
assert xc >= 0, f"neg x value {grasps_file}"
assert yc >= 0, f"neg y value {grasps_file}"
#assert a >= 0, f"neg a value {grasps_file}"
assert w > 0, f"neg jaw value {grasps_file}"
assert h > 0, f"neg opening value {grasps_file}"
assert w*h >= 1, f"box area too small {grasps_file}"
anno["bbox"] = (xc, yc, w, h, a)
# classify angle region
#anno["category_id"] = int((a+90)/10) # 0 # cat_id # TODO
annos.append(anno.copy())
anno["category_id"] = 1 # cat_id # TODO: assertion error
with open(neg_grasps_file) as f:
for xc, yc, w, h, a in Grasp.load_grasps_plain(f):
# careful: potential mistake in cornell format description on website, jaw and opening interchanged!
#print(xc, yc, opening, jaw, a)
assert xc >= 0, f"neg x value {grasps_file}"
assert yc >= 0, f"neg y value {grasps_file}"
#assert a >= 0, f"neg a value {grasps_file}"
assert w > 0, f"neg jaw value {grasps_file}"
assert h > 0, f"neg opening value {grasps_file}"
assert w*h >= 1, f"box area too small {grasps_file}"
anno["bbox"] = (xc, yc, w, h, a)
# classify angle region
#anno["category_id"] = int((a+90)/10) # 0 # cat_id # TODO
annos.append(anno.copy())
ret["annotations"] = annos
return ret
def register_cornell(name, image_dir):
DatasetCatalog.register(
name,
lambda x=image_dir: load_cornell_instances(x, to_polygons=True),
)
MetadataCatalog.get(name).set(
#thing_classes=os.listdir(image_dir), # TODO: add together with segmentation
thing_classes=["grasp", "nograsp"],
#thing_classes=[f"{sector}grasp" for sector in range(18)], #TODO: put num-classes
stuff_classes=["nothing", "thing"],
image_dir=image_dir,
evaluator_type="cornell"
)
#sem_key = key.format(task="sem_seg")
#DatasetCatalog.register(
# sem_key, lambda x=image_dir, y=gt_dir: load_cityscapes_semantic(x, y)
#)
#MetadataCatalog.get(sem_key).set(
# image_dir=image_dir, gt_dir=gt_dir, evaluator_type="sem_seg", **meta
#)
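# Illustrative usage sketch (an assumption, not part of the original file): wiring the
# registration helper above into Detectron2's catalogs. The dataset name and path are
# hypothetical placeholders.
#     register_cornell("cornell_grasps_train", "/data/cornell/train")
#     dicts = DatasetCatalog.get("cornell_grasps_train")
#     print(len(dicts), dicts[0]["annotations"][0]["bbox"])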
if __name__ == "__main__":
"""
Test the cornell dataset loader.
Usage:
        python <path to this script> /path/to/cornell/images --type instance
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("image_dir")
parser.add_argument("--type", choices=["instance", "semantic"], default="instance")
args = parser.parse_args()
from detectron2.data.catalog import Metadata
from detectron2.utils.visualizer import Visualizer
logger = setup_logger(name=__name__)
dirname = "cornell-data-vis"
os.makedirs(dirname, exist_ok=True)
if args.type == "instance":
dicts = load_cornell_instances(
args.image_dir, to_polygons=True
)
logger.info("Done loading {} samples.".format(len(dicts)))
meta = Metadata().set(
thing_classes=["grasp", "nograsp"],
#thing_classes=[f"{sector}grasp" for sector in range(18)],#os.listdir(args.image_dir),
stuff_classes=["nothing", "thing"]
)
for d in dicts:
img = np.array(Image.open(d["file_name"]))
visualizer = Visualizer(img, metadata=meta)
vis = visualizer.draw_dataset_dict(d)
# cv2.imshow("a", vis.get_image()[:, :, ::-1])
# cv2.waitKey()
fpath = os.path.join(dirname, os.path.basename(d["file_name"]))
vis.save(fpath)
|
py | b41665e0b8bbb220687863cf0afff5132c7f794c | from concurrent import futures
from enum import Enum
import logging
import threading
import grpc
import radiomessages_pb2
import radiomessages_pb2_grpc
from google.protobuf import empty_pb2
logger = logging.getLogger(__name__)
class RadioRPC():
"""
Class for invoking RPCs for the radio service.
"""
def __init__(self, host):
logger.info("Connecting to grpc channel")
self.channel = grpc.insecure_channel(host)
self.stub = radiomessages_pb2_grpc.RadioStub(self.channel)
self._listener_threads = {}
def play(self, url=None):
try:
logger.debug("Sending play request")
response = self.stub.Play(radiomessages_pb2.PlayRequest(url=url))
return RadioRPC._format_status(response)
except grpc.RpcError as e:
if e.code() == grpc.StatusCode.INVALID_ARGUMENT:
raise ValueError(e.details())
else:
raise
def stop(self):
logger.debug("Sending stop request")
return RadioRPC._format_status(self.stub.Stop(empty_pb2.Empty()))
def set_volume(self, volume):
logger.debug("Setting volume")
response = self.stub.SetVolume(radiomessages_pb2.VolumeRequest(volume=volume))
return RadioRPC._format_status(response)
def get_status(self):
logger.debug("Sending get status request")
return RadioRPC._format_status(self.stub.Status(empty_pb2.Empty()))
def subscribe_to_updates(self, listener):
logger.debug("Subscribing to updates")
def async_listener():
for status in self.stub.SubscribeToUpdates(empty_pb2.Empty()):
if status:
listener(RadioRPC._format_status(status))
t = threading.Thread(target=async_listener)
t.start()
self._listener_threads[listener] = t
def unsubscribe_to_updates(self, listener):
self.stub.UnsubscribeToUpdates(empty_pb2.Empty())
if listener in self._listener_threads:
self._listener_threads[listener].join()
@staticmethod
def _format_status(status):
"""
Converts a format into a generic dict representation
"""
return {
'url': status.url,
'state': RadioState(status.state),
'title': status.title,
'name': status.name,
'volume': status.volume,
'bitrate': status.bitrate,
}
class RadioState(Enum):
PLAYING = 0
STOPPED = 1
MUTED = 2
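# Illustrative usage sketch (an assumption, not part of the original module): a typical
# client session against a radio service. The host and stream URL are hypothetical
# placeholders.
#     radio = RadioRPC("localhost:50051")
#     status = radio.play(url="http://example.org/stream.mp3")
#     print(status["state"], status["volume"])
#     radio.subscribe_to_updates(lambda s: print("now playing:", s["title"]))
#     radio.stop()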
|
py | b41666d3df36ef714e314942286c52ee6e2102ce | import numpy as np
import cv2
from matplotlib import pyplot as plt
img = cv2.imread('../img/shyaro.jpg')
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Shi-Tomasi corner detection: at most 25 corners, quality level 0.01,
# and a minimum distance of 10 pixels between accepted corners.
corners = cv2.goodFeaturesToTrack(gray, 25, 0.01, 10)
corners = np.int0(corners)
# Mark each detected corner with a small filled circle.
for i in corners:
    x, y = i.ravel()
    cv2.circle(img, (x, y), 3, 255, -1)
plt.imshow(img), plt.show()
|
py | b41667109635437330f7314f46068de8d858fdc1 | import json
import logging
import datetime
import requests
from django.conf import settings
from django.contrib.auth import logout
from django.shortcuts import redirect, render, Http404
from .models import PublicExperience
from openhumans.models import OpenHumansMember
import io
import uuid
from StepperComponent import Stepper
logger = logging.getLogger(__name__)
def index(request, page="home"):
"""
Starting page for app.
"""
context = {
"navpage": page,
"navlinks": [
{
"linkTitle": "Home",
"linkLoc": "/home",
"linkName": "home",
"template": "home.html"
},
{
"linkTitle": "About",
"linkLoc": "/about",
"linkName": "about",
"template": "about.html"
},
{
"linkTitle": "Share",
"linkLoc": "/share",
"linkName": "share",
"template": "share.html"
},
{
"linkTitle": "Login",
"linkLoc": "/login",
"linkName": "login",
"template": "login.html"
}
],
"stepper": [
{
"id": 1,
"label": "Login"
},
{
"id": 2,
"label": "Define Profile"
},
{
"id": 3,
"label": "Add Event"
}
],
"ueftext": [
{
"rows": [
{
"qtext": "Where",
"qcolour": "#4d75ad",
"phtext": "Enter name of location or postcode...",
"input": "ip"
}
],
"maintext": "Where..."
},
{
"rows": [
{
"qtext": "What",
"qcolour": "#ffbb5d",
"phtext": "Your experience can be entered here...",
"input": "ta"
}
],
"maintext": "Enter your experience"
},
{
"rows": [
{
"qtext": "What",
"qcolour": "#ffbb5d",
"phtext": "",
"input": "ta"
}
],
"maintext": "What would you have wished to be different?"
}
],
"user_exp": [
{
"id": "32097868",
"datetime": "Sept 18, 2019, 10:31 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The air conditioning in the room where I was having a meeting was really loud and I found it really hard to concentrate."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "19279611",
"datetime": "Sept 17, 2019, 8:46 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The tube is too loud."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "32097868",
"datetime": "Sept 17, 2019, 8:45 a.m.",
"user_txt": [
{
"question": "Event",
"text": "I'm at a conference today and I found people not using the microphone really difficult - it makes it much harder to concentrate on what they were saying. I was much more distracted."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
}
],
"MONE_data": [
{
"UID": "0000001",
"EID": "32097868",
"date": "18/09/19",
"Event_What": "The air conditioning in the room where I was having a meeting was really loud and I found it really heard to concentrate, it was a rubbish experience.",
"Location_Where": "NW1 2HS",
"LikeToBeDifferent": "I would have liked the air conditioning to less loud to aid my concentration",
"Summary": "Loud Air Conditioning"
},
{
"UID": "0000002",
"EID": "32097867",
"date": "17/09/19",
"Event_What": "The tube is too loud.",
"Location_Where": "NW1 8NH",
"LikeToBeDifferent": "would have liked the tube to be less loud",
"Summary": "Loud Tube"
},
{
"UID": "0000003",
"EID": "32097866",
"date": "17/09/19",
"Event_What": "I'm at a conference today and I found the people not using the microphone really difficult - it makes it harder to concentrate on what they were saying. I was much more distracted.",
"Location_Where": "SE15 5DQ",
"LikeToBeDifferent": "For people in conferences to use a microphone. To aid my concentration and reduce my distraction.",
"Summary": "None use of microphone in conference"
}
],
"Documentation_data":[
{
"ID": "0000001",
"Date": "14/06/2019",
"Group": "moderation",
"Name": "experience-moderation-guidlines",
"Version": "0.4"
},
{
"ID": "0000002",
"Date": "02/09/2020",
"Group": "moderation",
"Name": "why-is-there-a-moderation-process",
"Version": "1"
},
{
"ID": "0000003",
"Date": "09/11/2020",
"Group": "instructional",
"Name": "experience-creation-and-submission",
"Version": "2"
},
],
"AP_data": [
{
"Title": "Navigation Adjustment",
"ID": "navadjust",
"arrow": "arrow_expandingpanel_na"
},
{
"Title": "Colour Adjustment",
"ID": "coladjust",
"arrow": "arrow_expandingpanel_ca"
},
{
"Title": "Content Adjustment",
"ID": "contadjust",
"arrow": "arrow_expandingpanel_cta"
}
],
"AP_blank":
{
"Title": "Expanding Panel",
"ID": "blankexpanel",
"arrow": "arrow_expandingpanel_bep",
"content": "Content that can be replaced"
},
'AP_gallery_panels':
{
"Title": "Panel Components - These components are based on core Bootstap components and form the core structural elements of the platform.",
"ID": "gal_expanel_panels",
"arrow": "arrow_expandingpanel_bep"
},
'AP_gallery_animated_panels':
{
"Title": "Animated Panel Components - These components are animated using javascript to show and hide them. They are core structural elements of the platform.",
"ID": "gal_expanel_animated_panel",
"arrow": "arrow_expandingpanel_bep"
},
'AP_gallery_navigation':
{
"Title": "Navigation Components - These components are used for platform navigation, they allow the platform user to move around the available sections/pages of the platform and to other related content.",
"ID": "gal_expanel_navigation",
"arrow": "arrow_expandingpanel_bep"
},
'AP_HCL':
{
"Desc": "Some people cannot read text if there is not sufficient contrast between the text and background. For others, bright colours (high luminance) are not readable; they need low luminance."
},
"peed_ele_row": [
{
"peed_ele_col": [
{
"text": "I",
"icon": "icon-Autistic-Person"
},
{
"text": "Audio Desc",
"icon": "icon-audio-description"
},
{
"text": "Account",
"icon": "icon-account_circle"
},
{
"text": "Add box",
"icon": "icon-add_box"
}
]
},
{
"peed_ele_col": [
{
"text": "Add",
"icon": "icon-add"
},
{
"text": "Apps",
"icon": "icon-apps-24px"
},
{
"text": "Bar Chart",
"icon": "icon-bar_chart"
},
{
"text": "Camera",
"icon": "icon-camera_alt"
}
]
},
{
"peed_ele_col": [
{
"text": "Tick",
"icon": "icon-check-circle-together"
},
{
"text": "Cross",
"icon": "icon-close"
},
{
"text": "Smile",
"icon": "icon-comment-alt-smile"
},
{
"text": "Compass",
"icon": "icon-compass"
}
]
},
{
"peed_ele_col": [
{
"text": "CSP",
"icon": "icon-csp-lblue"
},
{
"text": "Database",
"icon": "icon-database-solid"
},
{
"text": "Email",
"icon": "icon-email"
},
{
"text": "Fast Food",
"icon": "icon-fastfood"
}
]
},
{
"peed_ele_col": [
{
"text": "Image",
"icon": "icon-image"
},
{
"text": "School",
"icon": "icon-school"
},
{
"text": "Language",
"icon": "icon-language"
},
{
"text": "No",
"icon": "icon-no"
}
]
},
],
"peed_fld": [
{
"number": "2.",
"title": "Sensory"
}
],
"peed_ele_master": [
{
"text": "I",
"icon": "icon-Autistic-Person"
},
{
"text": "Audio Desc",
"icon": "icon-audio-description"
},
{
"text": "Account",
"icon": "icon-account_circle"
},
{
"text": "Add box",
"icon": "icon-add_box"
},
{
"text": "Add",
"icon": "icon-add"
},
{
"text": "Apps",
"icon": "icon-apps-24px"
},
{
"text": "Bar Chart",
"icon": "icon-bar_chart"
},
{
"text": "Camera",
"icon": "icon-camera_alt"
},
{
"text": "Tick",
"icon": "icon-check-circle-together"
},
{
"text": "Cross",
"icon": "icon-close"
},
{
"text": "Smile",
"icon": "icon-comment-alt-smile"
},
{
"text": "Compass",
"icon": "icon-compass"
},
{
"text": "CSP",
"icon": "icon-csp-lblue"
},
{
"text": "Database",
"icon": "icon-database-solid"
},
{
"text": "Email",
"icon": "icon-email"
},
{
"text": "Fast Food",
"icon": "icon-fastfood"
},
{
"text": "Image",
"icon": "icon-image"
},
{
"text": "School",
"icon": "icon-school"
},
{
"text": "Language",
"icon": "icon-language"
},
{
"text": "No",
"icon": "icon-no"
}
],
}
stepper_object = Stepper.Stepper(request)
stepper_object.update()
auth_url = OpenHumansMember.get_auth_url()
context = {**context, **{'auth_url': auth_url}} # ,
# 'oh_proj_page': settings.OH_PROJ_PAGE}}
if request.user.is_authenticated:
return redirect('overview')
# return render(request, 'index.html', context=context)
# if(page == "error"):
# raise Http404("page does not exist: error")
# else:
return render(request, 'index.html', context=context)
def split(request, page="home"):
"""
Starting page for app.
"""
context = {
"navpage": page,
"navlinks": [
{
"linkTitle": "Home",
"linkLoc": "/split/home",
"linkName": "home",
"template": "home.html"
},
{
"linkTitle": "About",
"linkLoc": "/split/about",
"linkName": "about",
"template": "about.html"
},
{
"linkTitle": "Share",
"linkLoc": "/split/share",
"linkName": "share",
"template": "share.html"
},
{
"linkTitle": "Login",
"linkLoc": "/split/login",
"linkName": "login",
"template": "login.html"
}
],
"stepper": [
{
"id": 1,
"label": "Login"
},
{
"id": 2,
"label": "Define Profile"
},
{
"id": 3,
"label": "Add Event"
}
],
"ueftext": [
{
"rows": [
{
"qtext": "Where",
"qcolour": "#4d75ad",
"phtext": "Enter name of location or postcode...",
"input": "ip"
},
{
"qtext": "What",
"qcolour": "#ffbb5d",
"phtext": "Your experience can be entered here...",
"input": "ta"
}
],
"maintext": "Enter your experience"
},
{
"rows": [
{
"qtext": "What",
"qcolour": "#ffbb5d",
"phtext": "",
"input": "ta"
}
],
"maintext": "What would you have wished to be different?"
}
],
"user_exp": [
{
"id": "32097868",
"datetime": "Sept 18, 2019, 10:31 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The air conditioning in the room where I was having a meeting was really loud and I found it really hard to concentrate."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "19279611",
"datetime": "Sept 17, 2019, 8:46 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The tube is too loud."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "32097868",
"datetime": "Sept 17, 2019, 8:45 a.m.",
"user_txt": [
{
"question": "Event",
"text": "I'm at a conference today and I found people not using the microphone really difficult - it makes it much harder to concentrate on what they were saying. I was much more distracted."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
}
],
"MONE_data": [
{
"UID": "0000001",
"EID": "32097868",
"date": "18/09/19",
"Event_What": "The air conditioning in the room where I was having a meeting was really loud and I found it really heard to concentrate, it was a rubbish experience.",
"Location_Where": "NW1 2HS",
"LikeToBeDifferent": "I would have liked the air conditioning to less loud to aid my concentration",
"Summary": "Loud Air Conditioning"
},
{
"UID": "0000002",
"EID": "32097867",
"date": "17/09/19",
"Event_What": "The tube is too loud.",
"Location_Where": "NW1 8NH",
"LikeToBeDifferent": "would have liked the tube to be less loud",
"Summary": "Loud Tube"
},
{
"UID": "0000003",
"EID": "32097866",
"date": "17/09/19",
"Event_What": "I'm at a conference today and I found the people not using the microphone really difficult - it makes it harder to concentrate on what they were saying. I was much more distracted.",
"Location_Where": "SE15 5DQ",
"LikeToBeDifferent": "For people in conferences to use a microphone. To aid my concentration and reduce my distraction.",
"Summary": "None use of microphone in conference"
}
],
"Documentation_data":[
{
"ID": "0000001",
"Date": "14/06/2019",
"Group": "moderation",
"Name": "experience-moderation-guidlines",
"Version": "0.4"
},
{
"ID": "0000002",
"Date": "02/09/2020",
"Group": "moderation",
"Name": "why-is-there-a-moderation-process",
"Version": "1"
},
{
"ID": "0000003",
"Date": "09/11/2020",
"Group": "instructional",
"Name": "experience-creation-and-submission",
"Version": "2"
},
],
"AP_data": [
{
"Title": "Navigation Adjustment",
"ID": "navadjust",
"arrow": "arrow_expandingpanel_na"
},
{
"Title": "Colour Adjustment",
"ID": "coladjust",
"arrow": "arrow_expandingpanel_ca"
},
{
"Title": "Content Adjustment",
"ID": "contadjust",
"arrow": "arrow_expandingpanel_cta"
}
],
"AP_blank":
{
"Title": "Expanding Panel",
"ID": "blankexpanel",
"arrow": "arrow_expandingpanel_bep",
"content": "Content that can be replaced"
},
'AP_gallery_panels':
{
"Title": "Panel Components - These components are based on core Bootstap components and form the core structural elements of the platform.",
"ID": "gal_expanel_panels",
"arrow": "arrow_expandingpanel_bep"
},
'AP_gallery_animated_panels':
{
"Title": "Animated Panel Components - These components are animated using javascript to show and hide them. They are core structural elements of the platform.",
"ID": "gal_expanel_animated_panel",
"arrow": "arrow_expandingpanel_bep"
},
'AP_gallery_navigation':
{
"Title": "Navigation Components - These components are used for platform navigation, they allow the platform user to move around the available sections/pages of the platform and to other related content.",
"ID": "gal_expanel_navigation",
"arrow": "arrow_expandingpanel_bep"
},
'AP_HCL':
{
"Desc": "Some people cannot read text if there is not sufficient contrast between the text and background. For others, bright colours (high luminance) are not readable; they need low luminance."
},
"peed_ele_row": [
{
"peed_ele_col": [
{
"text": "I",
"icon": "icon-Autistic-Person"
},
{
"text": "Audio Desc",
"icon": "icon-audio-description"
},
{
"text": "Account",
"icon": "icon-account_circle"
},
{
"text": "Add box",
"icon": "icon-add_box"
}
]
},
{
"peed_ele_col": [
{
"text": "Add",
"icon": "icon-add"
},
{
"text": "Apps",
"icon": "icon-apps-24px"
},
{
"text": "Bar Chart",
"icon": "icon-bar_chart"
},
{
"text": "Camera",
"icon": "icon-camera_alt"
}
]
},
{
"peed_ele_col": [
{
"text": "Tick",
"icon": "icon-check-circle-together"
},
{
"text": "Cross",
"icon": "icon-close"
},
{
"text": "Smile",
"icon": "icon-comment-alt-smile"
},
{
"text": "Compass",
"icon": "icon-compass"
}
]
},
{
"peed_ele_col": [
{
"text": "CSP",
"icon": "icon-csp-lblue"
},
{
"text": "Database",
"icon": "icon-database-solid"
},
{
"text": "Email",
"icon": "icon-email"
},
{
"text": "Fast Food",
"icon": "icon-fastfood"
}
]
},
{
"peed_ele_col": [
{
"text": "Image",
"icon": "icon-image"
},
{
"text": "School",
"icon": "icon-school"
},
{
"text": "Language",
"icon": "icon-language"
},
{
"text": "No",
"icon": "icon-no"
}
]
},
],
"peed_fld": [
{
"number": "2.",
"title": "Sensory"
}
],
"peed_ele_master": [
{
"text": "I",
"icon": "icon-Autistic-Person"
},
{
"text": "Audio Desc",
"icon": "icon-audio-description"
},
{
"text": "Account",
"icon": "icon-account_circle"
},
{
"text": "Add box",
"icon": "icon-add_box"
},
{
"text": "Add",
"icon": "icon-add"
},
{
"text": "Apps",
"icon": "icon-apps-24px"
},
{
"text": "Bar Chart",
"icon": "icon-bar_chart"
},
{
"text": "Camera",
"icon": "icon-camera_alt"
},
{
"text": "Tick",
"icon": "icon-check-circle-together"
},
{
"text": "Cross",
"icon": "icon-close"
},
{
"text": "Smile",
"icon": "icon-comment-alt-smile"
},
{
"text": "Compass",
"icon": "icon-compass"
},
{
"text": "CSP",
"icon": "icon-csp-lblue"
},
{
"text": "Database",
"icon": "icon-database-solid"
},
{
"text": "Email",
"icon": "icon-email"
},
{
"text": "Fast Food",
"icon": "icon-fastfood"
},
{
"text": "Image",
"icon": "icon-image"
},
{
"text": "School",
"icon": "icon-school"
},
{
"text": "Language",
"icon": "icon-language"
},
{
"text": "No",
"icon": "icon-no"
}
],
}
stepper_object = Stepper.Stepper(request)
stepper_object.update()
auth_url = OpenHumansMember.get_auth_url()
context = {**context, **{'auth_url': auth_url}} # ,
# 'oh_proj_page': settings.OH_PROJ_PAGE}}
if request.user.is_authenticated:
return redirect('overview')
# return render(request, 'index.html', context=context)
# if(page == "error"):
# raise Http404("page does not exist: error")
# else:
return render(request, 'split.html', context=context)
def componentGallery(request):
context = {
"stepper": [
{
"id": 1,
"label": "Login"
},
{
"id": 2,
"label": "Define Profile"
},
{
"id": 3,
"label": "Add Event"
}
],
"ueftext": [
{
"rows": [
{
"qtext": "Where",
"qcolour": "#4d75ad",
"phtext": "Enter name of location or postcode...",
"input": "ip"
}
],
"maintext": "Where..."
},
{
"rows": [
{
"qtext": "What",
"qcolour": "#ffbb5d",
"phtext": "Your experience can be entered here...",
"input": "ta"
}
],
"maintext": "Enter your experience"
},
{
"rows": [
{
"qtext": "What",
"qcolour": "#ffbb5d",
"phtext": "",
"input": "ta"
}
],
"maintext": "What would you have wished to be different?"
}
],
"user_exp": [
{
"id": "32097868",
"datetime": "Sept 18, 2019, 10:31 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The air conditioning in the room where I was having a meeting was really loud and I found it really hard to concentrate."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "19279611",
"datetime": "Sept 17, 2019, 8:46 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The tube is too loud."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "32097868",
"datetime": "Sept 17, 2019, 8:45 a.m.",
"user_txt": [
{
"question": "Event",
"text": "I'm at a conference today and I found people not using the microphone really difficult - it makes it much harder to concentrate on what they were saying. I was much more distracted."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
}
],
"MONE_data": [
{
"UID": "0000001",
"EID": "32097868",
"date": "18/09/19",
"Event_What": "The air conditioning in the room where I was having a meeting was really loud and I found it really heard to concentrate, it was a rubbish experience.",
"Location_Where": "NW1 2HS",
"LikeToBeDifferent": "I would have liked the air conditioning to less loud to aid my concentration",
"Summary": "Loud Air Conditioning"
},
{
"UID": "0000002",
"EID": "32097867",
"date": "17/09/19",
"Event_What": "The tube is too loud.",
"Location_Where": "NW1 8NH",
"LikeToBeDifferent": "would have liked the tube to be less loud",
"Summary": "Loud Tube"
},
{
"UID": "0000003",
"EID": "32097866",
"date": "17/09/19",
"Event_What": "I'm at a conference today and I found the people not using the microphone really difficult - it makes it harder to concentrate on what they were saying. I was much more distracted.",
"Location_Where": "SE15 5DQ",
"LikeToBeDifferent": "For people in conferences to use a microphone. To aid my concentration and reduce my distraction.",
"Summary": "None use of microphone in conference"
}
],
"Documentation_data":[
{
"ID": "0000001",
"Date": "14/06/2019",
"Group": "moderation",
"Name": "experience-moderation-guidlines",
"Version": "0.4"
},
{
"ID": "0000002",
"Date": "02/09/2020",
"Group": "moderation",
"Name": "why-is-there-a-moderation-process",
"Version": "1"
},
{
"ID": "0000003",
"Date": "09/11/2020",
"Group": "instructional",
"Name": "experience-creation-and-submission",
"Version": "2"
},
],
'AP_data': [
{
"Title": "Navigation Adjustment",
"ID": "navadjust",
"arrow": "arrow_expandingpanel_na"
},
{
"Title": "Colour Adjustment",
"ID": "coladjust",
"arrow": "arrow_expandingpanel_ca"
},
{
"Title": "Content Adjustment",
"ID": "contadjust",
"arrow": "arrow_expandingpanel_cta"
}
],
'AP_blank':
{
"Title": "Expanding Panel",
"ID": "blankexpanel",
"arrow": "arrow_expandingpanel_bep",
"content": "Content that can be replaced"
},
'AP_gallery_panels':
{
"Title": "Panel Components - These components are based on core Bootstap components and form the core structural elements of the platform.",
"ID": "gal_expanel_panels",
"arrow": "arrow_expandingpanel_bep"
},
'AP_gallery_animated_panels':
{
"Title": "Animated Panel Components - These components are animated using javascript to show and hide them. They are core structural elements of the platform.",
"ID": "gal_expanel_animated_panel",
"arrow": "arrow_expandingpanel_bep"
},
'AP_gallery_navigation':
{
"Title": "Navigation Components - These components are used for platform navigation, they allow the platform user to move around the available sections/pages of the platform and to other related content.",
"ID": "gal_expanel_navigation",
"arrow": "arrow_expandingpanel_bep"
},
'AP_HCL':
{
"Desc": "Some people cannot read text if there is not sufficient contrast between the text and background. For others, bright colours (high luminance) are not readable; they need low luminance."
},
"peed_ele_row": [
{
"peed_ele_col": [
{
"text": "I",
"icon": "icon-Autistic-Person"
},
{
"text": "Audio Desc",
"icon": "icon-audio-description"
},
{
"text": "Account",
"icon": "icon-account_circle"
},
{
"text": "Add box",
"icon": "icon-add_box"
}
]
},
{
"peed_ele_col": [
{
"text": "Add",
"icon": "icon-add"
},
{
"text": "Apps",
"icon": "icon-apps-24px"
},
{
"text": "Bar Chart",
"icon": "icon-bar_chart"
},
{
"text": "Camera",
"icon": "icon-camera_alt"
}
]
},
{
"peed_ele_col": [
{
"text": "Tick",
"icon": "icon-check-circle-together"
},
{
"text": "Cross",
"icon": "icon-close"
},
{
"text": "Smile",
"icon": "icon-comment-alt-smile"
},
{
"text": "Compass",
"icon": "icon-compass"
}
]
},
{
"peed_ele_col": [
{
"text": "CSP",
"icon": "icon-csp-lblue"
},
{
"text": "Database",
"icon": "icon-database-solid"
},
{
"text": "Email",
"icon": "icon-email"
},
{
"text": "Fast Food",
"icon": "icon-fastfood"
}
]
},
{
"peed_ele_col": [
{
"text": "Image",
"icon": "icon-image"
},
{
"text": "School",
"icon": "icon-school"
},
{
"text": "Language",
"icon": "icon-language"
},
{
"text": "No",
"icon": "icon-no"
}
]
},
],
"peed_fld": [
{
"number": "2.",
"title": "Sensory"
}
],
"peed_ele_master": [
{
"text": "I",
"icon": "icon-Autistic-Person"
},
{
"text": "Audio Desc",
"icon": "icon-audio-description"
},
{
"text": "Account",
"icon": "icon-account_circle"
},
{
"text": "Add box",
"icon": "icon-add_box"
},
{
"text": "Add",
"icon": "icon-add"
},
{
"text": "Apps",
"icon": "icon-apps-24px"
},
{
"text": "Bar Chart",
"icon": "icon-bar_chart"
},
{
"text": "Camera",
"icon": "icon-camera_alt"
},
{
"text": "Tick",
"icon": "icon-check-circle-together"
},
{
"text": "Cross",
"icon": "icon-close"
},
{
"text": "Smile",
"icon": "icon-comment-alt-smile"
},
{
"text": "Compass",
"icon": "icon-compass"
},
{
"text": "CSP",
"icon": "icon-csp-lblue"
},
{
"text": "Database",
"icon": "icon-database-solid"
},
{
"text": "Email",
"icon": "icon-email"
},
{
"text": "Fast Food",
"icon": "icon-fastfood"
},
{
"text": "Image",
"icon": "icon-image"
},
{
"text": "School",
"icon": "icon-school"
},
{
"text": "Language",
"icon": "icon-language"
},
{
"text": "No",
"icon": "icon-no"
}
],
"navlinks": [
{
"linkTitle": "Home",
"linkLoc": "/home",
"linkName": "home",
"template": "home.html"
},
{
"linkTitle": "About",
"linkLoc": "/about",
"linkName": "about",
"template": "about.html"
},
{
"linkTitle": "Share",
"linkLoc": "/share",
"linkName": "share",
"template": "share.html"
},
{
"linkTitle": "Login",
"linkLoc": "/login",
"linkName": "login",
"template": "login.html"
}
],
}
stepper_object = Stepper.Stepper(request)
stepper_object.update()
auth_url = OpenHumansMember.get_auth_url()
context = {**context, **{'auth_url': auth_url}} # ,
# 'oh_proj_page': settings.OH_PROJ_PAGE}}
if request.user.is_authenticated:
oh_member = request.user.openhumansmember
context = {**context, **{'oh_id': oh_member.oh_id,
'oh_member': oh_member}} # ,
# 'oh_proj_page': settings.OH_PROJ_PAGE}}
return render(request, 'gallery.html', context=context)
def share(request):
context = {
"ueftext": [
{
"rows": [
{
"qtext": "Where",
"qcolour": "#4d75ad",
"phtext": "Enter name of location or postcode...",
"input": "ip"
}
],
"maintext": "Where..."
},
{
"rows": [
{
"qtext": "What",
"qcolour": "#ffbb5d",
"phtext": "Your experience can be entered here...",
"input": "ta"
}
],
"maintext": "Enter your experience"
},
{
"rows": [
{
"qtext": "What",
"qcolour": "#ffbb5d",
"phtext": "",
"input": "ta"
}
],
"maintext": "What would you have wished to be different?"
}
]
}
return render(request, 'share.html', context=context)
def moderationreject(request):
context = {
'mrtext': [
{
"rows": [
{
"qtext": "",
"qcolour": "#4d75ad",
"phtext": "Enter reasoning",
"input": "ta"
}
],
"maintext": "Why is this experience not appropriate?"
},
{
"rows": [
{
"qtext": "",
"qcolour": "#4d75ad",
"phtext": "Enter proposed changes",
"input": "ta"
}
],
"maintext": "How can this experience be improved?"
}
]
}
return render(request, 'moderationreject.html', context=context)
def configure(request):
return render(request, 'configure.html')
def getinvolved(request):
return render(request, 'getinvolved.html')
def view(request):
context = {
"stepper": [
{
"id": 1,
"label": "Login"
},
{
"id": 2,
"label": "Define Profile"
},
{
"id": 3,
"label": "Add Experience"
},
{
"id": 4,
"label": "View Experience"
}
],
}
stepper_object = Stepper.Stepper(request)
stepper_object.update()
return render(request, 'view.html', context=context)
def home(request):
return render(request, 'home.html')
def about(request):
return render(request, 'about.html')
def mydata(request):
context = {
"user_exp": [
{
"id": "32097868",
"datetime": "Sept 18, 2019, 10:31 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The air conditioning in the room where I was having a meeting was really loud and I found it really hard to concentrate."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "19279611",
"datetime": "Sept 17, 2019, 8:46 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The tube is too loud."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "32097868",
"datetime": "Sept 17, 2019, 8:45 a.m.",
"user_txt": [
{
"question": "Event",
"text": "I'm at a conference today and I found people not using the microphone really difficult - it makes it much harder to concentrate on what they were saying. I was much more distracted."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
}
]
}
return render(request, 'mydata.html', context=context)
def moderation(request):
context = {
"MONE_data": [
{
"UID": "0000001",
"EID": "32097868",
"date": "18/09/19",
"Event_What": "The air conditioning in the room where I was having a meeting was really loud and I found it really heard to concentrate, it was a rubbish experience.",
"Location_Where": "NW1 2HS",
"LikeToBeDifferent": "I would have liked the air conditioning to less loud to aid my concentration",
"Summary": "Loud Air Conditioning"
},
{
"UID": "0000002",
"EID": "32097867",
"date": "17/09/19",
"Event_What": "The tube is too loud.",
"Location_Where": "NW1 8NH",
"LikeToBeDifferent": "would have liked the tube to be less loud",
"Summary": "Loud Tube"
},
{
"UID": "0000003",
"EID": "32097866",
"date": "17/09/19",
"Event_What": "I'm at a conference today and I found the people not using the microphone really difficult - it makes it harder to concentrate on what they were saying. I was much more distracted.",
"Location_Where": "SE15 5DQ",
"LikeToBeDifferent": "For people in conferences to use a microphone. To aid my concentration and reduce my distraction.",
"Summary": "None use of microphone in conference"
}
],
"AP_data": [
{
"Title": "Navigation Adjustment",
"ID": "navadjust",
"arrow": "arrow_expandingpanel_na"
},
{
"Title": "Colour Adjustment",
"ID": "coladjust",
"arrow": "arrow_expandingpanel_ca"
},
{
"Title": "Content Adjustment",
"ID": "contadjust",
"arrow": "arrow_expandingpanel_cta"
}
],
"AP_blank":
{
"Title": "Expanding Panel",
"ID": "blankexpanel",
"arrow": "arrow_expandingpanel_bep"
},
"AP_HCL":
{
"Desc": "Some people cannot read text if there is not sufficient contrast between the text and background. For others, bright colours (high luminance) are not readable; they need low luminance."
}
}
stepper_object = Stepper.Stepper(request)
stepper_object.update()
auth_url = OpenHumansMember.get_auth_url()
context = {**context, **{'auth_url': auth_url}}#,
# 'oh_proj_page': settings.OH_PROJ_PAGE}}
if request.user.is_authenticated:
oh_member = request.user.openhumansmember
context = {**context, **{'oh_id': oh_member.oh_id,
'oh_member': oh_member}}#,
# 'oh_proj_page': settings.OH_PROJ_PAGE}}
return render(request, 'gallery.html', context=context)
def moderationreject(request):
context = {
'mrtext': [
{
"rows": [
{
"qtext": "",
"qcolour": "#4d75ad",
"phtext": "Enter reasoning",
"input": "ta"
}
],
"maintext": "Why is this experience not appropriate?"
},
{
"rows": [
{
"qtext": "",
"qcolour": "#4d75ad",
"phtext": "Enter proposed changes",
"input": "ta"
}
],
"maintext": "How can this experience be improved?"
}
]
}
return render(request, 'moderationreject.html', context=context)
def configure(request):
return render(request, 'configure.html')
def getinvolved(request):
return render(request, 'getinvolved.html')
def view(request):
context = {
"stepper": [
{
"id": 1,
"label": "Login"
},
{
"id": 2,
"label": "Define Profile"
},
{
"id": 3,
"label": "Add Experience"
},
{
"id": 4,
"label": "View Experience"
}
],
}
stepper_object = Stepper.Stepper(request)
stepper_object.update()
return render(request, 'view.html', context=context)
def mydata(request):
context = {
"user_exp": [
{
"id": "32097868",
"datetime": "Sept 18, 2019, 10:31 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The air conditioning in the room where I was having a meeting was really loud and I found it really hard to concentrate."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "19279611",
"datetime": "Sept 17, 2019, 8:46 a.m.",
"user_txt": [
{
"question": "Event",
"text": "The tube is too loud."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
},
{
"id": "32097868",
"datetime": "Sept 17, 2019, 8:45 a.m.",
"user_txt": [
{
"question": "Event",
"text": "I'm at a conference today and I found people not using the microphone really difficult - it makes it much harder to concentrate on what they were saying. I was much more distracted."
},
{
"question": "What would you have liked to be different?",
"text": ""
}
]
}
]
}
return render(request, 'mydata.html', context=context)
def moderation(request):
context = {
"MONE_data": [
{
"UID": "0000001",
"EID": "32097868",
"date": "18/09/19",
"Event_What": "The air conditioning in the room where I was having a meeting was really loud and I found it really heard to concentrate, it was a rubbish experience.",
"Location_Where": "NW1 2HS",
"LikeToBeDifferent": "I would have liked the air conditioning to less loud to aid my concentration",
"Summary": "Loud Air Conditioning"
},
{
"UID": "0000002",
"EID": "32097867",
"date": "17/09/19",
"Event_What": "The tube is too loud.",
"Location_Where": "NW1 8NH",
"LikeToBeDifferent": "would have liked the tube to be less loud",
"Summary": "Loud Tube"
},
{
"UID": "0000003",
"EID": "32097866",
"date": "17/09/19",
"Event_What": "I'm at a conference today and I found the people not using the microphone really difficult - it makes it harder to concentrate on what they were saying. I was much more distracted.",
"Location_Where": "SE15 5DQ",
"LikeToBeDifferent": "For people in conferences to use a microphone. To aid my concentration and reduce my distraction.",
"Summary": "None use of microphone in conference"
}
]
}
return render(request, 'moderation.html', context=context)
def accessibility_settings(request):
return render(request, 'settings.html')
def login(request):
return render(request, 'login.html')
def overview(request):
if request.user.is_authenticated:
oh_member = request.user.openhumansmember
context = {'oh_id': oh_member.oh_id,
'oh_member': oh_member,
'oh_proj_page': settings.OH_PROJ_PAGE}
return render(request, 'overview.html', context=context)
return redirect('index')
def pictorialexperienceeditor(request):
context = {
"peed_ele_row": [
{
"peed_ele_col": [
{
"text": "I",
"icon": "icon-Autistic-Person"
},
{
"text": "Audio Desc",
"icon": "icon-audio-description"
},
{
"text": "Account",
"icon": "icon-account_circle"
},
{
"text": "Add box",
"icon": "icon-add_box"
}
]
},
{
"peed_ele_col": [
{
"text": "Add",
"icon": "icon-add"
},
{
"text": "Apps",
"icon": "icon-apps-24px"
},
{
"text": "Bar Chart",
"icon": "icon-bar_chart"
},
{
"text": "Camera",
"icon": "icon-camera_alt"
}
]
},
{
"peed_ele_col": [
{
"text": "Tick",
"icon": "icon-check-circle-together"
},
{
"text": "Cross",
"icon": "icon-close"
},
{
"text": "Smile",
"icon": "icon-comment-alt-smile"
},
{
"text": "Compass",
"icon": "icon-compass"
}
]
},
{
"peed_ele_col": [
{
"text": "CSP",
"icon": "icon-csp-lblue"
},
{
"text": "Database",
"icon": "icon-database-solid"
},
{
"text": "Email",
"icon": "icon-email"
},
{
"text": "Fast Food",
"icon": "icon-fastfood"
}
]
},
{
"peed_ele_col": [
{
"text": "Image",
"icon": "icon-image"
},
{
"text": "School",
"icon": "icon-school"
},
{
"text": "Language",
"icon": "icon-language"
},
{
"text": "No",
"icon": "icon-no"
}
]
},
],
"peed_fld": [
{
"number": "2.",
"title": "Sensory"
}
],
"peed_ele_master": [
{
"text": "I",
"icon": "icon-Autistic-Person"
},
{
"text": "Audio Desc",
"icon": "icon-audio-description"
},
{
"text": "Account",
"icon": "icon-account_circle"
},
{
"text": "Add box",
"icon": "icon-add_box"
},
{
"text": "Add",
"icon": "icon-add"
},
{
"text": "Apps",
"icon": "icon-apps-24px"
},
{
"text": "Bar Chart",
"icon": "icon-bar_chart"
},
{
"text": "Camera",
"icon": "icon-camera_alt"
},
{
"text": "Tick",
"icon": "icon-check-circle-together"
},
{
"text": "Cross",
"icon": "icon-close"
},
{
"text": "Smile",
"icon": "icon-comment-alt-smile"
},
{
"text": "Compass",
"icon": "icon-compass"
},
{
"text": "CSP",
"icon": "icon-csp-lblue"
},
{
"text": "Database",
"icon": "icon-database-solid"
},
{
"text": "Email",
"icon": "icon-email"
},
{
"text": "Fast Food",
"icon": "icon-fastfood"
},
{
"text": "Image",
"icon": "icon-image"
},
{
"text": "School",
"icon": "icon-school"
},
{
"text": "Language",
"icon": "icon-language"
},
{
"text": "No",
"icon": "icon-no"
}
],
}
return render(request, 'pictorialexperienceeditor.html', context=context)
def logout_user(request):
"""
Logout user
"""
if request.method == 'GET':
logout(request)
return redirect('index')
def upload(request):
if request.method == 'POST':
print(request.POST)
experience_text = request.POST.get('experience')
wish_different_text = request.POST.get('wish_different')
viewable = request.POST.get('viewable')
if not viewable:
viewable = 'not public'
research = request.POST.get('research')
if not research:
research = 'non-research'
if experience_text:
experience_id = str(uuid.uuid1())
output_json = {
'text': experience_text,
'wish_different': wish_different_text,
'timestamp': str(datetime.datetime.now())}
output = io.StringIO()
output.write(json.dumps(output_json))
output.seek(0)
metadata = {'tags': [viewable, research],
'uuid': experience_id,
'description': 'this is a test file'}
request.user.openhumansmember.upload(
stream=output,
filename='testfile.json',
metadata=metadata)
if viewable == 'viewable':
PublicExperience.objects.create(
experience_text=experience_text,
difference_text=wish_different_text,
open_humans_member=request.user.openhumansmember,
experience_id=experience_id)
return redirect('index')
else:
# if request.user.is_authenticated:
return render(request, 'main/upload.html')
return redirect('index')
def list_files(request):
if request.user.is_authenticated:
context = {'files': request.user.openhumansmember.list_files()}
return render(request, 'list.html',
context=context)
return redirect('index')
def list_public_experiences(request):
experiences = PublicExperience.objects.filter(approved='approved')
return render(
request,
'public_experiences.html',
context={'experiences': experiences})
def moderate_public_experiences(request):
experiences = PublicExperience.objects.filter(approved='not reviewed')
return render(
request,
'old/moderate_public_experiences.html',
context={'experiences': experiences})
def review_experience(request, experience_id):
experience = PublicExperience.objects.get(experience_id=experience_id)
print(experience)
experience.approved = 'approved'
experience.save()
print(experience.approved)
return redirect('moderate_public_experiences')
def make_non_viewable(request, oh_file_id, file_uuid):
pe = PublicExperience.objects.get(experience_id=file_uuid)
pe.delete()
oh_files = request.user.openhumansmember.list_files()
for f in oh_files:
if str(f['id']) == str(oh_file_id):
experience = requests.get(f['download_url']).json()
new_metadata = f['metadata']
new_metadata['tags'] = ['not public'] + f['metadata']['tags'][1:]
output = io.StringIO()
output.write(json.dumps(experience))
output.seek(0)
request.user.openhumansmember.upload(
stream=output,
filename='testfile.json',
metadata=new_metadata)
request.user.openhumansmember.delete_single_file(
file_id=oh_file_id)
return redirect('list')
def make_viewable(request, oh_file_id, file_uuid):
oh_files = request.user.openhumansmember.list_files()
for f in oh_files:
if str(f['id']) == str(oh_file_id):
experience = requests.get(f['download_url']).json()
new_metadata = f['metadata']
new_metadata['tags'] = ['viewable'] + f['metadata']['tags'][1:]
output = io.StringIO()
output.write(json.dumps(experience))
output.seek(0)
request.user.openhumansmember.upload(
stream=output,
filename='testfile.json',
metadata=new_metadata)
request.user.openhumansmember.delete_single_file(
file_id=oh_file_id)
PublicExperience.objects.create(
experience_text=experience['text'],
difference_text=experience['wish_different'],
open_humans_member=request.user.openhumansmember,
experience_id=file_uuid)
return redirect('list')
def make_non_research(request, oh_file_id, file_uuid):
oh_files = request.user.openhumansmember.list_files()
for f in oh_files:
if str(f['id']) == str(oh_file_id):
experience = requests.get(f['download_url']).json()
new_metadata = f['metadata']
new_metadata['tags'] = f['metadata']['tags'][:-1] + \
['non-research']
output = io.StringIO()
output.write(json.dumps(experience))
output.seek(0)
request.user.openhumansmember.upload(
stream=output,
filename='testfile.json',
metadata=new_metadata)
request.user.openhumansmember.delete_single_file(
file_id=oh_file_id)
return redirect('list')
def make_research(request, oh_file_id, file_uuid):
oh_files = request.user.openhumansmember.list_files()
for f in oh_files:
if str(f['id']) == str(oh_file_id):
experience = requests.get(f['download_url']).json()
new_metadata = f['metadata']
new_metadata['tags'] = f['metadata']['tags'][:-1] + ['research']
output = io.StringIO()
output.write(json.dumps(experience))
output.seek(0)
request.user.openhumansmember.upload(
stream=output,
filename='testfile.json',
metadata=new_metadata)
request.user.openhumansmember.delete_single_file(
file_id=oh_file_id)
return redirect('list')
|
py | b416672e6db6c008f355a91e8cd003369910bf76 | # Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the topics editor, from where topics are edited and stories
are created.
"""
from core.controllers import base
from core.domain import acl_decorators
from core.domain import story_domain
from core.domain import story_services
from core.domain import topic_domain
from core.domain import topic_services
from core.domain import user_services
import feconf
import utils
class NewStoryHandler(base.BaseHandler):
"""Creates a new story."""
@acl_decorators.can_add_new_story_to_topic
def post(self, topic_id):
"""Handles POST requests.
Currently, this only adds the story to the canonical story id list of
the topic.
"""
if not feconf.ENABLE_NEW_STRUCTURES:
raise self.PageNotFoundException
topic_domain.Topic.require_valid_topic_id(topic_id)
title = self.payload.get('title')
topic = topic_services.get_topic_by_id(topic_id, strict=False)
if topic is None:
raise self.PageNotFoundException(
Exception('The topic with the given id doesn\'t exist.'))
story_domain.Story.require_valid_title(title)
new_story_id = story_services.get_new_story_id()
story = story_domain.Story.create_default_story(
new_story_id, title=title)
story_services.save_new_story(self.user_id, story)
topic_services.add_canonical_story(self.user_id, topic_id, new_story_id)
self.render_json({
'storyId': new_story_id
})
class TopicEditorPage(base.BaseHandler):
"""The editor page for a single topic."""
@acl_decorators.can_edit_topic
def get(self, topic_id):
"""Handles GET requests."""
if not feconf.ENABLE_NEW_STRUCTURES:
raise self.PageNotFoundException
topic_domain.Topic.require_valid_topic_id(topic_id)
topic = topic_services.get_topic_by_id(topic_id, strict=False)
if topic is None:
raise self.PageNotFoundException(
Exception('The topic with the given id doesn\'t exist.'))
self.values.update({
'topic_id': topic.id
})
self.render_template('pages/topic_editor/topic_editor.html')
class EditableTopicDataHandler(base.BaseHandler):
"""A data handler for topics which supports writing."""
def _require_valid_version(self, version_from_payload, topic_version):
"""Check that the payload version matches the given topic
version.
"""
if version_from_payload is None:
raise base.BaseHandler.InvalidInputException(
'Invalid POST request: a version must be specified.')
if version_from_payload != topic_version:
raise base.BaseHandler.InvalidInputException(
'Trying to update version %s of topic from version %s, '
'which is too old. Please reload the page and try again.'
% (topic_version, version_from_payload))
@acl_decorators.can_edit_topic
def get(self, topic_id):
"""Populates the data on the individual topic page."""
if not feconf.ENABLE_NEW_STRUCTURES:
raise self.PageNotFoundException
topic_domain.Topic.require_valid_topic_id(topic_id)
topic = topic_services.get_topic_by_id(topic_id, strict=False)
if topic is None:
raise self.PageNotFoundException(
Exception('The topic with the given id doesn\'t exist.'))
self.values.update({
'topic': topic.to_dict()
})
self.render_json(self.values)
@acl_decorators.can_edit_topic
def put(self, topic_id):
"""Updates properties of the given topic."""
if not feconf.ENABLE_NEW_STRUCTURES:
raise self.PageNotFoundException
topic_domain.Topic.require_valid_topic_id(topic_id)
topic = topic_services.get_topic_by_id(topic_id, strict=False)
if topic is None:
raise self.PageNotFoundException(
Exception('The topic with the given id doesn\'t exist.'))
version = self.payload.get('version')
self._require_valid_version(version, topic.version)
commit_message = self.payload.get('commit_message')
change_dicts = self.payload.get('change_dicts')
change_list = [
topic_domain.TopicChange(change_dict)
for change_dict in change_dicts
]
try:
topic_services.update_topic(
self.user_id, topic_id, change_list, commit_message)
except utils.ValidationError as e:
raise self.InvalidInputException(e)
topic_dict = topic_services.get_topic_by_id(topic_id).to_dict()
self.values.update({
'topic': topic_dict
})
self.render_json(self.values)
@acl_decorators.can_delete_topic
def delete(self, topic_id):
"""Handles Delete requests."""
if not feconf.ENABLE_NEW_STRUCTURES:
raise self.PageNotFoundException
topic_domain.Topic.require_valid_topic_id(topic_id)
topic = topic_services.get_topic_by_id(topic_id, strict=False)
if topic is None:
raise self.PageNotFoundException(
Exception('The topic with the given id doesn\'t exist.'))
topic_services.delete_topic(self.user_id, topic_id)
class TopicManagerRightsHandler(base.BaseHandler):
"""A handler for assigning topic manager rights."""
@acl_decorators.can_manage_rights_for_topic
def put(self, topic_id, assignee_id):
"""Assign topic manager role to a user for a particular topic, if the
user has general topic manager rights.
"""
topic_domain.Topic.require_valid_topic_id(topic_id)
if assignee_id is None:
raise self.InvalidInputException(
Exception('Expected a valid assignee id to be provided.'))
assignee_actions_info = user_services.UserActionsInfo(assignee_id)
user_actions_info = user_services.UserActionsInfo(self.user_id)
try:
topic_services.assign_role(
user_actions_info, assignee_actions_info,
topic_domain.ROLE_MANAGER, topic_id)
except Exception as e:
raise self.UnauthorizedUserException(e)
self.values.update({
'role_updated': True
})
self.render_json(self.values)
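# Illustrative note (added; not part of the original controllers). The PUT
# branch of EditableTopicDataHandler above expects a JSON payload shaped
# roughly like the following; the exact contents of each change dict depend
# on topic_domain.TopicChange and are only sketched here as an assumption:
# {
#     "version": 3,
#     "commit_message": "Rename topic",
#     "change_dicts": [
#         {"cmd": "update_topic_property",
#          "property_name": "name",
#          "new_value": "Fractions",
#          "old_value": "Fracions"}
#     ]
# }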
|
py | b4166921f9d9123490782d65201ca7748684d730 | from .modules import PadMaxPool3d, Flatten
import torch.nn as nn
"""
All the architectures are built here
"""
class Conv5_FC3(nn.Module):
"""
Classifier for a binary classification task
Subject level architecture used on Minimal preprocessing
"""
def __init__(self, dropout=0.5):
super(Conv5_FC3, self).__init__()
self.features = nn.Sequential(
nn.Conv3d(1, 8, 3, padding=1),
nn.BatchNorm3d(8),
nn.ReLU(),
PadMaxPool3d(2, 2),
nn.Conv3d(8, 16, 3, padding=1),
nn.BatchNorm3d(16),
nn.ReLU(),
PadMaxPool3d(2, 2),
nn.Conv3d(16, 32, 3, padding=1),
nn.BatchNorm3d(32),
nn.ReLU(),
PadMaxPool3d(2, 2),
nn.Conv3d(32, 64, 3, padding=1),
nn.BatchNorm3d(64),
nn.ReLU(),
PadMaxPool3d(2, 2),
nn.Conv3d(64, 128, 3, padding=1),
nn.BatchNorm3d(128),
nn.ReLU(),
PadMaxPool3d(2, 2),
)
self.classifier = nn.Sequential(
Flatten(),
nn.Dropout(p=dropout),
nn.Linear(128 * 6 * 7 * 6, 1300),
nn.ReLU(),
nn.Linear(1300, 50),
nn.ReLU(),
nn.Linear(50, 2)
)
self.flattened_shape = [-1, 128, 6, 7, 6]
def forward(self, x):
x = self.features(x)
x = self.classifier(x)
return x
class Conv5_FC3_mni(nn.Module):
"""
Classifier for a binary classification task
Subject level architecture used on Extensive preprocessing
"""
def __init__(self, dropout=0.5):
super(Conv5_FC3_mni, self).__init__()
self.features = nn.Sequential(
nn.Conv3d(1, 8, 3, padding=1),
nn.BatchNorm3d(8),
nn.ReLU(),
PadMaxPool3d(2, 2),
nn.Conv3d(8, 16, 3, padding=1),
nn.BatchNorm3d(16),
nn.ReLU(),
PadMaxPool3d(2, 2),
nn.Conv3d(16, 32, 3, padding=1),
nn.BatchNorm3d(32),
nn.ReLU(),
PadMaxPool3d(2, 2),
nn.Conv3d(32, 64, 3, padding=1),
nn.BatchNorm3d(64),
nn.ReLU(),
PadMaxPool3d(2, 2),
nn.Conv3d(64, 128, 3, padding=1),
nn.BatchNorm3d(128),
nn.ReLU(),
PadMaxPool3d(2, 2),
)
self.classifier = nn.Sequential(
Flatten(),
nn.Dropout(p=dropout),
nn.Linear(128 * 4 * 5 * 4, 1300),
nn.ReLU(),
nn.Linear(1300, 50),
nn.ReLU(),
nn.Linear(50, 2)
)
self.flattened_shape = [-1, 128, 4, 5, 4]
def forward(self, x):
x = self.features(x)
x = self.classifier(x)
return x
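# Hedged usage sketch (added; not part of the original file). It shows how the
# two classifiers above are typically instantiated and exercised on a dummy
# volume. The input size (1 x 1 x 169 x 208 x 179 voxels) is an assumption
# chosen so that the five pooling stages reduce to the 6 x 7 x 6 feature map
# expected by Conv5_FC3; Conv5_FC3_mni instead expects a smaller (MNI-space)
# volume that ends at 4 x 5 x 4.
# import torch
# model = Conv5_FC3(dropout=0.5)
# dummy = torch.zeros(1, 1, 169, 208, 179)  # (batch, channel, depth, height, width)
# logits = model(dummy)                     # expected shape: (1, 2)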
|
py | b416694c5eaeb183428ef58f4ee17852724039e0 | import uuid
def get_filename(filename, request):
"""Return a random, collision-resistant filename that keeps the original extension."""
ext = filename.split('.')[-1]
return str(uuid.uuid4())+'.'+ext |
py | b4166a08b8af520b9b9e745ae819ddc366788f75 | #!/usr/bin/env python
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import fakeredis
from zope.interface import implementer
from typing import Union
from dino.auth import IAuth
__author__ = 'Oscar Eriksson <[email protected]>'
@implementer(IAuth)
class AllowAllAuth(object):
def __init__(self):
self.redis = fakeredis.FakeStrictRedis()
def get_user_info(self, user_id: str) -> dict:
return dict()
def authenticate_and_populate_session(self, user_id: str, token: str) -> (bool, Union[None, str], Union[None, dict]):
return True, None, {'user_id': user_id, 'token': token, 'user_name': 'user_name'}
def update_session_for_key(self, user_id: str, session_key: str, session_value: str) -> None:
pass
@implementer(IAuth)
class DenyAllAuth(object):
def __init__(self):
self.redis = fakeredis.FakeStrictRedis()
def get_user_info(self, user_id: str) -> dict:
return dict()
def authenticate_and_populate_session(self, user_id: str, token: str) -> (bool, Union[None, str], Union[None, dict]):
return False, 'not allowed', None
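# Hedged usage note (added; not from the original module): both classes are
# drop-in test doubles for the IAuth interface. A test might wire one of them
# into a hypothetical environment object and exercise it like this:
# env.auth = AllowAllAuth()   # accepts every user_id/token pair
# ok, error, session = env.auth.authenticate_and_populate_session('u1', 't1')
# assert ok is True and error is None and session['user_id'] == 'u1'
# env.auth = DenyAllAuth()    # rejects every attempt with 'not allowed'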
|
py | b4166abef1c91ecf58c87f9aabd2f2141c4913ae | """Logic expressions handling
NOTE
----
at present this is mainly needed for facts.py; feel free, however, to improve
this stuff for general purposes.
"""
from typing import Dict as tDict, Type, Union as tUnion
# Type of a fuzzy bool
FuzzyBool = tUnion[bool, None]
def _torf(args):
"""Return True if all args are True, False if they
are all False, else None.
>>> from sympy.core.logic import _torf
>>> _torf((True, True))
True
>>> _torf((False, False))
False
>>> _torf((True, False))
"""
sawT = sawF = False
for a in args:
if a is True:
if sawF:
return
sawT = True
elif a is False:
if sawT:
return
sawF = True
else:
return
return sawT
def _fuzzy_group(args, quick_exit=False):
"""Return True if all args are True, None if there is any None else False
unless ``quick_exit`` is True (then return None as soon as a second False
is seen).
``_fuzzy_group`` is like ``fuzzy_and`` except that it is more
conservative in returning a False, waiting to make sure that all
arguments are True or False and returning None if any arguments are
None. It also has the capability of permitting only a single False and
returning None if more than one is seen. For example, the presence of a
single transcendental amongst rationals would indicate that the group is
no longer rational; but a second transcendental in the group would make the
determination impossible.
Examples
========
>>> from sympy.core.logic import _fuzzy_group
By default, multiple Falses mean the group is broken:
>>> _fuzzy_group([False, False, True])
False
If multiple Falses mean the group status is unknown then set
`quick_exit` to True so None can be returned when the 2nd False is seen:
>>> _fuzzy_group([False, False, True], quick_exit=True)
But if only a single False is seen then the group is known to
be broken:
>>> _fuzzy_group([False, True, True], quick_exit=True)
False
"""
saw_other = False
for a in args:
if a is True:
continue
if a is None:
return
if quick_exit and saw_other:
return
saw_other = True
return not saw_other
def fuzzy_bool(x):
"""Return True, False or None according to x.
Whereas bool(x) returns True or False, fuzzy_bool allows
for the None value and non-false values (which become None), too.
Examples
========
>>> from sympy.core.logic import fuzzy_bool
>>> from sympy.abc import x
>>> fuzzy_bool(x), fuzzy_bool(None)
(None, None)
>>> bool(x), bool(None)
(True, False)
"""
if x is None:
return None
if x in (True, False):
return bool(x)
def fuzzy_and(args):
"""Return True (all True), False (any False) or None.
Examples
========
>>> from sympy.core.logic import fuzzy_and
>>> from sympy import Dummy
If you had a list of objects to test the commutativity of
and you want the fuzzy_and logic applied, passing an
iterator will allow the commutativity to only be computed
as many times as necessary. With this list, False can be
returned after analyzing the first symbol:
>>> syms = [Dummy(commutative=False), Dummy()]
>>> fuzzy_and(s.is_commutative for s in syms)
False
That False would require less work than if a list of pre-computed
items was sent:
>>> fuzzy_and([s.is_commutative for s in syms])
False
"""
rv = True
for ai in args:
ai = fuzzy_bool(ai)
if ai is False:
return False
if rv: # this will stop updating if a None is ever trapped
rv = ai
return rv
def fuzzy_not(v):
"""
Not in fuzzy logic
Return None if `v` is None else `not v`.
Examples
========
>>> from sympy.core.logic import fuzzy_not
>>> fuzzy_not(True)
False
>>> fuzzy_not(None)
>>> fuzzy_not(False)
True
"""
if v is None:
return v
else:
return not v
def fuzzy_or(args):
"""
Or in fuzzy logic. Returns True (any True), False (all False), or None
See the docstrings of fuzzy_and and fuzzy_not for more info. fuzzy_or is
related to the two by the standard De Morgan's law.
>>> from sympy.core.logic import fuzzy_or
>>> fuzzy_or([True, False])
True
>>> fuzzy_or([True, None])
True
>>> fuzzy_or([False, False])
False
>>> print(fuzzy_or([False, None]))
None
"""
rv = False
for ai in args:
ai = fuzzy_bool(ai)
if ai is True:
return True
if rv is False: # this will stop updating if a None is ever trapped
rv = ai
return rv
def fuzzy_xor(args):
"""Return None if any element of args is not True or False, else
True (if there are an odd number of True elements), else False."""
t = f = 0
for a in args:
ai = fuzzy_bool(a)
if ai:
t += 1
elif ai is False:
f += 1
else:
return
return t % 2 == 1
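# Illustrative truth-table examples for fuzzy_xor (added; not in the original
# source):
# fuzzy_xor([True, False, False]) -> True   (one True: odd count)
# fuzzy_xor([True, True, False])  -> False  (two Trues: even count)
# fuzzy_xor([True, None])         -> None   (any non-bool makes the result unknown)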
def fuzzy_nand(args):
"""Return False if all args are True, True if they are all False,
else None."""
return fuzzy_not(fuzzy_and(args))
class Logic:
"""Logical expression"""
# {} 'op' -> LogicClass
op_2class = {} # type: tDict[str, Type[Logic]]
def __new__(cls, *args):
obj = object.__new__(cls)
obj.args = args
return obj
def __getnewargs__(self):
return self.args
def __hash__(self):
return hash((type(self).__name__,) + tuple(self.args))
def __eq__(a, b):
if not isinstance(b, type(a)):
return False
else:
return a.args == b.args
def __ne__(a, b):
if not isinstance(b, type(a)):
return True
else:
return a.args != b.args
def __lt__(self, other):
if self.__cmp__(other) == -1:
return True
return False
def __cmp__(self, other):
if type(self) is not type(other):
a = str(type(self))
b = str(type(other))
else:
a = self.args
b = other.args
return (a > b) - (a < b)
def __str__(self):
return '%s(%s)' % (self.__class__.__name__,
', '.join(str(a) for a in self.args))
__repr__ = __str__
@staticmethod
def fromstring(text):
"""Logic from string with space around & and | but none after !.
e.g.
!a & b | c
"""
lexpr = None # current logical expression
schedop = None # scheduled operation
for term in text.split():
# operation symbol
if term in '&|':
if schedop is not None:
raise ValueError(
'double op forbidden: "%s %s"' % (term, schedop))
if lexpr is None:
raise ValueError(
'%s cannot be in the beginning of expression' % term)
schedop = term
continue
if '&' in term or '|' in term:
raise ValueError('& and | must have space around them')
if term[0] == '!':
if len(term) == 1:
raise ValueError('do not include space after "!"')
term = Not(term[1:])
# already scheduled operation, e.g. '&'
if schedop:
lexpr = Logic.op_2class[schedop](lexpr, term)
schedop = None
continue
# this should be atom
if lexpr is not None:
raise ValueError(
'missing op between "%s" and "%s"' % (lexpr, term))
lexpr = term
# let's check that we ended up in correct state
if schedop is not None:
raise ValueError('premature end-of-expression in "%s"' % text)
if lexpr is None:
raise ValueError('"%s" is empty' % text)
# everything looks good now
return lexpr
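# Illustrative example (added; not in the original source). Argument order in
# the parsed result is hash-dependent, so only the outer type is asserted:
# expr = Logic.fromstring('!a & b | c')
# assert isinstance(expr, Or)  # parsed left to right as Or(And(Not('a'), 'b'), 'c')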
class AndOr_Base(Logic):
def __new__(cls, *args):
bargs = []
for a in args:
if a == cls.op_x_notx:
return a
elif a == (not cls.op_x_notx):
continue # skip this argument
bargs.append(a)
args = sorted(set(cls.flatten(bargs)), key=hash)
for a in args:
if Not(a) in args:
return cls.op_x_notx
if len(args) == 1:
return args.pop()
elif len(args) == 0:
return not cls.op_x_notx
return Logic.__new__(cls, *args)
@classmethod
def flatten(cls, args):
# quick-n-dirty flattening for And and Or
args_queue = list(args)
res = []
while True:
try:
arg = args_queue.pop(0)
except IndexError:
break
if isinstance(arg, Logic):
if isinstance(arg, cls):
args_queue.extend(arg.args)
continue
res.append(arg)
args = tuple(res)
return args
class And(AndOr_Base):
op_x_notx = False
def _eval_propagate_not(self):
# !(a&b&c ...) == !a | !b | !c ...
return Or(*[Not(a) for a in self.args])
# (a|b|...) & c == (a&c) | (b&c) | ...
def expand(self):
# first locate Or
for i in range(len(self.args)):
arg = self.args[i]
if isinstance(arg, Or):
arest = self.args[:i] + self.args[i + 1:]
orterms = [And(*(arest + (a,))) for a in arg.args]
for j in range(len(orterms)):
if isinstance(orterms[j], Logic):
orterms[j] = orterms[j].expand()
res = Or(*orterms)
return res
return self
class Or(AndOr_Base):
op_x_notx = True
def _eval_propagate_not(self):
# !(a|b|c ...) == !a & !b & !c ...
return And(*[Not(a) for a in self.args])
class Not(Logic):
def __new__(cls, arg):
if isinstance(arg, str):
return Logic.__new__(cls, arg)
elif isinstance(arg, bool):
return not arg
elif isinstance(arg, Not):
return arg.args[0]
elif isinstance(arg, Logic):
# XXX this is a hack to expand right from the beginning
arg = arg._eval_propagate_not()
return arg
else:
raise ValueError('Not: unknown argument %r' % (arg,))
@property
def arg(self):
return self.args[0]
Logic.op_2class['&'] = And
Logic.op_2class['|'] = Or
Logic.op_2class['!'] = Not
|
py | b4166b17882c1e2ba4f44884d4d74bc925fc6cbf | from django.test import TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework.test import APIClient
from rest_framework import status
CREATE_USER_URL = reverse('user:create')
TOKEN_URL = reverse('user:token')
ME_URL = reverse('user:me')
def create_user(**params):
return get_user_model().objects.create_user(**params)
class PublicUserApiTests(TestCase):
"""Test the user API (public)"""
def setUp(self):
self.client = APIClient()
def test_create_valid_user_success(self):
"""Test creating user with valid payload is successful"""
payload = {
'email': '[email protected]',
'password': 'password123',
'name': 'Test user'
}
response = self.client.post(CREATE_USER_URL, payload)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
user = get_user_model().objects.get(**response.data)
self.assertTrue(user.check_password(payload['password']))
self.assertNotIn('password', response.data)
def test_user_exists(self):
"""Test create user that already exists fails"""
payload = {
'email': '[email protected]',
'password': 'password123',
'name': 'Test user'
}
create_user(**payload)
response = self.client.post(CREATE_USER_URL, payload)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_password_too_short(self):
"""Test that the password must be more than five characters"""
payload = {
'email': '[email protected]',
'password': 'pw',
'name': 'Test user'
}
response = self.client.post(CREATE_USER_URL, payload)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
user_exists = get_user_model().objects.filter(
email=payload['email']
).exists()
self.assertFalse(user_exists)
def test_create_token_for_user(self):
"""Test that a token is created for the user"""
payload = {
'email': '[email protected]',
'password': 'password123',
'name': 'Test user'
}
create_user(**payload)
response = self.client.post(TOKEN_URL, payload)
self.assertIn('token', response.data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_create_token_invalid_credentials(self):
"""Test token is not created if invalid credentials are given"""
create_user(
email='[email protected]',
password='password123'
)
payload = {
'email': '[email protected]',
'password': 'wrongpassword'
}
response = self.client.post(TOKEN_URL, payload)
self.assertNotIn('token', response.data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_token_no_user(self):
"""Test that token is not created if user doesn't exist"""
payload = {
'email': '[email protected]',
'password': 'password123'
}
response = self.client.post(TOKEN_URL, payload)
self.assertNotIn('token', response.data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_token_missing_fields(self):
"""Test that email and password are required"""
response = self.client.post(TOKEN_URL, {
'email': 'one',
'password': ''
})
self.assertNotIn('token', response.data)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_retrieve_user_unauthorized(self):
"""Test that authentication is required for users"""
response = self.client.get(ME_URL)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateUserApiTests(TestCase):
"""Test API requests that require authentication"""
def setUp(self):
self.user = create_user(
email='[email protected]',
password='password123',
name='Test User'
)
self.client = APIClient()
self.client.force_authenticate(user=self.user)
def test_retrieve_profile_success(self):
"""Test retrieving profile for logged user"""
response = self.client.get(ME_URL)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, {
'name': self.user.name,
'email': self.user.email
})
def test_post_me_not_allowed(self):
"""Test that post is not allowed on the me url"""
response = self.client.post(ME_URL, {})
self.assertEqual(
response.status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
def test_update_user_profile(self):
"""Test update the user profile for authenticated user"""
payload = {
'name': 'New Name',
'password': 'newpassword'
}
response = self.client.patch(ME_URL, payload)
self.user.refresh_from_db()
self.assertEqual(self.user.name, payload['name'])
self.assertTrue(self.user.check_password(payload['password']))
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
py | b4166b352c43454e1b0728c8e9b6008135dda2e2 | # -*- coding: utf-8 -*-
"""Collection of functions for the manipulation of time series."""
from __future__ import absolute_import, division, print_function
import itertools
import os
import warnings
import mando
import numpy as np
import pandas as pd
from mando.rst_text_formatter import RSTHelpFormatter
from tstoolbox import tsutils
from .. import plotutils
warnings.filterwarnings("ignore")
@mando.command("barh_stacked", formatter_class=RSTHelpFormatter, doctype="numpy")
@tsutils.doc(plotutils.ldocstrings)
def barh_stacked_cli(
input_ts="-",
columns=None,
start_date=None,
end_date=None,
clean=False,
skiprows=None,
index_type="datetime",
names=None,
ofilename="plot.png",
xtitle="",
ytitle="",
title="",
figsize="10,6.0",
legend=None,
legend_names=None,
subplots=False,
sharex=True,
sharey=False,
colors="auto",
linestyles="auto",
markerstyles=" ",
bar_hatchstyles="auto",
style="auto",
logx=False,
logy=False,
xaxis="arithmetic",
yaxis="arithmetic",
xlim=None,
ylim=None,
secondary_y=False,
mark_right=True,
scatter_matrix_diagonal="kde",
bootstrap_size=50,
bootstrap_samples=500,
norm_xaxis=False,
norm_yaxis=False,
lognorm_xaxis=False,
lognorm_yaxis=False,
xy_match_line="",
grid=False,
label_rotation=None,
label_skip=1,
force_freq=None,
drawstyle="default",
por=False,
invert_xaxis=False,
invert_yaxis=False,
round_index=None,
plotting_position="weibull",
prob_plot_sort_values="descending",
source_units=None,
target_units=None,
lag_plot_lag=1,
plot_styles="bright",
hlines_y=None,
hlines_xmin=None,
hlines_xmax=None,
hlines_colors=None,
hlines_linestyles="-",
vlines_x=None,
vlines_ymin=None,
vlines_ymax=None,
vlines_colors=None,
vlines_linestyles="-",
):
r"""Horizontal stacked bar plot.
"barh_stacked" makes a horizontal stacked bar plot.
Parameters
----------
{input_ts}
{ofilename}
{xtitle}
{ytitle}
{title}
{figsize}
{legend}
{legend_names}
{subplots}
{sharex}
{sharey}
{colors}
{linestyles}
{markerstyles}
{style}
{bar_hatchstyles}
{xlim}
{xaxis}
{yaxis}
secondary_y
{secondary}
{mark_right}
{grid}
{label_rotation}
{label_skip}
{por}
{force_freq}
{invert_xaxis}
{columns}
{start_date}
{end_date}
{clean}
{skiprows}
{index_type}
{names}
{source_units}
{target_units}
{round_index}
{plot_styles}
{vlines_x}
{vlines_colors}
{vlines_linestyles}
"""
plt = barh_stacked(
input_ts=input_ts,
columns=columns,
start_date=start_date,
end_date=end_date,
clean=clean,
skiprows=skiprows,
index_type=index_type,
names=names,
ofilename=ofilename,
xtitle=xtitle,
ytitle=ytitle,
title=title,
figsize=figsize,
legend=legend,
legend_names=legend_names,
subplots=subplots,
sharex=sharex,
sharey=sharey,
colors=colors,
linestyles=linestyles,
markerstyles=markerstyles,
bar_hatchstyles=bar_hatchstyles,
style=style,
logx=logx,
logy=logy,
xaxis=xaxis,
yaxis=yaxis,
xlim=xlim,
ylim=ylim,
secondary_y=secondary_y,
mark_right=mark_right,
scatter_matrix_diagonal=scatter_matrix_diagonal,
bootstrap_size=bootstrap_size,
bootstrap_samples=bootstrap_samples,
norm_xaxis=norm_xaxis,
norm_yaxis=norm_yaxis,
lognorm_xaxis=lognorm_xaxis,
lognorm_yaxis=lognorm_yaxis,
xy_match_line=xy_match_line,
grid=grid,
label_rotation=label_rotation,
label_skip=label_skip,
force_freq=force_freq,
drawstyle=drawstyle,
por=por,
invert_xaxis=invert_xaxis,
invert_yaxis=invert_yaxis,
round_index=round_index,
plotting_position=plotting_position,
prob_plot_sort_values=prob_plot_sort_values,
source_units=source_units,
target_units=target_units,
lag_plot_lag=lag_plot_lag,
plot_styles=plot_styles,
hlines_y=hlines_y,
hlines_xmin=hlines_xmin,
hlines_xmax=hlines_xmax,
hlines_colors=hlines_colors,
hlines_linestyles=hlines_linestyles,
vlines_x=vlines_x,
vlines_ymin=vlines_ymin,
vlines_ymax=vlines_ymax,
vlines_colors=vlines_colors,
vlines_linestyles=vlines_linestyles,
)
# @tsutils.validator(
# ofilename=[str, ["pass", []], 1],
# type=[str, ["domain", ["barh_stacked",],], 1,],
# lag_plot_lag=[int, ["range", [1, None]], 1],
# xtitle=[str, ["pass", []], 1],
# ytitle=[str, ["pass", []], 1],
# title=[str, ["pass", []], 1],
# figsize=[float, ["range", [0, None]], 2],
# legend=[bool, ["domain", [True, False]], 1],
# legend_names=[str, ["pass", []], 1],
# subplots=[bool, ["domain", [True, False]], 1],
# sharex=[bool, ["domain", [True, False]], 1],
# sharey=[bool, ["domain", [True, False]], 1],
# colors=[str, ["pass", []], None],
# linestyles=[str, ["domain", ["auto", None, "", " ", " "] + plotutils.LINE_LIST], None],
# markerstyles=[str, ["domain", ["auto", None, "", " ", " "] + plotutils.MARKER_LIST], None],
# bar_hatchstyles=[str, ["domain", ["auto", None, "", " ", " "] + plotutils.HATCH_LIST], None],
# style=[str, ["pass", []], None],
# xlim=[float, ["pass", []], 2],
# ylim=[float, ["pass", []], 2],
# xaxis=[str, ["domain", ["arithmetic", "log"]], 1],
# yaxis=[str, ["domain", ["arithmetic", "log"]], 1],
# secondary_y=[bool, ["domain", [True, False]], 1],
# mark_right=[bool, ["domain", [True, False]], 1],
# scatter_matrix_diagonal=[str, ["domain", ["kde", "hist"]], 1],
# bootstrap_size=[int, ["range", [0, None]], 1],
# xy_match_line=[str, ["pass", []], 1],
# grid=[bool, ["domain", [True, False]], 1],
# label_rotation=[float, ["pass", []], 1],
# label_skip=[int, ["range", [1, None]], 1],
# drawstyle=[str, ["pass", []], 1],
# por=[bool, ["domain", [True, False]], 1],
# invert_xaxis=[bool, ["domain", [True, False]], 1],
# invert_yaxis=[bool, ["domain", [True, False]], 1],
# plotting_position=[
# str,
# [
# "domain",
# ["weibull", "benard", "tukey", "gumbel", "hazen", "cunnane", "california"],
# ],
# 1,
# ],
# prob_plot_sort_values=[str, ["domain", ["ascending", "descending"]], 1],
# plot_styles=[
# str,
# [
# "domain",
# [
# "classic",
# "Solarize_Light2",
# "bmh",
# "dark_background",
# "fast",
# "fivethirtyeight",
# "ggplot",
# "grayscale",
# "seaborn",
# "seaborn-bright",
# "seaborn-colorblind",
# "seaborn-dark",
# "seaborn-dark-palette",
# "seaborn-darkgrid",
# "seaborn-deep",
# "seaborn-muted",
# "seaborn-notebook",
# "seaborn-paper",
# "seaborn-pastel",
# "seaborn-poster",
# "seaborn-talk",
# "seaborn-ticks",
# "seaborn-white",
# "seaborn-whitegrid",
# "tableau-colorblind10",
# "science",
# "grid",
# "ieee",
# "scatter",
# "notebook",
# "high-vis",
# "bright",
# "vibrant",
# "muted",
# "retro",
# ],
# ],
# None,
# ],
# hlines_y=[float, ["pass", []], None],
# hlines_xmin=[float, ["pass", []], None],
# hlines_xmax=[float, ["pass", []], None],
# hlines_colors=[str, ["pass", []], None],
# hlines_linestyles=[
# str,
# ["domain", ["auto", None, "", " ", " "] + plotutils.LINE_LIST],
# None,
# ],
# vlines_x=[float, ["pass", []], None],
# vlines_ymin=[float, ["pass", []], None],
# vlines_ymax=[float, ["pass", []], None],
# vlines_colors=[str, ["pass", []], None],
# vlines_linestyles=[
# str,
# ["domain", ["auto", None, "", " ", " "] + plotutils.LINE_LIST],
# None,
# ],
# )
def barh_stacked(
input_ts="-",
columns=None,
start_date=None,
end_date=None,
clean=False,
skiprows=None,
index_type="datetime",
names=None,
ofilename="plot.png",
xtitle="",
ytitle="",
title="",
figsize="10,6.0",
legend=None,
legend_names=None,
subplots=False,
sharex=True,
sharey=False,
colors="auto",
linestyles="auto",
markerstyles=" ",
bar_hatchstyles="auto",
style="auto",
logx=False,
logy=False,
xaxis="arithmetic",
yaxis="arithmetic",
xlim=None,
ylim=None,
secondary_y=False,
mark_right=True,
scatter_matrix_diagonal="kde",
bootstrap_size=50,
bootstrap_samples=500,
norm_xaxis=False,
norm_yaxis=False,
lognorm_xaxis=False,
lognorm_yaxis=False,
xy_match_line="",
grid=False,
label_rotation=None,
label_skip=1,
force_freq=None,
drawstyle="default",
por=False,
invert_xaxis=False,
invert_yaxis=False,
round_index=None,
plotting_position="weibull",
prob_plot_sort_values="descending",
source_units=None,
target_units=None,
lag_plot_lag=1,
plot_styles="bright",
hlines_y=None,
hlines_xmin=None,
hlines_xmax=None,
hlines_colors=None,
hlines_linestyles="-",
vlines_x=None,
vlines_ymin=None,
vlines_ymax=None,
vlines_colors=None,
vlines_linestyles="-",
**kwds,
):
r"""Plot data."""
# Need to work around some old option defaults with the implementation of
# mando
legend = bool(legend == "" or legend == "True" or legend is None)
type = "barh_stacked"
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
from matplotlib.ticker import FixedLocator
tsd = tsutils.common_kwds(
input_ts,
skiprows=skiprows,
names=names,
index_type=index_type,
start_date=start_date,
end_date=end_date,
pick=columns,
round_index=round_index,
dropna="all",
source_units=source_units,
target_units=target_units,
clean=clean,
por=por,
)
tsd, lnames = plotutils.check(type, tsd, legend_names)
# This is to help pretty print the frequency
try:
try:
pltfreq = str(tsd.index.freq, "utf-8").lower()
except TypeError:
pltfreq = str(tsd.index.freq).lower()
if pltfreq.split(" ")[0][1:] == "1":
beginstr = 3
else:
beginstr = 1
if pltfreq == "none":
short_freq = ""
else:
# short freq string (day) OR (2 day)
short_freq = "({})".format(pltfreq[beginstr:-1])
except AttributeError:
short_freq = ""
if colors == "auto":
colors = None
else:
colors = tsutils.make_list(colors)
if linestyles == "auto":
linestyles = plotutils.LINE_LIST
else:
linestyles = tsutils.make_list(linestyles)
if bar_hatchstyles == "auto":
bar_hatchstyles = plotutils.HATCH_LIST
else:
bar_hatchstyles = tsutils.make_list(bar_hatchstyles)
if markerstyles == "auto":
markerstyles = plotutils.MARKER_LIST
else:
markerstyles = tsutils.make_list(markerstyles)
if markerstyles is None:
markerstyles = " "
if style != "auto":
nstyle = tsutils.make_list(style)
if len(nstyle) != len(tsd.columns):
raise ValueError(
tsutils.error_wrapper(
"""
You have to have the same number of style strings as time-series to plot.
You supplied '{}' for style which has {} style strings,
but you have {} time-series.
""".format(
style, len(nstyle), len(tsd.columns)
)
)
)
colors = []
markerstyles = []
linestyles = []
for st in nstyle:
colors.append(st[0])
if len(st) == 1:
markerstyles.append(" ")
linestyles.append("-")
continue
if st[1] in plotutils.MARKER_LIST:
markerstyles.append(st[1])
try:
linestyles.append(st[2:])
except IndexError:
linestyles.append(" ")
else:
markerstyles.append(" ")
linestyles.append(st[1:])
if linestyles is None:
linestyles = [" "]
else:
linestyles = [" " if i in [" ", None] else i for i in linestyles]
markerstyles = [" " if i is None else i for i in markerstyles]
if colors is not None:
icolors = itertools.cycle(colors)
else:
icolors = None
imarkerstyles = itertools.cycle(markerstyles)
ilinestyles = itertools.cycle(linestyles)
# Only for bar, barh, bar_stacked, and barh_stacked.
ibar_hatchstyles = itertools.cycle(bar_hatchstyles)
if (
logx is True
or logy is True
or norm_xaxis is True
or norm_yaxis is True
or lognorm_xaxis is True
or lognorm_yaxis is True
):
warnings.warn(
"""
*
* The --logx, --logy, --norm_xaxis, --norm_yaxis, --lognorm_xaxis, and
* --lognorm_yaxis options are deprecated.
*
* For --logx use --xaxis="log"
* For --logy use --yaxis="log"
* For --norm_xaxis use --type="norm_xaxis"
* For --norm_yaxis use --type="norm_yaxis"
* For --lognorm_xaxis use --type="lognorm_xaxis"
* For --lognorm_yaxis use --type="lognorm_yaxis"
*
"""
)
if xaxis == "log":
logx = True
if yaxis == "log":
logy = True
xlim = plotutils.know_your_limits(xlim, axis=xaxis)
ylim = plotutils.know_your_limits(ylim, axis=yaxis)
plot_styles = tsutils.make_list(plot_styles) + ["no-latex"]
style_loc = os.path.join(
os.path.dirname(__file__), os.pardir, "SciencePlots_styles"
)
plot_styles = [
os.path.join(style_loc, i + ".mplstyle")
if os.path.exists(os.path.join(style_loc, i + ".mplstyle"))
else i
for i in plot_styles
]
plt.style.use(plot_styles)
figsize = tsutils.make_list(figsize, n=2)
_, ax = plt.subplots(figsize=figsize)
if type in ("bar", "bar_stacked", "barh", "barh_stacked"):
stacked = False
if type[-7:] == "stacked":
stacked = True
kind = "bar"
if type[:4] == "barh":
kind = "barh"
if icolors is not None:
c = [next(icolors) for i in range(len(tsd.columns))]
else:
c = None
tsd.plot(
ax=ax,
kind=kind,
legend=legend,
stacked=stacked,
logx=logx,
logy=logy,
xlim=xlim,
ylim=ylim,
figsize=figsize,
linestyle=None,
color=c,
)
hatches = [next(ibar_hatchstyles) for i in range(len(tsd.columns))]
hatches = "".join(h * len(tsd.index) for h in hatches)
for patch, hatch in zip(ax.patches, hatches):
patch.set_hatch(hatch)
freq = tsutils.asbestfreq(tsd, force_freq=force_freq).index.freqstr
if freq is not None:
if "A" in freq:
endchar = 4
elif "M" in freq:
endchar = 7
elif "D" in freq:
endchar = 10
elif "H" in freq:
endchar = 13
else:
endchar = None
nticklabels = []
if kind == "bar":
taxis = ax.xaxis
else:
taxis = ax.yaxis
for index, i in enumerate(taxis.get_majorticklabels()):
if index % label_skip:
nticklabels.append(" ")
else:
nticklabels.append(i.get_text()[:endchar])
taxis.set_ticklabels(nticklabels)
plt.setp(taxis.get_majorticklabels(), rotation=label_rotation)
if legend is True:
plt.legend(loc="best")
if hlines_y is not None:
hlines_y = tsutils.make_list(hlines_y)
hlines_xmin = tsutils.make_list(hlines_xmin)
hlines_xmax = tsutils.make_list(hlines_xmax)
hlines_colors = tsutils.make_list(hlines_colors)
hlines_linestyles = tsutils.make_list(hlines_linestyles)
nxlim = ax.get_xlim()
if hlines_xmin is None:
hlines_xmin = nxlim[0]
if hlines_xmax is None:
hlines_xmax = nxlim[1]
if vlines_x is not None:
vlines_x = tsutils.make_list(vlines_x)
vlines_ymin = tsutils.make_list(vlines_ymin)
vlines_ymax = tsutils.make_list(vlines_ymax)
vlines_colors = tsutils.make_list(vlines_colors)
vlines_linestyles = tsutils.make_list(vlines_linestyles)
nylim = ax.get_ylim()
if vlines_ymin is None:
vlines_ymin = nylim[0]
if vlines_ymax is None:
vlines_ymax = nylim[1]
if type in [
"time",
"xy",
"bar",
"bar_stacked",
"histogram",
"norm_xaxis",
"lognorm_xaxis",
"weibull_xaxis",
"norm_yaxis",
"lognorm_yaxis",
"weibull_yaxis",
]:
if hlines_y is not None:
if type in ["norm_yaxis", "lognorm_yaxis", "weibull_yaxis"]:
hlines_y = ppf(tsutils.make_list(hlines_y))
plt.hlines(
hlines_y,
hlines_xmin,
hlines_xmax,
colors=hlines_colors,
linestyles=hlines_linestyles,
)
if vlines_x is not None:
if type in ["norm_xaxis", "lognorm_xaxis", "weibull_xaxis"]:
vlines_x = ppf(tsutils.make_list(vlines_x))
plt.vlines(
vlines_x,
vlines_ymin,
vlines_ymax,
colors=vlines_colors,
linestyles=vlines_linestyles,
)
plt.xlabel(xtitle)
plt.ylabel(ytitle)
if invert_xaxis is True:
plt.gca().invert_xaxis()
if invert_yaxis is True:
plt.gca().invert_yaxis()
plt.grid(grid)
plt.title(title)
plt.tight_layout()
if ofilename is not None:
plt.savefig(ofilename)
return plt
barh_stacked.__doc__ = barh_stacked_cli.__doc__
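# Hedged usage sketch (added for illustration; not part of the original file).
# Assuming tsutils.common_kwds accepts a pandas DataFrame for ``input_ts``
# (the way other plottoolbox entry points are normally driven), a direct call
# could look like:
# import pandas as pd
# df = pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]},
#                   index=pd.date_range('2000-01-01', periods=3, freq='D'))
# barh_stacked(input_ts=df, ofilename='stacked.png', title='demo')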
|
py | b4166d8478839e18730117e7260fba6b752e2c72 | import json
from chalicelib.core import authorizers
from chalicelib.utils import helper
from chalicelib.utils import pg_client
from chalicelib.utils import dev
from chalicelib.utils.TimeUTC import TimeUTC
from chalicelib.utils.helper import environ
from chalicelib.core import tenants
def create_new_member(email, password, admin, name, owner=False):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
WITH u AS (
INSERT INTO public.users (email, role, name, data)
VALUES (%(email)s, %(role)s, %(name)s, %(data)s)
RETURNING user_id,email,role,name,appearance
),
au AS (INSERT
INTO public.basic_authentication (user_id, password, generated_password)
VALUES ((SELECT user_id FROM u), crypt(%(password)s, gen_salt('bf', 12)), TRUE))
SELECT u.user_id AS id,
u.email,
u.role,
u.name,
TRUE AS change_password,
(CASE WHEN u.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN u.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN u.role = 'member' THEN TRUE ELSE FALSE END) AS member,
u.appearance
FROM u;""",
{"email": email, "password": password,
"role": "owner" if owner else "admin" if admin else "member", "name": name,
"data": json.dumps({"lastAnnouncementView": TimeUTC.now()})})
cur.execute(
query
)
return helper.dict_to_camel_case(cur.fetchone())
def restore_member(user_id, email, password, admin, name, owner=False):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(f"""\
UPDATE public.users
SET name= %(name)s,
role = %(role)s,
deleted_at= NULL,
created_at = timezone('utc'::text, now()),
api_key= generate_api_key(20)
WHERE user_id=%(user_id)s
RETURNING user_id AS id,
email,
role,
name,
TRUE AS change_password,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
appearance;""",
{"user_id": user_id, "email": email,
"role": "owner" if owner else "admin" if admin else "member", "name": name})
cur.execute(
query
)
result = helper.dict_to_camel_case(cur.fetchone())
query = cur.mogrify("""\
UPDATE public.basic_authentication
SET password= crypt(%(password)s, gen_salt('bf', 12)),
generated_password= TRUE,
token=NULL,
token_requested_at=NULL
WHERE user_id=%(user_id)s;""",
{"user_id": user_id, "password": password})
cur.execute(
query
)
return result
def update(tenant_id, user_id, changes):
AUTH_KEYS = ["password", "generatedPassword", "token"]
if len(changes.keys()) == 0:
return None
sub_query_users = []
sub_query_bauth = []
for key in changes.keys():
if key in AUTH_KEYS:
if key == "password":
sub_query_bauth.append("password = crypt(%(password)s, gen_salt('bf', 12))")
sub_query_bauth.append("changed_at = timezone('utc'::text, now())")
elif key == "token":
if changes[key] is not None:
sub_query_bauth.append("token = %(token)s")
sub_query_bauth.append("token_requested_at = timezone('utc'::text, now())")
else:
sub_query_bauth.append("token = NULL")
sub_query_bauth.append("token_requested_at = NULL")
else:
sub_query_bauth.append(f"{helper.key_to_snake_case(key)} = %({key})s")
else:
if key == "appearance":
sub_query_users.append(f"appearance = %(appearance)s::jsonb")
changes["appearance"] = json.dumps(changes[key])
else:
sub_query_users.append(f"{helper.key_to_snake_case(key)} = %({key})s")
with pg_client.PostgresClient() as cur:
if len(sub_query_users) > 0:
cur.execute(
cur.mogrify(f"""\
UPDATE public.users
SET {" ,".join(sub_query_users)}
FROM public.basic_authentication
WHERE users.user_id = %(user_id)s
AND users.user_id = basic_authentication.user_id
RETURNING users.user_id AS id,
users.email,
users.role,
users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance;""",
{"user_id": user_id, **changes})
)
if len(sub_query_bauth) > 0:
cur.execute(
cur.mogrify(f"""\
UPDATE public.basic_authentication
SET {" ,".join(sub_query_bauth)}
FROM public.users AS users
WHERE basic_authentication.user_id = %(user_id)s
AND users.user_id = basic_authentication.user_id
RETURNING users.user_id AS id,
users.email,
users.role,
users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance;""",
{"user_id": user_id, **changes})
)
return helper.dict_to_camel_case(cur.fetchone())
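# Illustrative note (added): ``changes`` mixes user-table and auth-table fields;
# keys listed in AUTH_KEYS are routed to basic_authentication, everything else
# to users. A hypothetical call might look like:
# update(tenant_id=1, user_id=42,
#        changes={"name": "Jane", "password": "n3w-s3cret", "appearance": {"theme": "dark"}})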
def create_member(tenant_id, user_id, data):
admin = get(tenant_id=tenant_id, user_id=user_id)
if not admin["admin"] and not admin["superAdmin"]:
return {"errors": ["unauthorized"]}
if data.get("userId") is not None:
return {"errors": ["please use POST/PUT /client/members/{memberId} for update"]}
user = get_by_email_only(email=data["email"])
if user:
return {"errors": ["user already exists"]}
name = data.get("name", None)
if name is not None and not helper.is_alphabet_latin_space(name):
return {"errors": ["invalid user name"]}
if name is None:
name = data["email"]
temp_pass = helper.generate_salt()[:8]
user = get_deleted_user_by_email(email=data["email"])
if user is not None:
new_member = restore_member(email=data["email"], password=temp_pass,
admin=data.get("admin", False), name=name, user_id=user["userId"])
else:
new_member = create_new_member(email=data["email"], password=temp_pass,
admin=data.get("admin", False), name=name)
helper.async_post(environ['email_basic'] % 'member_invitation',
{
"email": data["email"],
"userName": data["email"],
"tempPassword": temp_pass,
"clientId": tenants.get_by_tenant_id(tenant_id)["name"],
"senderName": admin["name"]
})
return {"data": new_member}
def get(user_id, tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT
users.user_id AS id,
email,
role,
name,
basic_authentication.generated_password,
(CASE WHEN role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN role = 'member' THEN TRUE ELSE FALSE END) AS member,
appearance,
api_key
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
WHERE
users.user_id = %(userId)s
AND deleted_at IS NULL
LIMIT 1;""",
{"userId": user_id})
)
r = cur.fetchone()
return helper.dict_to_camel_case(r, ignore_keys=["appearance"])
def generate_new_api_key(user_id):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""UPDATE public.users
SET api_key=generate_api_key(20)
WHERE
users.user_id = %(userId)s
AND deleted_at IS NULL
RETURNING api_key;""",
{"userId": user_id})
)
r = cur.fetchone()
return helper.dict_to_camel_case(r)
def edit(user_id_to_update, tenant_id, changes, editor_id):
ALLOW_EDIT = ["name", "email", "admin", "appearance"]
user = get(user_id=user_id_to_update, tenant_id=tenant_id)
if editor_id != user_id_to_update or "admin" in changes and changes["admin"] != user["admin"]:
admin = get(tenant_id=tenant_id, user_id=editor_id)
if not admin["superAdmin"] and not admin["admin"]:
return {"errors": ["unauthorized"]}
keys = list(changes.keys())
for k in keys:
if k not in ALLOW_EDIT:
changes.pop(k)
keys = list(changes.keys())
if len(keys) > 0:
if "email" in keys and changes["email"] != user["email"]:
if email_exists(changes["email"]):
return {"errors": ["email already exists."]}
if get_deleted_user_by_email(changes["email"]) is not None:
return {"errors": ["email previously deleted."]}
if "admin" in keys:
changes["role"] = "admin" if changes.pop("admin") else "member"
if len(changes.keys()) > 0:
updated_user = update(tenant_id=tenant_id, user_id=user_id_to_update, changes=changes)
return {"data": updated_user}
return {"data": user}
def get_by_email_only(email):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT
users.user_id AS id,
1 AS tenant_id,
users.email,
users.role,
users.name,
basic_authentication.generated_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
WHERE
users.email = %(email)s
AND users.deleted_at IS NULL;""",
{"email": email})
)
r = cur.fetchall()
return helper.list_to_camel_case(r)
def get_by_email_reset(email, reset_token):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT
users.user_id AS id,
1 AS tenant_id,
users.email,
users.role,
users.name,
basic_authentication.generated_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
WHERE
users.email = %(email)s
AND basic_authentication.token =%(token)s
AND users.deleted_at IS NULL""",
{"email": email, "token": reset_token})
)
r = cur.fetchone()
return helper.dict_to_camel_case(r)
def get_members(tenant_id):
with pg_client.PostgresClient() as cur:
cur.execute(
f"""SELECT
users.user_id AS id,
users.email,
users.role,
users.name,
basic_authentication.generated_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member
FROM public.users LEFT JOIN public.basic_authentication ON users.user_id=basic_authentication.user_id
WHERE users.deleted_at IS NULL
ORDER BY name, id"""
)
r = cur.fetchall()
if len(r):
return helper.list_to_camel_case(r)
return []
def delete_member(user_id, tenant_id, id_to_delete):
if user_id == id_to_delete:
return {"errors": ["unauthorized, cannot delete self"]}
admin = get(user_id=user_id, tenant_id=tenant_id)
if admin["member"]:
return {"errors": ["unauthorized"]}
to_delete = get(user_id=id_to_delete, tenant_id=tenant_id)
if to_delete is None:
return {"errors": ["not found"]}
if to_delete["superAdmin"]:
return {"errors": ["cannot delete super admin"]}
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(f"""UPDATE public.users
SET deleted_at = timezone('utc'::text, now())
WHERE user_id=%(user_id)s;""",
{"user_id": id_to_delete}))
return {"data": get_members(tenant_id=tenant_id)}
def change_password(tenant_id, user_id, email, old_password, new_password):
item = get(tenant_id=tenant_id, user_id=user_id)
if item is None:
return {"errors": ["access denied"]}
if old_password == new_password:
return {"errors": ["old and new password are the same"]}
auth = authenticate(email, old_password, for_change_password=True)
if auth is None:
return {"errors": ["wrong password"]}
changes = {"password": new_password, "generatedPassword": False}
return {"data": update(tenant_id=tenant_id, user_id=user_id, changes=changes),
"jwt": authenticate(email, new_password)["jwt"]}
def count_members():
with pg_client.PostgresClient() as cur:
cur.execute("""SELECT COUNT(user_id)
FROM public.users WHERE deleted_at IS NULL;""")
r = cur.fetchone()
return r["count"]
def email_exists(email):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT
count(user_id)
FROM public.users
WHERE
email = %(email)s
AND deleted_at IS NULL
LIMIT 1;""",
{"email": email})
)
r = cur.fetchone()
return r["count"] > 0
def get_deleted_user_by_email(email):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"""SELECT
*
FROM public.users
WHERE
email = %(email)s
AND deleted_at NOTNULL
LIMIT 1;""",
{"email": email})
)
r = cur.fetchone()
return helper.dict_to_camel_case(r)
def auth_exists(user_id, tenant_id, jwt_iat, jwt_aud):
with pg_client.PostgresClient() as cur:
cur.execute(
cur.mogrify(
f"SELECT user_id AS id,jwt_iat, changed_at FROM public.users INNER JOIN public.basic_authentication USING(user_id) WHERE user_id = %(userId)s AND deleted_at IS NULL LIMIT 1;",
{"userId": user_id})
)
r = cur.fetchone()
return r is not None \
and r.get("jwt_iat") is not None \
and (abs(jwt_iat - TimeUTC.datetime_to_timestamp(r["jwt_iat"]) // 1000) <= 1 \
or (jwt_aud.startswith("plugin") \
and (r["changed_at"] is None \
or jwt_iat >= (TimeUTC.datetime_to_timestamp(r["changed_at"]) // 1000)))
)
@dev.timed
def authenticate(email, password, for_change_password=False, for_plugin=False):
with pg_client.PostgresClient() as cur:
query = cur.mogrify(
f"""SELECT
users.user_id AS id,
1 AS tenant_id,
users.role,
users.name,
basic_authentication.generated_password AS change_password,
(CASE WHEN users.role = 'owner' THEN TRUE ELSE FALSE END) AS super_admin,
(CASE WHEN users.role = 'admin' THEN TRUE ELSE FALSE END) AS admin,
(CASE WHEN users.role = 'member' THEN TRUE ELSE FALSE END) AS member,
users.appearance
FROM public.users INNER JOIN public.basic_authentication USING(user_id)
WHERE users.email = %(email)s
AND basic_authentication.password = crypt(%(password)s, basic_authentication.password)
AND basic_authentication.user_id = (SELECT su.user_id FROM public.users AS su WHERE su.email=%(email)s AND su.deleted_at IS NULL LIMIT 1)
LIMIT 1;""",
{"email": email, "password": password})
cur.execute(query)
r = cur.fetchone()
if r is not None:
if for_change_password:
return True
r = helper.dict_to_camel_case(r, ignore_keys=["appearance"])
query = cur.mogrify(
f"""UPDATE public.users
SET jwt_iat = timezone('utc'::text, now())
WHERE user_id = %(user_id)s
RETURNING jwt_iat;""",
{"user_id": r["id"]})
cur.execute(query)
return {
"jwt": authorizers.generate_jwt(r['id'], r['tenantId'],
TimeUTC.datetime_to_timestamp(cur.fetchone()["jwt_iat"]),
aud=f"plugin:{helper.get_stage_name()}" if for_plugin else f"front:{helper.get_stage_name()}"),
"email": email,
**r
}
return None
|
py | b4166dbb1be04e7543536a2706dd02795c9729df | import pytest
from pgtonic.pg13.create_trigger import TEMPLATES
@pytest.mark.parametrize(
"sql,is_match",
[
("CREATE TRIGGER my_trig AFTER INSERT ON api.account EXECUTE FUNCTION oli.func ()", True),
("CREATE TRIGGER my_trig AFTER INSERT ON api.account EXECUTE FUNCTION oli.func ( param1 )", True),
("CREATE TRIGGER my_trig AFTER INSERT ON api.account EXECUTE FUNCTION oli.func( param1 )", True),
("CREATE CONSTRAINT TRIGGER my_trig AFTER INSERT ON api.account EXECUTE FUNCTION oli.func( param1)", True),
("CREATE TRIGGER my_trig AFTER INSERT ON api.account EXECUTE FUNCTION oli.func(param1 )", True),
("CREATE TRIGGER my_trig AFTER INSERT ON api.account EXECUTE FUNCTION oli.func(param1,param2 )", True),
("CREATE TRIGGER my_trig AFTER INSERT ON api.account EXECUTE FUNCTION oli.func(param1, param2 )", True),
(
"CREATE TRIGGER my_trig BEFORE INSERT OR DELETE OR UPDATE ON public.book EXECUTE PROCEDURE somefunc( param1 )",
True,
),
],
)
def test_pg13_grant(sql: str, is_match: bool) -> None:
assert any([x.is_match(sql) for x in TEMPLATES]) == is_match
|